together 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
together/complete.py CHANGED
@@ -1,22 +1,14 @@
  import json
  from typing import Any, Dict, Iterator, List, Optional

- import requests
- import sseclient # type: ignore
-
  import together
- from together import get_logger, verify_api_key
+ from together.utils import create_post_request, get_logger, sse_client


- logger = get_logger(str(__name__), log_level=together.log_level)
+ logger = get_logger(str(__name__))


  class Complete:
- def __init__(
- self,
- ) -> None:
- verify_api_key(logger)
-
  @classmethod
  def create(
  self,
@@ -45,46 +37,16 @@ class Complete:
  "logprobs": logprobs,
  }

- # HTTP headers for authorization
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "Content-Type": "application/json",
- "User-Agent": together.user_agent,
- }
-
  # send request
- try:
- response = requests.post(
- together.api_base_complete,
- headers=headers,
- json=parameter_payload,
- )
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- if response.status_code == 429:
- logger.critical(
- f"""No running instances for {model}.
- You can start an instance with one of the following methods:
- 1. navigating to the Together Playground at api.together.ai
- 2. starting one in python using together.Models.start(model_name)
- 3. `$ together models start <MODEL_NAME>` at the command line.
- See `together.Models.list()` in python or `$ together models list` in command line
- to get an updated list of valid model names.
- """
- )
- raise together.InstanceError(model=model)
-
- response.raise_for_status()
-
+ response = create_post_request(
+ url=together.api_base_complete, json=parameter_payload
+ )
+ if not response:
+ return {}
  try:
  response_json = dict(response.json())

  except Exception as e:
- logger.critical(
- f"Error raised: {e}\nResponse status code = {response.status_code}"
- )
  raise together.JSONError(e, http_status=response.status_code)
  return response_json

@@ -99,6 +61,7 @@ class Complete:
  top_p: Optional[float] = 0.7,
  top_k: Optional[int] = 50,
  repetition_penalty: Optional[float] = None,
+ raw: Optional[bool] = False,
  ) -> Iterator[str]:
  """
  Prints streaming responses and returns the completed text.
@@ -118,48 +81,30 @@ class Complete:
  "repetition_penalty": repetition_penalty,
  "stream_tokens": True,
  }
- # HTTP headers for authorization
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "Content-Type": "application/json",
- "User-Agent": together.user_agent,
- }

  # send request
- try:
- response = requests.post(
- together.api_base_complete,
- headers=headers,
- json=parameter_payload,
- stream=True,
- )
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- if response.status_code == 200:
- output = ""
- client = sseclient.SSEClient(response)
- for event in client.events():
- if event.data != "[DONE]":
- text = json.loads(event.data)["choices"][0]["text"]
+ response = create_post_request(
+ url=together.api_base_complete, json=parameter_payload, stream=True
+ )
+ if not response:
+ return {}
+ output = ""
+ client = sse_client(response)
+ for event in client.events():
+ if raw:
+ yield str(event.data)
+ elif event.data != "[DONE]":
+ json_response = dict(json.loads(event.data))
+ if "error" in json_response.keys():
+ raise together.ResponseError(
+ json_response["error"]["error"],
+ request_id=json_response["error"]["request_id"],
+ )
+ elif "choices" in json_response.keys():
+ text = json_response["choices"][0]["text"]
  output += text
  yield text
- elif response.status_code == 429:
- logger.critical(
- f"""No running instances for {model}.
- You can start an instance with one of the following methods:
- 1. navigating to the Together Playground at api.together.ai
- 2. starting one in python using together.Models.start(model_name)
- 3. `$ together models start <MODEL_NAME>` at the command line.
- See `together.Models.list()` in python or `$ together models list` in command line
- to get an updated list of valid model names.
- """
- )
- raise together.InstanceError(model=model)
- else:
- logger.critical(
- f"Unknown error raised.\nResponse status code = {response.status_code}"
- )
- response.raise_for_status()
- raise together.ResponseError(http_status=response.status_code)
+ else:
+ raise together.ResponseError(
+ f"Unknown error occured. Received unhandled response: {event.data}"
+ )
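
Note: both completion paths above now go through together.utils.create_post_request, and streaming gains a raw flag. A minimal consumption sketch follows, assuming the streaming classmethod keeps its 0.2.x name Complete.create_streaming and its prompt/model/max_tokens parameters (the full signature is not shown in this hunk):

    import together

    together.api_key = "<TOGETHER_API_KEY>"  # placeholder; auth setup is unchanged by this diff

    # raw=False (default): each yielded chunk is the decoded "text" field of an SSE event.
    # raw=True: the unparsed SSE payload string is yielded instead.
    for chunk in together.Complete.create_streaming(
        prompt="Alan Turing was",
        model="togethercomputer/RedPajama-INCITE-7B-Base",  # illustrative model name
        max_tokens=32,
    ):
        print(chunk, end="", flush=True)
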
together/error.py CHANGED
@@ -11,6 +11,7 @@ class TogetherException(Exception):
  http_status: Optional[int] = None,
  json_body: Optional[Any] = None,
  headers: Optional[Union[str, Dict[Any, Any]]] = None,
+ request_id: Optional[str] = "",
  ) -> None:
  super(TogetherException, self).__init__(message)

@@ -28,12 +29,14 @@ class TogetherException(Exception):
  self.http_status = http_status
  self.json_body = json_body
  self.headers = headers or {}
+ self.request_id = request_id

  def __repr__(self) -> str:
- return "%s(message=%r, http_status=%r)" % (
+ return "%s(message=%r, http_status=%r, request_id=%r)" % (
  self.__class__.__name__,
  self._message,
  self.http_status,
+ self.request_id,
  )


@@ -59,7 +62,14 @@ class InstanceError(TogetherException):
  headers: Optional[str] = None,
  model: Optional[str] = "model",
  ) -> None:
- message = f"No running instances for {model}. You can start an instance by navigating to the Together Playground at api.together.ai"
+ message = f"""No running instances for {model}.
+ You can start an instance with one of the following methods:
+ 1. navigating to the Together Playground at api.together.ai
+ 2. starting one in python using together.Models.start(model_name)
+ 3. `$ together models start <MODEL_NAME>` at the command line.
+ See `together.Models.list()` in python or `$ together models list` in command line
+ to get an updated list of valid model names.
+ """
  super(InstanceError, self).__init__(
  message, http_body, http_status, json_body, headers
  )
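
Note: TogetherException now carries a request_id (also surfaced in __repr__), which the streaming error branch in complete.py populates from the API's error event. A hedged sketch of catching it; the wrapped call is illustrative only:

    import together

    try:
        for chunk in together.Complete.create_streaming(prompt="hello"):  # illustrative call
            print(chunk, end="")
    except together.ResponseError as e:
        # request_id defaults to "" and is filled in when the error event includes one
        print(f"\nRequest failed: {e!r}")  # repr now includes request_id
        print(f"request_id={e.request_id}, http_status={e.http_status}")
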
together/files.py CHANGED
@@ -2,58 +2,41 @@ import json
  import os
  import posixpath
  import urllib.parse
- from typing import Any, Dict, List, Mapping, Optional, Union, cast
+ from typing import Any, Dict, List, Mapping, Optional, Union

  import requests
  from tqdm import tqdm
  from tqdm.utils import CallbackIOWrapper

  import together
- from together import get_logger, verify_api_key
+ from together.utils import (
+ create_get_request,
+ get_logger,
+ response_to_dict,
+ )


- logger = get_logger(str(__name__), log_level=together.log_level)
-
  # the number of bytes in a gigabyte, used to convert bytes to GB for readable comparison
  NUM_BYTES_IN_GB = 2**30

  # maximum number of GB sized files we support finetuning for
  MAX_FT_GB = 4.9

+ logger = get_logger(str(__name__))

- class Files:
- def __init__(
- self,
- ) -> None:
- verify_api_key(logger)

+ class Files:
  @classmethod
  def list(self) -> Dict[str, List[Dict[str, Union[str, int]]]]:
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
-
  # send request
- try:
- response = requests.get(together.api_base_files, headers=headers)
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ response = create_get_request(together.api_base_files)
+ if not response:
+ return {}
+ return response_to_dict(response)

  @classmethod
- def check(self, file: str, model: Optional[str] = None) -> Dict[str, object]:
- return check_json(file, model)
+ def check(self, file: str) -> Dict[str, object]:
+ return check_json(file)

  @classmethod
  def upload(
@@ -70,10 +53,11 @@ class Files:
  }

  if check:
- report_dict = check_json(file, model)
+ report_dict = check_json(file)
  if not report_dict["is_check_passed"]:
- print(report_dict)
- raise together.FileTypeError("Invalid file supplied. Failed to upload.")
+ raise together.FileTypeError(
+ f"Invalid file supplied. Failed to upload.\nReport:\n {report_dict}"
+ )
  else:
  report_dict = {}

@@ -116,7 +100,7 @@ class Files:
  file_id = response.headers["X-Together-File-Id"]

  logger.info(f"R2 Signed URL: {r2_signed_url}")
- logger.info("File-ID")
+ logger.info(f"File-ID: {file_id}")

  logger.info("Uploading file...")

@@ -177,44 +161,16 @@ class Files:
  logger.critical(f"Response error raised: {e}")
  raise together.ResponseError(e)

- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ return response_to_dict(response)

  @classmethod
  def retrieve(self, file_id: str) -> Dict[str, Union[str, int]]:
  retrieve_url = urllib.parse.urljoin(together.api_base_files, file_id)
-
  logger.info(f"Retrieve URL: {retrieve_url}")
-
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
-
- # send request
- try:
- response = requests.get(retrieve_url, headers=headers)
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ response = create_get_request(retrieve_url)
+ if not response:
+ return {}
+ return response_to_dict(response)

  @classmethod
  def retrieve_content(self, file_id: str, output: Union[str, None] = None) -> str:
@@ -288,23 +244,11 @@ class Files:

  def check_json(
  file: str,
- model: Optional[str] = None,
  ) -> Dict[str, object]:
  report_dict = {
  "is_check_passed": True,
  "model_special_tokens": "we are not yet checking end of sentence tokens for this model",
  }
- num_samples_w_eos_token = 0
-
- model_info_dict = cast(Dict[str, Any], together.model_info_dict)
-
- eos_token = None
- if model is not None and model in model_info_dict:
- if "eos_token" in model_info_dict[model]:
- eos_token = model_info_dict[model]["eos_token"]
- report_dict[
- "model_special_tokens"
- ] = f"the end of sentence token for this model is {eos_token}"

  if not os.path.isfile(file):
  report_dict["file_present"] = f"File not found at given file path {file}"
@@ -358,10 +302,6 @@ def check_json(

  report_dict["is_check_passed"] = False

- elif eos_token:
- if eos_token in json_line["text"]:
- num_samples_w_eos_token += 1
-
  # make sure this is outside the for idx, line in enumerate(f): for loop
  if idx + 1 < together.min_samples:
  report_dict["min_samples"] = (
@@ -383,6 +323,4 @@ def check_json(
  )
  report_dict["is_check_passed"] = False

- report_dict["num_samples_w_eos_token"] = num_samples_w_eos_token
-
  return report_dict
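
Note: Files.check drops the model argument and check_json no longer counts end-of-sentence tokens, so the report now describes only the file itself. A minimal sketch of the simplified flow, assuming upload still takes file/check keyword arguments (their names are not shown in these hunks):

    import together

    report = together.Files.check(file="data.jsonl")  # no model argument in 0.2.5
    if report.get("is_check_passed"):
        resp = together.Files.upload(file="data.jsonl", check=True)
        print(resp)
    else:
        print(report)  # e.g. min_samples / file_present findings from check_json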