together 0.2.3-py3-none-any.whl → 0.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
together/finetune.py CHANGED
@@ -1,4 +1,5 @@
  import posixpath
+ import pprint
  import urllib.parse
  from typing import Any, Dict, List, Optional, Union
 
@@ -6,49 +7,23 @@ import requests
  from tqdm import tqdm
 
  import together
- from together import Files, get_logger, verify_api_key
-
-
- logger = get_logger(str(__name__), log_level=together.log_level)
-
-
- # this will change soon to be data driven and give a clearer estimate
- def model_param_count(name: str) -> int:
- pcount = {
- "togethercomputer/RedPajama-INCITE-7B-Chat": 6857302016,
- "togethercomputer/RedPajama-INCITE-7B-Base": 6857302016,
- "togethercomputer/RedPajama-INCITE-7B-Instruct": 6857302016,
- "togethercomputer/RedPajama-INCITE-Chat-3B-v1": 2775864320,
- "togethercomputer/RedPajama-INCITE-Base-3B-v1": 2775864320,
- "togethercomputer/RedPajama-INCITE-Instruct-3B-v1": 2775864320,
- "togethercomputer/Pythia-Chat-Base-7B": 6857302016,
- "togethercomputer/llama-2-7b": 6738415616,
- "togethercomputer/llama-2-7b-chat": 6738415616,
- "togethercomputer/llama-2-13b": 13015864320,
- "togethercomputer/llama-2-13b-chat": 13015864320,
- "togethercomputer/LLaMA-2-7B-32K": 6738415616,
- "togethercomputer/Llama-2-7B-32K-Instruct": 6738415616,
- "togethercomputer/CodeLlama-7b": 6738546688,
- "togethercomputer/CodeLlama-7b-Python": 6738546688,
- "togethercomputer/CodeLlama-7b-Instruct": 6738546688,
- "togethercomputer/CodeLlama-13b": 13016028160,
- "togethercomputer/CodeLlama-13b-Python": 13016028160,
- "togethercomputer/CodeLlama-13b-Instruct": 13016028160,
- "togethercomputer/llama-2-70b": 68976648192,
- "togethercomputer/llama-2-70b-chat": 68976648192,
- }
- try:
- return pcount[name]
- except Exception:
- return 0
+ from together import Files
+ from together.utils import (
+ create_get_request,
+ create_post_request,
+ get_logger,
+ response_to_dict,
+ round_to_closest_multiple_of_32,
+ )
 
 
- class Finetune:
- def __init__(
- self,
- ) -> None:
- verify_api_key(logger)
+ pp = pprint.PrettyPrinter(indent=4)
+
+ logger = get_logger(str(__name__))
 
+
+ class Finetune:
+ # TODO @orangetin: cleanup create validation etc
  @classmethod
  def create(
  self,
@@ -70,47 +45,52 @@ class Finetune:
  ] = None, # resulting finetuned model name will include the suffix
  estimate_price: bool = False,
  wandb_api_key: Optional[str] = None,
+ confirm_inputs: bool = True,
  ) -> Dict[Any, Any]:
+ adjusted_inputs = False
+
  if n_epochs is None or n_epochs < 1:
- logger.fatal("The number of epochs must be specified")
- raise ValueError("n_epochs is required")
+ n_epochs = 1
+ adjusted_inputs = True
 
  # Validate parameters
  if n_checkpoints is None:
  n_checkpoints = 1
  elif n_checkpoints < 1:
  n_checkpoints = 1
- logger.warning(
- f"The number of checkpoints must be >= 1, setting to {n_checkpoints}"
- )
+ adjusted_inputs = True
  elif n_checkpoints > n_epochs:
  n_checkpoints = n_epochs
- logger.warning(
- f"The number of checkpoints must be < the number of epochs, setting to {n_checkpoints}"
- )
-
- if (
- model
- in ["togethercomputer/llama-2-70b", "togethercomputer/llama-2-70b-chat"]
- and batch_size != 144
- ):
- raise ValueError(
- f"Batch size must be 144 for {model} model. Please set batch size to 144"
- )
+ adjusted_inputs = True
 
+ # TODO: Replace with mongodb retrieval for max, min, and default batch size
  if batch_size is None:
  batch_size = 32
  elif batch_size < 4:
- raise ValueError("Batch size must be >= 4.")
+ batch_size = 4
+ adjusted_inputs = True
+
+ max_batch_size = 128
+ if model.startswith("togethercomputer/llama-2-70b"):
+ max_batch_size = 64
+ batch_size = round_to_closest_multiple_of_32(batch_size)
+ adjusted_inputs = True
+ elif model.startswith("togethercomputer/CodeLlama-7b"):
+ max_batch_size = 16
+ elif model.startswith("togethercomputer/CodeLlama-13b"):
+ max_batch_size = 8
+
+ if batch_size > max_batch_size:
+ batch_size = max_batch_size
+ adjusted_inputs = True
 
  # TODO: REMOVE THIS CHECK WHEN WE HAVE CHECKPOINTING WORKING FOR 70B models
  if n_checkpoints > 1 and model in [
  "togethercomputer/llama-2-70b",
  "togethercomputer/llama-2-70b-chat",
  ]:
- raise ValueError(
- "Saving checkpoints during training currently not supported for {model}. Please set the number of checkpoints to 1"
- )
+ n_checkpoints = 1
+ adjusted_inputs = True
 
  parameter_payload = {
  "training_file": training_file,
@@ -130,8 +110,8 @@ class Finetune:
  }
 
  # check if model name is one of the models available for finetuning
- if parameter_payload["model"] not in together.finetune_model_names:
- logger.warning(
+ if not together.Models._is_finetune_model(model):
+ raise ValueError(
  "The finetune model name must be one of the subset of models available for finetuning. "
  "Here is a list of those models https://docs.together.ai/docs/models-fine-tuning"
  )
@@ -150,7 +130,7 @@ class Finetune:
  raise together.FileTypeError(training_file_feedback)
 
  if estimate_price:
- param_size = model_param_count(model)
+ param_size = together.Models._param_count(model)
  if param_size == 0:
  error = f"Unknown model {model}. Cannot estimate price. Please check the name of the model"
  raise together.FileTypeError(error)
@@ -168,7 +148,7 @@ class Finetune:
  {
  "tokens": token_estimate,
  "epochs": n_epochs,
- "parameters": model_param_count(model),
+ "parameters": together.Models._param_count(model),
  },
  ],
  "id": 1,
@@ -180,141 +160,59 @@ class Finetune:
  print(training_file_feedback)
  exit()
 
- # Send POST request to SUBMIT FINETUNE JOB
- # HTTP headers for authorization
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "Content-Type": "application/json",
- "User-Agent": together.user_agent,
- }
- try:
- response = requests.post(
- together.api_base_finetune, headers=headers, json=parameter_payload
- )
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
+ if confirm_inputs:
+ if adjusted_inputs:
+ print(
+ "Note: Some hyperparameters have been adjusted with their minimum/maximum values for a given model."
+ )
+ print("Job creation details:")
+ pp.pprint(parameter_payload)
+ confirm_response = input("\nDo you want to submit the job? [y/N]")
+ if "y" not in confirm_response.lower():
+ return {"status": "job not submitted"}
 
- return response_json
+ # Send POST request to SUBMIT FINETUNE JOB
+ response = create_post_request(
+ together.api_base_finetune, json=parameter_payload
+ )
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def list(self) -> Dict[Any, Any]:
- verify_api_key(logger)
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
-
  # send request
- try:
- response = requests.get(together.api_base_finetune, headers=headers)
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ response = create_get_request(together.api_base_finetune)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def retrieve(self, fine_tune_id: str) -> Dict[Any, Any]:
  retrieve_url = urllib.parse.urljoin(together.api_base_finetune, fine_tune_id)
-
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
-
- # send request
- try:
- response = requests.get(retrieve_url, headers=headers)
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ response = create_get_request(retrieve_url)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def cancel(self, fine_tune_id: str) -> Dict[Any, Any]:
  relative_path = posixpath.join(fine_tune_id, "cancel")
  retrieve_url = urllib.parse.urljoin(together.api_base_finetune, relative_path)
-
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
-
- # send request
- try:
- response = requests.post(retrieve_url, headers=headers)
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ response = create_post_request(retrieve_url)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def list_events(self, fine_tune_id: str) -> Dict[Any, Any]:
  # TODO enable stream
  relative_path = posixpath.join(fine_tune_id, "events")
  retrieve_url = urllib.parse.urljoin(together.api_base_finetune, relative_path)
-
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
-
- # send request
- try:
- response = requests.get(retrieve_url, headers=headers)
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return response_json
+ response = create_get_request(retrieve_url)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def get_checkpoints(self, fine_tune_id: str) -> List[Dict[str, Any]]:
@@ -369,7 +267,7 @@ class Finetune:
  if step != -1:
  model_file_path += f"&checkpoint_step={step}"
 
- logger.info(f"Downloading weights from {model_file_path}...")
+ print(f"Downloading weights from {model_file_path}...")
 
  headers = {
  "Authorization": f"Bearer {together.api_key}",
together/image.py CHANGED
@@ -1,20 +1,13 @@
  from typing import Any, Dict, Optional
 
- import requests
-
  import together
- from together import get_logger, verify_api_key
+ from together.utils import create_post_request, get_logger, response_to_dict
 
 
- logger = get_logger(str(__name__), log_level=together.log_level)
+ logger = get_logger(str(__name__))
 
 
  class Image:
- def __init__(
- self,
- ) -> None:
- verify_api_key(logger)
-
  @classmethod
  def create(
  self,
@@ -42,37 +35,10 @@ class Image:
  "negative_prompt": negative_prompt,
  }
 
- # HTTP headers for authorization
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "Content-Type": "application/json",
- "User-Agent": together.user_agent,
- }
-
  # send request
- try:
- response = requests.post(
- together.api_base_complete,
- headers=headers,
- json=parameter_payload,
- )
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- if response.status_code == 429:
- logger.critical(
- f"No running instances for {model}. You can start an instance by navigating to the Together Playground at api.together.ai"
- )
- raise together.InstanceError(model=model)
-
- response.raise_for_status()
-
- try:
- response_json = dict(response.json())
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
- return response_json
+ response = create_post_request(
+ together.api_base_complete, json=parameter_payload
+ )
+ if not response:
+ return {}
+ return response_to_dict(response)
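
One behavioral consequence of this change for callers: `Image.create` no longer raises `together.InstanceError` on a 429 or `ResponseError`/`JSONError` inline; when the shared helper fails it can hand back an empty dict instead. A hedged caller-side sketch (the prompt text and the exact `create` signature here are illustrative, not taken from this diff):

```python
# Illustrative check reflecting the new failure mode of Image.create in 0.2.5.
import together

together.api_key = "YOUR_API_KEY"  # placeholder

result = together.Image.create(prompt="a watercolor fox")  # prompt is an example
if not result:
    # 0.2.3 raised ResponseError / InstanceError here; 0.2.5 can return {}.
    raise RuntimeError("image request failed")
```
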
together/models.py CHANGED
@@ -1,155 +1,107 @@
  import urllib.parse
  from typing import Any, Dict, List
 
- import requests
-
  import together
- from together import get_logger, verify_api_key
+ from together.utils import (
+ create_get_request,
+ create_post_request,
+ get_logger,
+ response_to_dict,
+ )
 
 
- logger = get_logger(str(__name__), log_level=together.log_level)
+ logger = get_logger(str(__name__))
 
 
  class Models:
- def __init__(
- self,
- ) -> None:
- verify_api_key(logger)
-
  @classmethod
  def list(self) -> List[Any]:
  model_url = urllib.parse.urljoin(together.api_base, "models/info?=")
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "User-Agent": together.user_agent,
- }
- try:
- response = requests.get(
- model_url,
- headers=headers,
- )
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
+ response = create_get_request(model_url)
+ if not response:
+ return []
  try:
  response_list = list(response.json())
  except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
+ raise together.ResponseError(e, http_status=response.status_code)
  return response_list
 
  @classmethod
- def instances(self) -> Dict[str, bool]:
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "accept": "application/json",
- }
- try:
- response = requests.get(
- together.api_base_instances,
- headers=headers,
+ def info(self, model: str, hidden_keys: List[str] = []) -> Dict[str, Any]:
+ """
+ Gets info dictionary for model from model list and filters out hidden_keys
+ """
+ info_dict = next((item for item in self.list() if item["name"] == model), None)
+
+ if info_dict is not None:
+ for key in set(hidden_keys):
+ info_dict.pop(key, None)
+ else:
+ raise ValueError(
+ f"Unable to access {model}. Check your TOGETHER_API_KEY and use together.Models.list() to list available models."
  )
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
 
- try:
- response_dict = response.json()
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
+ return dict(info_dict)
 
- return dict(response_dict)
+ @classmethod
+ def instances(self) -> Dict[str, bool]:
+ response = create_get_request(together.api_base_instances)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def start(self, model: str) -> Dict[str, str]:
  model_url = urllib.parse.urljoin(
  together.api_base_instances, f"start?model={model}"
  )
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "accept": "application/json",
- }
- try:
- response = requests.post(
- model_url,
- headers=headers,
- )
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
-
- try:
- response_dict = response.json()
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
-
- return dict(response_dict)
+ response = create_post_request(model_url)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
  @classmethod
  def stop(self, model: str) -> Dict[str, str]:
  model_url = urllib.parse.urljoin(
  together.api_base_instances, f"stop?model={model}"
  )
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "accept": "application/json",
- }
- try:
- response = requests.post(
- model_url,
- headers=headers,
- )
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
+ response = create_post_request(model_url)
+ if not response:
+ return {}
+ return response_to_dict(response)
 
+ @classmethod
+ def ready(self, model: str) -> List[Any]:
+ ready_url = urllib.parse.urljoin(together.api_base, "models/info?name=" + model)
+ response = create_get_request(ready_url)
+ if not response:
+ return []
  try:
- response_dict = response.json()
+ response_list = list(response.json())
  except Exception as e:
  logger.critical(
  f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
  )
  raise together.JSONError(e, http_status=response.status_code)
 
- return dict(response_dict)
+ return response_list
 
  @classmethod
- def ready(self, model: str) -> List[Any]:
- ready_url = urllib.parse.urljoin(together.api_base, "models/info?name=" + model)
- headers = {
- "Authorization": f"Bearer {together.api_key}",
- "accept": "application/json",
- }
- try:
- response = requests.get(
- ready_url,
- headers=headers,
- )
- response.raise_for_status()
- except requests.exceptions.RequestException as e:
- logger.critical(f"Response error raised: {e}")
- raise together.ResponseError(e)
+ def _is_finetune_model(self, model: str) -> bool:
+ """
+ Return boolean value of whether or not model is supported by the finetuning API
+ """
+ return bool(self.info(model=model).get("finetuning_supported"))
 
- try:
- response_list = response.json()
- except Exception as e:
- logger.critical(
- f"JSON Error raised: {e}\nResponse status code = {response.status_code}"
- )
- raise together.JSONError(e, http_status=response.status_code)
+ @classmethod
+ def _param_count(self, model: str) -> int:
+ """
+ Returns model's parameter count. Returns 0 if not found.
+ """
+
+ param_count = self.info(model=model).get("num_parameters")
+
+ if not param_count:
+ param_count = 0
 
- return list(response_list)
+ return param_count
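
With this change, `Models` derives fine-tuning support and parameter counts from the live model list instead of the hard-coded `model_param_count` table removed from `finetune.py`. A short usage sketch of the new helpers follows; the model name comes from this diff, but the `hidden_keys` value is only an example of a metadata key you might filter out.

```python
# Illustrative use of the new Models helpers added in 0.2.5.
import together

together.api_key = "YOUR_API_KEY"  # placeholder

model = "togethercomputer/llama-2-7b"

# Full metadata record for one model, with selected keys filtered out
# ("description" is a hypothetical key, not confirmed by this diff).
info = together.Models.info(model=model, hidden_keys=["description"])

# These two back the validation and price estimate inside Finetune.create:
print(together.Models._is_finetune_model(model))  # True if finetuning_supported
print(together.Models._param_count(model))        # num_parameters, or 0 if missing
```
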