together 1.5.6.tar.gz → 1.5.7.tar.gz

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (66)
  1. {together-1.5.6 → together-1.5.7}/PKG-INFO +1 -1
  2. {together-1.5.6 → together-1.5.7}/pyproject.toml +1 -1
  3. {together-1.5.6 → together-1.5.7}/src/together/cli/api/finetune.py +11 -3
  4. {together-1.5.6 → together-1.5.7}/src/together/resources/finetune.py +18 -6
  5. {together-1.5.6 → together-1.5.7}/src/together/types/finetune.py +9 -1
  6. {together-1.5.6 → together-1.5.7}/LICENSE +0 -0
  7. {together-1.5.6 → together-1.5.7}/README.md +0 -0
  8. {together-1.5.6 → together-1.5.7}/src/together/__init__.py +0 -0
  9. {together-1.5.6 → together-1.5.7}/src/together/abstract/__init__.py +0 -0
  10. {together-1.5.6 → together-1.5.7}/src/together/abstract/api_requestor.py +0 -0
  11. {together-1.5.6 → together-1.5.7}/src/together/cli/__init__.py +0 -0
  12. {together-1.5.6 → together-1.5.7}/src/together/cli/api/__init__.py +0 -0
  13. {together-1.5.6 → together-1.5.7}/src/together/cli/api/chat.py +0 -0
  14. {together-1.5.6 → together-1.5.7}/src/together/cli/api/completions.py +0 -0
  15. {together-1.5.6 → together-1.5.7}/src/together/cli/api/endpoints.py +0 -0
  16. {together-1.5.6 → together-1.5.7}/src/together/cli/api/files.py +0 -0
  17. {together-1.5.6 → together-1.5.7}/src/together/cli/api/images.py +0 -0
  18. {together-1.5.6 → together-1.5.7}/src/together/cli/api/models.py +0 -0
  19. {together-1.5.6 → together-1.5.7}/src/together/cli/api/utils.py +0 -0
  20. {together-1.5.6 → together-1.5.7}/src/together/cli/cli.py +0 -0
  21. {together-1.5.6 → together-1.5.7}/src/together/client.py +0 -0
  22. {together-1.5.6 → together-1.5.7}/src/together/constants.py +0 -0
  23. {together-1.5.6 → together-1.5.7}/src/together/error.py +0 -0
  24. {together-1.5.6 → together-1.5.7}/src/together/filemanager.py +0 -0
  25. {together-1.5.6 → together-1.5.7}/src/together/legacy/__init__.py +0 -0
  26. {together-1.5.6 → together-1.5.7}/src/together/legacy/base.py +0 -0
  27. {together-1.5.6 → together-1.5.7}/src/together/legacy/complete.py +0 -0
  28. {together-1.5.6 → together-1.5.7}/src/together/legacy/embeddings.py +0 -0
  29. {together-1.5.6 → together-1.5.7}/src/together/legacy/files.py +0 -0
  30. {together-1.5.6 → together-1.5.7}/src/together/legacy/finetune.py +0 -0
  31. {together-1.5.6 → together-1.5.7}/src/together/legacy/images.py +0 -0
  32. {together-1.5.6 → together-1.5.7}/src/together/legacy/models.py +0 -0
  33. {together-1.5.6 → together-1.5.7}/src/together/resources/__init__.py +0 -0
  34. {together-1.5.6 → together-1.5.7}/src/together/resources/audio/__init__.py +0 -0
  35. {together-1.5.6 → together-1.5.7}/src/together/resources/audio/speech.py +0 -0
  36. {together-1.5.6 → together-1.5.7}/src/together/resources/chat/__init__.py +0 -0
  37. {together-1.5.6 → together-1.5.7}/src/together/resources/chat/completions.py +0 -0
  38. {together-1.5.6 → together-1.5.7}/src/together/resources/code_interpreter.py +0 -0
  39. {together-1.5.6 → together-1.5.7}/src/together/resources/completions.py +0 -0
  40. {together-1.5.6 → together-1.5.7}/src/together/resources/embeddings.py +0 -0
  41. {together-1.5.6 → together-1.5.7}/src/together/resources/endpoints.py +0 -0
  42. {together-1.5.6 → together-1.5.7}/src/together/resources/files.py +0 -0
  43. {together-1.5.6 → together-1.5.7}/src/together/resources/images.py +0 -0
  44. {together-1.5.6 → together-1.5.7}/src/together/resources/models.py +0 -0
  45. {together-1.5.6 → together-1.5.7}/src/together/resources/rerank.py +0 -0
  46. {together-1.5.6 → together-1.5.7}/src/together/together_response.py +0 -0
  47. {together-1.5.6 → together-1.5.7}/src/together/types/__init__.py +0 -0
  48. {together-1.5.6 → together-1.5.7}/src/together/types/abstract.py +0 -0
  49. {together-1.5.6 → together-1.5.7}/src/together/types/audio_speech.py +0 -0
  50. {together-1.5.6 → together-1.5.7}/src/together/types/chat_completions.py +0 -0
  51. {together-1.5.6 → together-1.5.7}/src/together/types/code_interpreter.py +0 -0
  52. {together-1.5.6 → together-1.5.7}/src/together/types/common.py +0 -0
  53. {together-1.5.6 → together-1.5.7}/src/together/types/completions.py +0 -0
  54. {together-1.5.6 → together-1.5.7}/src/together/types/embeddings.py +0 -0
  55. {together-1.5.6 → together-1.5.7}/src/together/types/endpoints.py +0 -0
  56. {together-1.5.6 → together-1.5.7}/src/together/types/error.py +0 -0
  57. {together-1.5.6 → together-1.5.7}/src/together/types/files.py +0 -0
  58. {together-1.5.6 → together-1.5.7}/src/together/types/images.py +0 -0
  59. {together-1.5.6 → together-1.5.7}/src/together/types/models.py +0 -0
  60. {together-1.5.6 → together-1.5.7}/src/together/types/rerank.py +0 -0
  61. {together-1.5.6 → together-1.5.7}/src/together/utils/__init__.py +0 -0
  62. {together-1.5.6 → together-1.5.7}/src/together/utils/_log.py +0 -0
  63. {together-1.5.6 → together-1.5.7}/src/together/utils/api_helpers.py +0 -0
  64. {together-1.5.6 → together-1.5.7}/src/together/utils/files.py +0 -0
  65. {together-1.5.6 → together-1.5.7}/src/together/utils/tools.py +0 -0
  66. {together-1.5.6 → together-1.5.7}/src/together/version.py +0 -0
{together-1.5.6 → together-1.5.7}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: together
-Version: 1.5.6
+Version: 1.5.7
 Summary: Python client for Together's Cloud Platform!
 License: Apache-2.0
 Author: Together AI
{together-1.5.6 → together-1.5.7}/pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 
 [tool.poetry]
 name = "together"
-version = "1.5.6"
+version = "1.5.7"
 authors = ["Together AI <support@together.ai>"]
 description = "Python client for Together's Cloud Platform!"
 readme = "README.md"
{together-1.5.6 → together-1.5.7}/src/together/cli/api/finetune.py
@@ -258,10 +258,13 @@ def create(
             raise click.BadParameter(
                 f"LoRA fine-tuning is not supported for the model `{model}`"
             )
-
+        if training_method == "dpo":
+            default_batch_size = model_limits.lora_training.max_batch_size_dpo
+        else:
+            default_batch_size = model_limits.lora_training.max_batch_size
         default_values = {
             "lora_r": model_limits.lora_training.max_rank,
-            "batch_size": model_limits.lora_training.max_batch_size,
+            "batch_size": default_batch_size,
             "learning_rate": 1e-3,
         }
 
@@ -288,7 +291,12 @@ def create(
 
         batch_size_source = ctx.get_parameter_source("batch_size")  # type: ignore[attr-defined]
         if batch_size_source == ParameterSource.DEFAULT:
-            training_args["batch_size"] = model_limits.full_training.max_batch_size
+            if training_method == "dpo":
+                training_args["batch_size"] = (
+                    model_limits.full_training.max_batch_size_dpo
+                )
+            else:
+                training_args["batch_size"] = model_limits.full_training.max_batch_size
 
     if n_evals <= 0 and validation_file:
         log_warn(
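Net effect of the CLI change: when --batch-size is left at its default, the default now depends on the training method instead of always being the SFT maximum. A minimal standalone sketch of the selection logic (the TrainingLimits stand-in and its numeric values are hypothetical; the real numbers come from the model-limits API response):

    from dataclasses import dataclass

    # Hypothetical stand-in for model_limits.lora_training / .full_training.
    @dataclass
    class TrainingLimits:
        max_batch_size: int
        max_batch_size_dpo: int
        min_batch_size: int

    def pick_default_batch_size(limits: TrainingLimits, training_method: str) -> int:
        # Mirrors the CLI change above: DPO gets its own, typically smaller,
        # ceiling (see the max_batch_size_dpo default in types/finetune.py below).
        if training_method == "dpo":
            return limits.max_batch_size_dpo
        return limits.max_batch_size

    limits = TrainingLimits(max_batch_size=96, max_batch_size_dpo=48, min_batch_size=8)
    assert pick_default_batch_size(limits, "sft") == 96
    assert pick_default_batch_size(limits, "dpo") == 48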
{together-1.5.6 → together-1.5.7}/src/together/resources/finetune.py
@@ -102,6 +102,7 @@ def create_finetune_request(
 
     training_type: TrainingType = FullTrainingType()
     max_batch_size: int = 0
+    max_batch_size_dpo: int = 0
    min_batch_size: int = 0
     if lora:
         if model_limits.lora_training is None:
@@ -119,7 +120,7 @@ def create_finetune_request(
 
         max_batch_size = model_limits.lora_training.max_batch_size
         min_batch_size = model_limits.lora_training.min_batch_size
-
+        max_batch_size_dpo = model_limits.lora_training.max_batch_size_dpo
     else:
         if model_limits.full_training is None:
             raise ValueError(
@@ -128,13 +129,24 @@ def create_finetune_request(
 
         max_batch_size = model_limits.full_training.max_batch_size
         min_batch_size = model_limits.full_training.min_batch_size
+        max_batch_size_dpo = model_limits.full_training.max_batch_size_dpo
 
-    batch_size = batch_size if batch_size != "max" else max_batch_size
+    if batch_size == "max":
+        if training_method == "dpo":
+            batch_size = max_batch_size_dpo
+        else:
+            batch_size = max_batch_size
 
-    if batch_size > max_batch_size:
-        raise ValueError(
-            f"Requested batch size of {batch_size} is higher that the maximum allowed value of {max_batch_size}."
-        )
+    if training_method == "sft":
+        if batch_size > max_batch_size:
+            raise ValueError(
+                f"Requested batch size of {batch_size} is higher that the maximum allowed value of {max_batch_size}."
+            )
+    elif training_method == "dpo":
+        if batch_size > max_batch_size_dpo:
+            raise ValueError(
+                f"Requested batch size of {batch_size} is higher that the maximum allowed value of {max_batch_size_dpo}."
+            )
 
     if batch_size < min_batch_size:
         raise ValueError(
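The same method-awareness lands in create_finetune_request: batch_size="max" now resolves against the method-specific ceiling, and an explicit value is validated against that same ceiling. A condensed sketch of the resolution rule (the function name and flat signature are illustrative; the real code reads these limits off model_limits, and the minimum-side error message here is paraphrased):

    from typing import Union

    def resolve_batch_size(
        batch_size: Union[int, str],
        training_method: str,  # "sft" or "dpo"
        max_batch_size: int,
        max_batch_size_dpo: int,
        min_batch_size: int,
    ) -> int:
        # Pick the ceiling that matches the training method.
        cap = max_batch_size_dpo if training_method == "dpo" else max_batch_size
        if batch_size == "max":
            return cap
        assert isinstance(batch_size, int)
        if batch_size > cap:
            raise ValueError(
                f"Requested batch size of {batch_size} is higher than "
                f"the maximum allowed value of {cap}."
            )
        if batch_size < min_batch_size:
            raise ValueError(
                f"Requested batch size of {batch_size} is lower than "
                f"the minimum allowed value of {min_batch_size}."
            )
        return batch_size

    assert resolve_batch_size("max", "dpo", 96, 48, 8) == 48
    assert resolve_batch_size(64, "sft", 96, 48, 8) == 64

In practice this means a call such as client.fine_tuning.create(..., training_method="dpo", batch_size="max") (assuming the SDK's public create signature, which this diff does not show) now requests the DPO maximum rather than the SFT maximum.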
{together-1.5.6 → together-1.5.7}/src/together/types/finetune.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from enum import Enum
-from typing import List, Literal
+from typing import List, Literal, Any
 
 from pydantic import StrictBool, Field, field_validator
 
@@ -329,8 +329,16 @@ class FinetuneDownloadResult(BaseModel):
 
 class FinetuneFullTrainingLimits(BaseModel):
     max_batch_size: int
+    max_batch_size_dpo: int = -1
     min_batch_size: int
 
+    def __init__(self, **data: Any) -> None:
+        super().__init__(**data)
+        if self.max_batch_size_dpo == -1:
+            half_max = self.max_batch_size // 2
+            rounded_half_max = (half_max // 8) * 8
+            self.max_batch_size_dpo = max(self.min_batch_size, rounded_half_max)
+
 
 class FinetuneLoraTrainingLimits(FinetuneFullTrainingLimits):
     max_rank: int
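The -1 default on max_batch_size_dpo is a sentinel meaning "derive it": when the API response supplies no DPO limit, the model halves the SFT maximum, rounds down to a multiple of 8, and clamps at min_batch_size. Worked examples using the class from this diff (the numeric limits are made up):

    from together.types.finetune import FinetuneFullTrainingLimits

    # Derived: 96 // 2 = 48, (48 // 8) * 8 = 48, max(8, 48) = 48
    limits = FinetuneFullTrainingLimits(max_batch_size=96, min_batch_size=8)
    assert limits.max_batch_size_dpo == 48

    # Derived with clamping: 12 // 2 = 6, (6 // 8) * 8 = 0, max(8, 0) = 8
    small = FinetuneFullTrainingLimits(max_batch_size=12, min_batch_size=8)
    assert small.max_batch_size_dpo == 8

    # An explicit (non -1) value is kept as-is.
    explicit = FinetuneFullTrainingLimits(
        max_batch_size=96, min_batch_size=8, max_batch_size_dpo=32
    )
    assert explicit.max_batch_size_dpo == 32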