together 1.2.12__tar.gz → 1.2.13__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. {together-1.2.12 → together-1.2.13}/PKG-INFO +2 -1
  2. {together-1.2.12 → together-1.2.13}/pyproject.toml +3 -2
  3. {together-1.2.12 → together-1.2.13}/src/together/cli/api/finetune.py +52 -24
  4. {together-1.2.12 → together-1.2.13}/src/together/constants.py +1 -1
  5. {together-1.2.12 → together-1.2.13}/src/together/resources/chat/completions.py +2 -2
  6. {together-1.2.12 → together-1.2.13}/src/together/resources/finetune.py +14 -8
  7. {together-1.2.12 → together-1.2.13}/src/together/types/chat_completions.py +16 -1
  8. {together-1.2.12 → together-1.2.13}/LICENSE +0 -0
  9. {together-1.2.12 → together-1.2.13}/README.md +0 -0
  10. {together-1.2.12 → together-1.2.13}/src/together/__init__.py +0 -0
  11. {together-1.2.12 → together-1.2.13}/src/together/abstract/__init__.py +0 -0
  12. {together-1.2.12 → together-1.2.13}/src/together/abstract/api_requestor.py +0 -0
  13. {together-1.2.12 → together-1.2.13}/src/together/cli/__init__.py +0 -0
  14. {together-1.2.12 → together-1.2.13}/src/together/cli/api/__init__.py +0 -0
  15. {together-1.2.12 → together-1.2.13}/src/together/cli/api/chat.py +0 -0
  16. {together-1.2.12 → together-1.2.13}/src/together/cli/api/completions.py +0 -0
  17. {together-1.2.12 → together-1.2.13}/src/together/cli/api/files.py +0 -0
  18. {together-1.2.12 → together-1.2.13}/src/together/cli/api/images.py +0 -0
  19. {together-1.2.12 → together-1.2.13}/src/together/cli/api/models.py +0 -0
  20. {together-1.2.12 → together-1.2.13}/src/together/cli/cli.py +0 -0
  21. {together-1.2.12 → together-1.2.13}/src/together/client.py +0 -0
  22. {together-1.2.12 → together-1.2.13}/src/together/error.py +0 -0
  23. {together-1.2.12 → together-1.2.13}/src/together/filemanager.py +0 -0
  24. {together-1.2.12 → together-1.2.13}/src/together/legacy/__init__.py +0 -0
  25. {together-1.2.12 → together-1.2.13}/src/together/legacy/base.py +0 -0
  26. {together-1.2.12 → together-1.2.13}/src/together/legacy/complete.py +0 -0
  27. {together-1.2.12 → together-1.2.13}/src/together/legacy/embeddings.py +0 -0
  28. {together-1.2.12 → together-1.2.13}/src/together/legacy/files.py +0 -0
  29. {together-1.2.12 → together-1.2.13}/src/together/legacy/finetune.py +0 -0
  30. {together-1.2.12 → together-1.2.13}/src/together/legacy/images.py +0 -0
  31. {together-1.2.12 → together-1.2.13}/src/together/legacy/models.py +0 -0
  32. {together-1.2.12 → together-1.2.13}/src/together/resources/__init__.py +0 -0
  33. {together-1.2.12 → together-1.2.13}/src/together/resources/chat/__init__.py +0 -0
  34. {together-1.2.12 → together-1.2.13}/src/together/resources/completions.py +0 -0
  35. {together-1.2.12 → together-1.2.13}/src/together/resources/embeddings.py +0 -0
  36. {together-1.2.12 → together-1.2.13}/src/together/resources/files.py +0 -0
  37. {together-1.2.12 → together-1.2.13}/src/together/resources/images.py +0 -0
  38. {together-1.2.12 → together-1.2.13}/src/together/resources/models.py +0 -0
  39. {together-1.2.12 → together-1.2.13}/src/together/resources/rerank.py +0 -0
  40. {together-1.2.12 → together-1.2.13}/src/together/together_response.py +0 -0
  41. {together-1.2.12 → together-1.2.13}/src/together/types/__init__.py +0 -0
  42. {together-1.2.12 → together-1.2.13}/src/together/types/abstract.py +0 -0
  43. {together-1.2.12 → together-1.2.13}/src/together/types/common.py +0 -0
  44. {together-1.2.12 → together-1.2.13}/src/together/types/completions.py +0 -0
  45. {together-1.2.12 → together-1.2.13}/src/together/types/embeddings.py +0 -0
  46. {together-1.2.12 → together-1.2.13}/src/together/types/error.py +0 -0
  47. {together-1.2.12 → together-1.2.13}/src/together/types/files.py +0 -0
  48. {together-1.2.12 → together-1.2.13}/src/together/types/finetune.py +0 -0
  49. {together-1.2.12 → together-1.2.13}/src/together/types/images.py +0 -0
  50. {together-1.2.12 → together-1.2.13}/src/together/types/models.py +0 -0
  51. {together-1.2.12 → together-1.2.13}/src/together/types/rerank.py +0 -0
  52. {together-1.2.12 → together-1.2.13}/src/together/utils/__init__.py +0 -0
  53. {together-1.2.12 → together-1.2.13}/src/together/utils/_log.py +0 -0
  54. {together-1.2.12 → together-1.2.13}/src/together/utils/api_helpers.py +0 -0
  55. {together-1.2.12 → together-1.2.13}/src/together/utils/files.py +0 -0
  56. {together-1.2.12 → together-1.2.13}/src/together/utils/tools.py +0 -0
  57. {together-1.2.12 → together-1.2.13}/src/together/version.py +0 -0

{together-1.2.12 → together-1.2.13}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: together
-Version: 1.2.12
+Version: 1.2.13
 Summary: Python client for Together's Cloud Platform!
 Home-page: https://github.com/togethercomputer/together-python
 License: Apache-2.0
@@ -25,6 +25,7 @@ Requires-Dist: pillow (>=10.3.0,<11.0.0)
 Requires-Dist: pyarrow (>=10.0.1)
 Requires-Dist: pydantic (>=2.6.3,<3.0.0)
 Requires-Dist: requests (>=2.31.0,<3.0.0)
+Requires-Dist: rich (>=13.8.1,<14.0.0)
 Requires-Dist: tabulate (>=0.9.0,<0.10.0)
 Requires-Dist: tqdm (>=4.66.2,<5.0.0)
 Requires-Dist: typer (>=0.9,<0.13)

{together-1.2.12 → together-1.2.13}/pyproject.toml
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 
 [tool.poetry]
 name = "together"
-version = "1.2.12"
+version = "1.2.13"
 authors = [
     "Together AI <support@together.ai>"
 ]
@@ -31,6 +31,7 @@ homepage = "https://github.com/togethercomputer/together-python"
 python = "^3.8"
 typer = ">=0.9,<0.13"
 requests = "^2.31.0"
+rich = "^13.8.1"
 tqdm = "^4.66.2"
 tabulate = "^0.9.0"
 pydantic = "^2.6.3"
@@ -70,7 +71,7 @@ tox = "^4.14.1"
 optional = true
 
 [tool.poetry.group.examples.dependencies]
-datasets = "^2.18.0"
+datasets = ">=2.18,<4.0"
 transformers = "^4.39.3"
 
 

{together-1.2.12 → together-1.2.13}/src/together/cli/api/finetune.py
@@ -1,15 +1,27 @@
 from __future__ import annotations
 
 import json
+from datetime import datetime
 from textwrap import wrap
 
 import click
 from click.core import ParameterSource  # type: ignore[attr-defined]
+from rich import print as rprint
 from tabulate import tabulate
 
 from together import Together
-from together.utils import finetune_price_to_dollars, log_warn, parse_timestamp
 from together.types.finetune import DownloadCheckpointType
+from together.utils import finetune_price_to_dollars, log_warn, parse_timestamp
+
+
+_CONFIRMATION_MESSAGE = (
+    "You are about to create a fine-tuning job. "
+    "The cost of your job will be determined by the model size, the number of tokens "
+    "in the training file, the number of tokens in the validation file, the number of epochs, and "
+    "the number of evaluations. Visit https://www.together.ai/pricing to get a price estimate.\n"
+    "You can pass `-y` or `--confirm` to your command to skip this message.\n\n"
+    "Do you want to proceed?"
+)
 
 
 class DownloadCheckpointTypeChoice(click.Choice):
@@ -65,6 +77,14 @@ def fine_tuning(ctx: click.Context) -> None:
     "--suffix", type=str, default=None, help="Suffix for the fine-tuned model name"
 )
 @click.option("--wandb-api-key", type=str, default=None, help="Wandb API key")
+@click.option(
+    "--confirm",
+    "-y",
+    type=bool,
+    is_flag=True,
+    default=False,
+    help="Whether to skip the launch confirmation message",
+)
 def create(
     ctx: click.Context,
     training_file: str,
@@ -82,6 +102,7 @@ def create(
     lora_trainable_modules: str,
     suffix: str,
     wandb_api_key: str,
+    confirm: bool,
 ) -> None:
     """Start fine-tuning"""
     client: Together = ctx.obj
@@ -109,30 +130,37 @@
             "You have specified a number of evaluation loops but no validation file."
         )
 
-    response = client.fine_tuning.create(
-        training_file=training_file,
-        model=model,
-        n_epochs=n_epochs,
-        validation_file=validation_file,
-        n_evals=n_evals,
-        n_checkpoints=n_checkpoints,
-        batch_size=batch_size,
-        learning_rate=learning_rate,
-        lora=lora,
-        lora_r=lora_r,
-        lora_dropout=lora_dropout,
-        lora_alpha=lora_alpha,
-        lora_trainable_modules=lora_trainable_modules,
-        suffix=suffix,
-        wandb_api_key=wandb_api_key,
-    )
-
-    click.echo(json.dumps(response.model_dump(exclude_none=True), indent=4))
+    if confirm or click.confirm(_CONFIRMATION_MESSAGE, default=True, show_default=True):
+        response = client.fine_tuning.create(
+            training_file=training_file,
+            model=model,
+            n_epochs=n_epochs,
+            validation_file=validation_file,
+            n_evals=n_evals,
+            n_checkpoints=n_checkpoints,
+            batch_size=batch_size,
+            learning_rate=learning_rate,
+            lora=lora,
+            lora_r=lora_r,
+            lora_dropout=lora_dropout,
+            lora_alpha=lora_alpha,
+            lora_trainable_modules=lora_trainable_modules,
+            suffix=suffix,
+            wandb_api_key=wandb_api_key,
+            verbose=True,
+        )
 
-    # TODO: Remove it after the 21st of August
-    log_warn(
-        "The default value of batch size has been changed from 32 to 16 since together version >= 1.2.6"
-    )
+        report_string = f"Successfully submitted a fine-tuning job {response.id}"
+        if response.created_at is not None:
+            created_time = datetime.strptime(
+                response.created_at, "%Y-%m-%dT%H:%M:%S.%f%z"
+            )
+            # created_at reports UTC time, we use .astimezone() to convert to local time
+            formatted_time = created_time.astimezone().strftime("%m/%d/%Y, %H:%M:%S")
+            report_string += f" at {formatted_time}"
+        rprint(report_string)
+    else:
+        click.echo("No confirmation received, stopping job launch")
 
 
 @fine_tuning.command()
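
The CLI change above gates job submission behind an interactive prompt that `-y`/`--confirm` skips. Below is a minimal standalone sketch of the same click pattern; it is an illustrative stand-in, not the actual `together fine-tuning create` command, which takes many more options.

import click

@click.command()
@click.option(
    "--confirm",
    "-y",
    is_flag=True,
    default=False,
    help="Whether to skip the launch confirmation message",
)
def create(confirm: bool) -> None:
    # Prompt unless -y/--confirm was passed; stop politely on "no".
    if confirm or click.confirm("Do you want to proceed?", default=True, show_default=True):
        click.echo("Submitting job...")  # the real command calls client.fine_tuning.create(...) here
    else:
        click.echo("No confirmation received, stopping job launch")

if __name__ == "__main__":
    create()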

{together-1.2.12 → together-1.2.13}/src/together/constants.py
@@ -19,7 +19,7 @@ Please set it as an environment variable or set it as together.api_key
 Find your TOGETHER_API_KEY at https://api.together.xyz/settings/api-keys"""
 
 # Minimum number of samples required for fine-tuning file
-MIN_SAMPLES = 100
+MIN_SAMPLES = 1
 
 # the number of bytes in a gigabyte, used to convert bytes to GB for readable comparison
 NUM_BYTES_IN_GB = 2**30

{together-1.2.12 → together-1.2.13}/src/together/resources/chat/completions.py
@@ -20,7 +20,7 @@ class ChatCompletions:
     def create(
         self,
         *,
-        messages: List[Dict[str, str]],
+        messages: List[Dict[str, Any]],
         model: str,
         max_tokens: int | None = None,
         stop: List[str] | None = None,
@@ -39,7 +39,7 @@ class ChatCompletions:
         n: int | None = None,
         safety_model: str | None = None,
         response_format: Dict[str, str | Dict[str, Any]] | None = None,
-        tools: Dict[str, str | Dict[str, Any]] | None = None,
+        tools: List[Dict[str, Any]] | None = None,
         tool_choice: str | Dict[str, str | Dict[str, str]] | None = None,
         **kwargs: Any,
     ) -> ChatCompletionResponse | Iterator[ChatCompletionChunk]:
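
With `tools` now typed as `List[Dict[str, Any]]`, a function-calling request passes a list of tool definitions rather than a single mapping. A hedged sketch of a call site follows; the model name and the exact tool schema are illustrative assumptions following the OpenAI-style function-calling format.

from together import Together

client = Together()  # assumes TOGETHER_API_KEY is set in the environment

# Illustrative tool definition; the schema fields are assumptions.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",  # hypothetical tool
            "description": "Get the current weather for a city",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }
]

response = client.chat.completions.create(
    model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",  # placeholder model
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    tools=tools,
)
print(response.choices[0].message)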

{together-1.2.12 → together-1.2.13}/src/together/resources/finetune.py
@@ -2,6 +2,8 @@ from __future__ import annotations
 
 from pathlib import Path
 
+from rich import print as rprint
+
 from together.abstract import api_requestor
 from together.filemanager import DownloadManager
 from together.together_response import TogetherResponse
@@ -43,6 +45,7 @@ class FineTuning:
         lora_trainable_modules: str | None = "all-linear",
         suffix: str | None = None,
         wandb_api_key: str | None = None,
+        verbose: bool = False,
     ) -> FinetuneResponse:
         """
         Method to initiate a fine-tuning job
@@ -67,6 +70,8 @@ class FineTuning:
                 Defaults to None.
             wandb_api_key (str, optional): API key for Weights & Biases integration.
                 Defaults to None.
+            verbose (bool, optional): whether to print the job parameters before submitting a request.
+                Defaults to False.
 
         Returns:
             FinetuneResponse: Object containing information about fine-tuning job.
@@ -85,7 +90,7 @@ class FineTuning:
             lora_trainable_modules=lora_trainable_modules,
         )
 
-        parameter_payload = FinetuneRequest(
+        finetune_request = FinetuneRequest(
             model=model,
             training_file=training_file,
             validation_file=validation_file,
@@ -97,7 +102,13 @@ class FineTuning:
             training_type=training_type,
             suffix=suffix,
             wandb_key=wandb_api_key,
-        ).model_dump(exclude_none=True)
+        )
+        if verbose:
+            rprint(
+                "Submitting a fine-tuning job with the following parameters:",
+                finetune_request,
+            )
+        parameter_payload = finetune_request.model_dump(exclude_none=True)
 
         response, _, _ = requestor.request(
             options=TogetherRequest(
@@ -110,11 +121,6 @@
 
         assert isinstance(response, TogetherResponse)
 
-        # TODO: Remove it after the 21st of August
-        log_warn(
-            "The default value of batch size has been changed from 32 to 16 since together version >= 1.2.6"
-        )
-
         # TODO: Remove after next LoRA default change
         log_warn(
             "Some of the jobs run _directly_ from the together-python library might be trained using LoRA adapters. "
@@ -266,7 +272,7 @@ class FineTuning:
                 raise ValueError(
                     "Only DEFAULT checkpoint type is allowed for FullTrainingType"
                 )
-            url += f"&checkpoint=modelOutputPath"
+            url += "&checkpoint=modelOutputPath"
         elif isinstance(ft_job.training_type, LoRATrainingType):
            if checkpoint_type == DownloadCheckpointType.DEFAULT:
                checkpoint_type = DownloadCheckpointType.MERGED
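
The new `verbose` flag makes the SDK print the assembled FinetuneRequest with rich before the request is sent; the CLI now passes `verbose=True` when creating jobs. A minimal usage sketch from Python follows; the file ID and model name are placeholders.

from together import Together

client = Together()  # assumes TOGETHER_API_KEY is set in the environment

response = client.fine_tuning.create(
    training_file="file-1234-abcd",  # placeholder ID returned by a prior file upload
    model="meta-llama/Meta-Llama-3.1-8B-Instruct-Reference",  # placeholder base model
    n_epochs=1,
    verbose=True,  # prints the job parameters before submitting the request
)
print(response.id)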

{together-1.2.12 → together-1.2.13}/src/together/types/chat_completions.py
@@ -40,9 +40,24 @@ class ToolCalls(BaseModel):
     function: FunctionCall | None = None
 
 
+class ChatCompletionMessageContentType(str, Enum):
+    TEXT = "text"
+    IMAGE_URL = "image_url"
+
+
+class ChatCompletionMessageContentImageURL(BaseModel):
+    url: str
+
+
+class ChatCompletionMessageContent(BaseModel):
+    type: ChatCompletionMessageContentType
+    text: str | None = None
+    image_url: ChatCompletionMessageContentImageURL | None = None
+
+
 class ChatCompletionMessage(BaseModel):
     role: MessageRole
-    content: str | None = None
+    content: str | List[ChatCompletionMessageContent] | None = None
     tool_calls: List[ToolCalls] | None = None
 
 
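
The widened `content` field and the new content-part models allow a message to carry a list of typed parts (text and image URLs) instead of a plain string, matching the relaxed `messages: List[Dict[str, Any]]` signature above. A hedged example of such a request follows; the model name and image URL are placeholders.

from together import Together

client = Together()  # assumes TOGETHER_API_KEY is set in the environment

# messages accepts List[Dict[str, Any]], so content can be a list of typed parts
messages = [
    {
        "role": "user",
        "content": [
            {"type": "text", "text": "Describe this image in one sentence."},
            {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
        ],
    }
]

response = client.chat.completions.create(
    model="meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",  # placeholder vision model
    messages=messages,
)
print(response.choices[0].message.content)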