sutro 0.1.13__tar.gz → 0.1.14__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of sutro might be problematic.
- sutro-0.1.14/PKG-INFO +23 -0
- sutro-0.1.14/README.md +3 -0
- {sutro-0.1.13 → sutro-0.1.14}/pyproject.toml +1 -1
- {sutro-0.1.13 → sutro-0.1.14}/sutro/cli.py +29 -29
- {sutro-0.1.13 → sutro-0.1.14}/sutro/sdk.py +47 -47
- sutro-0.1.13/PKG-INFO +0 -41
- sutro-0.1.13/README.md +0 -21
- {sutro-0.1.13 → sutro-0.1.14}/.gitignore +0 -0
- {sutro-0.1.13 → sutro-0.1.14}/LICENSE +0 -0
- {sutro-0.1.13 → sutro-0.1.14}/sutro/__init__.py +0 -0
sutro-0.1.14/PKG-INFO
ADDED
@@ -0,0 +1,23 @@
+Metadata-Version: 2.4
+Name: sutro
+Version: 0.1.14
+Summary: Sutro Python SDK
+Project-URL: Homepage, https://sutro.sh
+Project-URL: Documentation, https://docs.sutro.sh
+License-Expression: Apache-2.0
+License-File: LICENSE
+Requires-Python: >=3.10
+Requires-Dist: click==8.1.7
+Requires-Dist: colorama==0.4.4
+Requires-Dist: numpy==2.1.1
+Requires-Dist: pandas==2.2.3
+Requires-Dist: polars==1.8.2
+Requires-Dist: pydantic==2.11.4
+Requires-Dist: requests==2.32.3
+Requires-Dist: tqdm==4.67.1
+Requires-Dist: yaspin==3.1.0
+Description-Content-Type: text/markdown
+
+# sutro-client
+
+The official Python client for Sutro. See [docs.sutro.sh](https://docs.sutro.sh/) for more information.
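Every dependency above is pinned with an exact `==` version, so upgrading to 0.1.14 swaps the whole pin set rather than widening ranges. A minimal sketch, using only the standard library, to confirm what actually got installed; the expected output is an assumption based on the metadata above:

```python
# Check the installed sutro version and its pinned requirements.
from importlib.metadata import requires, version

print(version("sutro"))          # expect "0.1.14" after upgrading
for req in requires("sutro") or []:
    print(req)                   # should match the Requires-Dist pins above
```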
sutro-0.1.14/README.md
ADDED
@@ -0,0 +1,3 @@
+# sutro-client
+
+The official Python client for Sutro. See [docs.sutro.sh](https://docs.sutro.sh/) for more information.
{sutro-0.1.13 → sutro-0.1.14}/sutro/cli.py
CHANGED
@@ -275,34 +275,34 @@ def cancel(job_id):
 
 
 @cli.group()
-def …
-    """Manage …
+def datasets():
+    """Manage datasets."""
     pass
 
 
-@…
+@datasets.command()
 def create():
-    """Create a new …
+    """Create a new dataset."""
     sdk = get_sdk()
-    …
-    if not …
+    dataset_id = sdk.create_dataset()
+    if not dataset_id:
        return
    click.echo(
        Fore.GREEN
-        + f"…
+        + f"Dataset created successfully. Dataset ID: {dataset_id}"
        + Style.RESET_ALL
    )
 
 
-@…
+@datasets.command()
 def list():
-    """List all …
+    """List all datasets."""
     sdk = get_sdk()
-    …
-    if …
-        click.echo(Fore.YELLOW + "No …
+    datasets = sdk.list_datasets()
+    if datasets is None or len(datasets) == 0:
+        click.echo(Fore.YELLOW + "No datasets found." + Style.RESET_ALL)
        return
-    df = pl.DataFrame(…
+    df = pl.DataFrame(datasets)
 
    df = df.with_columns(
        pl.col("schema")
@@ -319,37 +319,37 @@ def list():
    print(df.select(pl.all()))
 
 
-@…
-@click.argument("…
-def files(…
-    """List all files in a …
+@datasets.command()
+@click.argument("dataset_id")
+def files(dataset_id):
+    """List all files in a dataset."""
     sdk = get_sdk()
-    files = sdk.…
+    files = sdk.list_dataset_files(dataset_id)
    if not files:
        return
 
-    print(Fore.YELLOW + "Files in …
+    print(Fore.YELLOW + "Files in dataset " + dataset_id + ":" + Style.RESET_ALL)
    for file in files:
        print(f"\t{file}")
 
 
-@…
-@click.argument("…
+@datasets.command()
+@click.argument("dataset_id", required=False)
 @click.argument("file_path")
-def upload(file_path, …
-    """Upload files to a …
+def upload(file_path, dataset_id):
+    """Upload files to a dataset. You can provide a single file path or a directory path to upload all files in the directory."""
     sdk = get_sdk()
-    sdk.…
+    sdk.upload_to_dataset(file_path, dataset_id)
 
 
-@…
-@click.argument("…
+@datasets.command()
+@click.argument("dataset_id")
 @click.argument("file_name", required=False)
 @click.argument("output_path", required=False)
-def download(…
-    """Download a file/files from a …
+def download(dataset_id, file_name=None, output_path=None):
+    """Download a file/files from a dataset. If no files are provided, all files in the dataset will be downloaded. If no output path is provided, the file will be saved to the current working directory."""
     sdk = get_sdk()
-    files = sdk.…
+    files = sdk.download_from_dataset(dataset_id, [file_name], output_path)
    if not files:
        return
    for file in files:
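The new `datasets` command group is a thin wrapper over SDK methods added in `sutro/sdk.py`, diffed below. A hedged end-to-end sketch of the equivalent SDK calls; the `Sutro(api_key=...)` constructor arguments are an assumption, since the constructor does not appear in this diff:

```python
# Hypothetical SDK-level equivalent of the new CLI commands.
from sutro.sdk import Sutro

sdk = Sutro(api_key="YOUR_API_KEY")          # constructor signature assumed

dataset_id = sdk.create_dataset()            # `datasets create`
sdk.upload_to_dataset(dataset_id, "data/")   # `datasets upload`
print(sdk.list_dataset_files(dataset_id))    # `datasets files`
sdk.download_from_dataset(dataset_id)        # `datasets download`
```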
{sutro-0.1.13 → sutro-0.1.14}/sutro/sdk.py
CHANGED
@@ -114,7 +114,7 @@ class Sutro:
                raise ValueError("Column name must be specified for DataFrame input")
            input_data = data[column].to_list()
        elif isinstance(data, str):
-            if data.startswith("…
+            if data.startswith("dataset-"):
                input_data = data + ":" + column
            else:
                file_ext = os.path.splitext(data)[1].lower()
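The `dataset-` prefix is what disambiguates string inputs: a string that looks like a dataset ID becomes a `dataset-<id>:<column>` reference instead of being read as a local file. A self-contained illustration of just this branch, with made-up values:

```python
import os

# Made-up values illustrating the new string-input branch.
data, column = "dataset-abc123", "prompt"

if data.startswith("dataset-"):
    input_data = data + ":" + column  # dataset reference: "dataset-abc123:prompt"
else:
    # Stand-in for the real file-path branch, which inspects the extension.
    input_data = os.path.splitext(data)[1].lower()

print(input_data)
```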
@@ -172,12 +172,12 @@ class Sutro:
        Run inference on the provided data.
 
        This method allows you to run inference on the provided data using the Sutro API.
-        It supports various data types such as lists, pandas DataFrames, polars DataFrames, file paths and …
+        It supports various data types such as lists, pandas DataFrames, polars DataFrames, file paths and datasets.
 
        Args:
            data (Union[List, pd.DataFrame, pl.DataFrame, str]): The data to run inference on.
            model (str, optional): The model to use for inference. Defaults to "llama-3.1-8b".
-            column (str, optional): The column name to use for inference. Required if data is a DataFrame, file path, or …
+            column (str, optional): The column name to use for inference. Required if data is a DataFrame, file path, or dataset.
            output_column (str, optional): The column name to store the inference results in if the input is a DataFrame. Defaults to "inference_result".
            job_priority (int, optional): The priority of the job. Defaults to 0.
            output_schema (Union[Dict[str, Any], BaseModel], optional): A structured schema for the output.
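Per the updated docstring, a dataset reference can now be passed wherever a list, DataFrame, or file path was accepted, with `column` selecting the field to read. A sketch of such a call; the method's name is not visible in this hunk, so `infer` below is a placeholder, not a confirmed API:

```python
# Placeholder method name: the hunk shows the docstring, not the signature.
results = sdk.infer(
    "dataset-abc123",                  # dataset input, new in 0.1.14
    model="llama-3.1-8b",              # documented default
    column="prompt",                   # required for dataset inputs
    output_column="inference_result",  # documented default
)
```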
@@ -793,22 +793,22 @@ class Sutro:
            return
        return response.json()
 
-    def …
+    def create_dataset(self):
        """
-        Create a new …
+        Create a new dataset.
 
-        This method creates a new …
+        This method creates a new empty dataset and returns its ID.
 
        Returns:
-            str: The ID of the new …
+            str: The ID of the new dataset.
        """
-        endpoint = f"{self.base_url}/create-…
+        endpoint = f"{self.base_url}/create-dataset"
        headers = {
            "Authorization": f"Key {self.api_key}",
            "Content-Type": "application/json",
        }
        with yaspin(
-            SPINNER, text=to_colored_text("Creating …
+            SPINNER, text=to_colored_text("Creating dataset"), color=YASPIN_COLOR
        ) as spinner:
            response = requests.get(endpoint, headers=headers)
            if response.status_code != 200:
@@ -820,25 +820,25 @@ class Sutro:
                spinner.stop()
                print(to_colored_text(response.json(), state="fail"))
                return
-            …
+            dataset_id = response.json()["dataset_id"]
            spinner.write(
-                to_colored_text(f"✔ …
+                to_colored_text(f"✔ Dataset created with ID: {dataset_id}", state="success")
            )
-            return …
+            return dataset_id
 
-    def …
+    def upload_to_dataset(
        self,
-        …
+        dataset_id: Union[List[str], str] = None,
        file_paths: Union[List[str], str] = None,
        verify_ssl: bool = True,
    ):
        """
-        Upload data to a …
+        Upload data to a dataset.
 
-        This method uploads files to a …
+        This method uploads files to a dataset. Accepts a dataset ID and file paths. If only a single parameter is provided, it will be interpreted as the file paths.
 
        Args:
-            …
+            dataset_id (str): The ID of the dataset to upload to. If not provided, a new dataset will be created.
            file_paths (Union[List[str], str]): A list of paths to the files to upload, or a single path to a collection of files.
            verify_ssl (bool): Whether to verify SSL certificates. Set to False to bypass SSL verification for troubleshooting.
 
@@ -846,17 +846,17 @@ class Sutro:
            dict: The response from the API.
        """
        # when only a single parameter is provided, it is interpreted as the file paths
-        if file_paths is None and …
-            file_paths = …
-            …
+        if file_paths is None and dataset_id is not None:
+            file_paths = dataset_id
+            dataset_id = None
 
        if file_paths is None:
            raise ValueError("File paths must be provided")
 
-        if …
-            …
+        if dataset_id is None:
+            dataset_id = self.create_dataset()
 
-        endpoint = f"{self.base_url}/upload-to-…
+        endpoint = f"{self.base_url}/upload-to-dataset"
 
        if isinstance(file_paths, str):
            # check if the file path is a directory
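The argument shuffle above gives `upload_to_dataset` an optional first argument: called with a single value, that value is treated as the file paths and a dataset is created implicitly (the method returns the dataset ID, as a later hunk shows). Both call forms, sketched:

```python
# One argument: interpreted as file paths; a new dataset is created for you.
new_id = sdk.upload_to_dataset("exports/")

# Two arguments: upload into an existing dataset.
sdk.upload_to_dataset("dataset-abc123", ["a.parquet", "b.parquet"])
```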
@@ -871,7 +871,7 @@ class Sutro:
 
        with yaspin(
            SPINNER,
-            text=to_colored_text(f"Uploading files to …
+            text=to_colored_text(f"Uploading files to dataset: {dataset_id}"),
            color=YASPIN_COLOR,
        ) as spinner:
            count = 0
@@ -887,7 +887,7 @@ class Sutro:
                }
 
                payload = {
-                    "…
+                    "dataset_id": dataset_id,
                }
 
                headers = {
@@ -896,7 +896,7 @@ class Sutro:
                count += 1
                spinner.write(
                    to_colored_text(
-                        f"Uploading file {count}/{len(file_paths)} to …
+                        f"Uploading file {count}/{len(file_paths)} to dataset: {dataset_id}"
                    )
                )
 
@@ -923,19 +923,19 @@ class Sutro:
 
            spinner.write(
                to_colored_text(
-                    f"✔ {count} files successfully uploaded to …
+                    f"✔ {count} files successfully uploaded to dataset", state="success"
                )
            )
-            return …
+            return dataset_id
 
-    def …
-        endpoint = f"{self.base_url}/list-…
+    def list_datasets(self):
+        endpoint = f"{self.base_url}/list-datasets"
        headers = {
            "Authorization": f"Key {self.api_key}",
            "Content-Type": "application/json",
        }
        with yaspin(
-            SPINNER, text=to_colored_text("Retrieving …
+            SPINNER, text=to_colored_text("Retrieving datasets"), color=YASPIN_COLOR
        ) as spinner:
            response = requests.post(endpoint, headers=headers)
            if response.status_code != 200:
@@ -946,21 +946,21 @@ class Sutro:
                )
                print(to_colored_text(f"Error: {response.json()}", state="fail"))
                return
-            spinner.write(to_colored_text("✔ …
-            return response.json()["…
+            spinner.write(to_colored_text("✔ Datasets retrieved", state="success"))
+            return response.json()["datasets"]
 
-    def …
-        endpoint = f"{self.base_url}/list-…
+    def list_dataset_files(self, dataset_id: str):
+        endpoint = f"{self.base_url}/list-dataset-files"
        headers = {
            "Authorization": f"Key {self.api_key}",
            "Content-Type": "application/json",
        }
        payload = {
-            "…
+            "dataset_id": dataset_id,
        }
        with yaspin(
            SPINNER,
-            text=to_colored_text(f"Listing files in …
+            text=to_colored_text(f"Listing files in dataset: {dataset_id}"),
            color=YASPIN_COLOR,
        ) as spinner:
            response = requests.post(
@@ -975,27 +975,27 @@ class Sutro:
                print(to_colored_text(f"Error: {response.json()}", state="fail"))
                return
            spinner.write(
-                to_colored_text(f"✔ Files listed in …
+                to_colored_text(f"✔ Files listed in dataset: {dataset_id}", state="success")
            )
            return response.json()["files"]
 
-    def …
+    def download_from_dataset(
        self,
-        …
+        dataset_id: str,
        files: Union[List[str], str] = None,
        output_path: str = None,
    ):
-        endpoint = f"{self.base_url}/download-from-…
+        endpoint = f"{self.base_url}/download-from-dataset"
 
        if files is None:
-            files = self.…
+            files = self.list_dataset_files(dataset_id)
        elif isinstance(files, str):
            files = [files]
 
        if not files:
            print(
                to_colored_text(
-                    f"Couldn't find files for …
+                    f"Couldn't find files for dataset ID: {dataset_id}", state="fail"
                )
            )
            return
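`download_from_dataset` treats `files=None` as "download everything" (it fetches the file list via `list_dataset_files` first) and wraps a bare string in a single-element list. Sketched usage; the file name is hypothetical:

```python
# All files in the dataset; the file list is fetched automatically.
sdk.download_from_dataset("dataset-abc123")

# One hypothetical file, saved under ./out instead of the working directory.
sdk.download_from_dataset("dataset-abc123", "part-0.parquet", output_path="./out")
```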
@@ -1006,7 +1006,7 @@ class Sutro:
 
        with yaspin(
            SPINNER,
-            text=to_colored_text(f"Downloading files from …
+            text=to_colored_text(f"Downloading files from dataset: {dataset_id}"),
            color=YASPIN_COLOR,
        ) as spinner:
            count = 0
@@ -1016,11 +1016,11 @@ class Sutro:
                    "Content-Type": "application/json",
                }
                payload = {
-                    "…
+                    "dataset_id": dataset_id,
                    "file_name": file,
                }
                spinner.text = to_colored_text(
-                    f"Downloading file {count + 1}/{len(files)} from …
+                    f"Downloading file {count + 1}/{len(files)} from dataset: {dataset_id}"
                )
                response = requests.post(
                    endpoint, headers=headers, data=json.dumps(payload)
@@ -1039,7 +1039,7 @@ class Sutro:
                    count += 1
                    spinner.write(
                        to_colored_text(
-                            f"✔ {count} files successfully downloaded from …
+                            f"✔ {count} files successfully downloaded from dataset: {dataset_id}",
                            state="success",
                        )
                    )
sutro-0.1.13/PKG-INFO
DELETED
@@ -1,41 +0,0 @@
-Metadata-Version: 2.4
-Name: sutro
-Version: 0.1.13
-Summary: Sutro Python SDK
-Project-URL: Homepage, https://sutro.sh
-Project-URL: Documentation, https://docs.sutro.sh
-License-Expression: Apache-2.0
-License-File: LICENSE
-Requires-Python: >=3.10
-Requires-Dist: click==8.1.7
-Requires-Dist: colorama==0.4.4
-Requires-Dist: numpy==2.1.1
-Requires-Dist: pandas==2.2.3
-Requires-Dist: polars==1.8.2
-Requires-Dist: pydantic==2.11.4
-Requires-Dist: requests==2.32.3
-Requires-Dist: tqdm==4.67.1
-Requires-Dist: yaspin==3.1.0
-Description-Content-Type: text/markdown
-
-# sutro-client
-
-The official Python client for Sutro. See [docs.sutro.sh](https://docs.sutro.sh/) for more information.
-
-## Installing Locally (to test changes during development)
-
-Run `make install` from the root directory. This should remove the old builds and reinstall the package in your environment with the latest. You can run `uv pip list` to ensure the package is pointing at the local files instead of the PyPI package.
-
-## Creating releases
-
-Make sure you increment the version appropriately in `pyproject.toml`. Generally speaking we'll do patch versions for small tweaks, minor versions for large additions or changes to behavior, and probably do major releases once it makes sense. Since we're still in beta and `0.x.x` releases, its probably okay to add backwards-incompatible changes to minor releases, but we want to avoid this if possible.
-
-To create a release, run:
-
-`make release <version>` with `<version>` formatted like `0.1.1`
-
-It'll prompt you for an API key to PyPI, which you must have for it to work.
-
-We also have a test PyPI account which you can use to test creating releases before pushing to the actual PyPI hub. I believe you can only create **one** release per version number, so it may be worth testing if you're paranoid about getting it right.
-
-Also make sure to update the docs and increment the docs version number to match the new release. Keeping these consistent will provide a better user experience.
sutro-0.1.13/README.md
DELETED
@@ -1,21 +0,0 @@
-# sutro-client
-
-The official Python client for Sutro. See [docs.sutro.sh](https://docs.sutro.sh/) for more information.
-
-## Installing Locally (to test changes during development)
-
-Run `make install` from the root directory. This should remove the old builds and reinstall the package in your environment with the latest. You can run `uv pip list` to ensure the package is pointing at the local files instead of the PyPI package.
-
-## Creating releases
-
-Make sure you increment the version appropriately in `pyproject.toml`. Generally speaking we'll do patch versions for small tweaks, minor versions for large additions or changes to behavior, and probably do major releases once it makes sense. Since we're still in beta and `0.x.x` releases, its probably okay to add backwards-incompatible changes to minor releases, but we want to avoid this if possible.
-
-To create a release, run:
-
-`make release <version>` with `<version>` formatted like `0.1.1`
-
-It'll prompt you for an API key to PyPI, which you must have for it to work.
-
-We also have a test PyPI account which you can use to test creating releases before pushing to the actual PyPI hub. I believe you can only create **one** release per version number, so it may be worth testing if you're paranoid about getting it right.
-
-Also make sure to update the docs and increment the docs version number to match the new release. Keeping these consistent will provide a better user experience.
{sutro-0.1.13 → sutro-0.1.14}/.gitignore: file without changes
{sutro-0.1.13 → sutro-0.1.14}/LICENSE: file without changes
{sutro-0.1.13 → sutro-0.1.14}/sutro/__init__.py: file without changes