scale-nucleus 0.4.2__tar.gz → 0.6b3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/LICENSE +0 -0
  2. scale-nucleus-0.6b3/PKG-INFO +213 -0
  3. scale-nucleus-0.6b3/README.md +181 -0
  4. scale-nucleus-0.6b3/cli/client.py +14 -0
  5. scale-nucleus-0.6b3/cli/datasets.py +77 -0
  6. {scale-nucleus-0.4.2/nucleus/data_transfer_object → scale-nucleus-0.6b3/cli/helpers}/__init__.py +0 -0
  7. scale-nucleus-0.6b3/cli/helpers/nucleus_url.py +10 -0
  8. scale-nucleus-0.6b3/cli/helpers/web_helper.py +40 -0
  9. scale-nucleus-0.6b3/cli/install_completion.py +33 -0
  10. scale-nucleus-0.6b3/cli/jobs.py +42 -0
  11. scale-nucleus-0.6b3/cli/models.py +35 -0
  12. scale-nucleus-0.6b3/cli/nu.py +42 -0
  13. scale-nucleus-0.6b3/cli/reference.py +8 -0
  14. scale-nucleus-0.6b3/cli/slices.py +62 -0
  15. scale-nucleus-0.6b3/cli/tests.py +121 -0
  16. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/__init__.py +46 -14
  17. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/annotation.py +51 -1
  18. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/autocurate.py +0 -0
  19. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/connection.py +0 -0
  20. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/constants.py +0 -1
  21. {scale-nucleus-0.4.2/nucleus/modelci/data_transfer_objects → scale-nucleus-0.6b3/nucleus/data_transfer_object}/__init__.py +0 -0
  22. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/data_transfer_object/dataset_details.py +0 -0
  23. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/data_transfer_object/dataset_info.py +0 -0
  24. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/data_transfer_object/dataset_size.py +0 -0
  25. scale-nucleus-0.6b3/nucleus/data_transfer_object/scenes_list.py +18 -0
  26. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/dataset.py +97 -7
  27. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/dataset_item.py +0 -0
  28. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/dataset_item_uploader.py +0 -0
  29. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/deprecation_warning.py +0 -0
  30. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/errors.py +9 -0
  31. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/job.py +0 -0
  32. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/logger.py +0 -0
  33. scale-nucleus-0.6b3/nucleus/metadata_manager.py +45 -0
  34. scale-nucleus-0.6b3/nucleus/metrics/__init__.py +10 -0
  35. scale-nucleus-0.6b3/nucleus/metrics/base.py +117 -0
  36. scale-nucleus-0.6b3/nucleus/metrics/categorization_metrics.py +199 -0
  37. scale-nucleus-0.6b3/nucleus/metrics/errors.py +7 -0
  38. scale-nucleus-0.6b3/nucleus/metrics/filters.py +40 -0
  39. scale-nucleus-0.6b3/nucleus/metrics/geometry.py +198 -0
  40. scale-nucleus-0.6b3/nucleus/metrics/metric_utils.py +28 -0
  41. scale-nucleus-0.6b3/nucleus/metrics/polygon_metrics.py +480 -0
  42. scale-nucleus-0.6b3/nucleus/metrics/polygon_utils.py +299 -0
  43. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/model.py +7 -7
  44. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/model_run.py +0 -0
  45. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/payload_constructor.py +2 -1
  46. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/prediction.py +51 -1
  47. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/pydantic_base.py +10 -0
  48. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/retry_strategy.py +1 -1
  49. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/scene.py +0 -0
  50. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/slice.py +23 -0
  51. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/upload_response.py +0 -0
  52. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/url_utils.py +0 -0
  53. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/nucleus/utils.py +4 -0
  54. scale-nucleus-0.6b3/nucleus/validate/__init__.py +24 -0
  55. scale-nucleus-0.6b3/nucleus/validate/client.py +168 -0
  56. {scale-nucleus-0.4.2/nucleus/modelci → scale-nucleus-0.6b3/nucleus/validate}/constants.py +2 -2
  57. {scale-nucleus-0.4.2/nucleus/modelci/eval_functions → scale-nucleus-0.6b3/nucleus/validate/data_transfer_objects}/__init__.py +0 -0
  58. scale-nucleus-0.6b3/nucleus/validate/data_transfer_objects/eval_function.py +81 -0
  59. scale-nucleus-0.4.2/nucleus/modelci/data_transfer_objects/unit_test.py → scale-nucleus-0.6b3/nucleus/validate/data_transfer_objects/scenario_test.py +1 -1
  60. scale-nucleus-0.4.2/nucleus/modelci/data_transfer_objects/unit_test_evaluations.py → scale-nucleus-0.6b3/nucleus/validate/data_transfer_objects/scenario_test_evaluations.py +0 -0
  61. scale-nucleus-0.4.2/nucleus/modelci/data_transfer_objects/unit_test_metric.py → scale-nucleus-0.6b3/nucleus/validate/data_transfer_objects/scenario_test_metric.py +3 -3
  62. {scale-nucleus-0.4.2/nucleus/modelci → scale-nucleus-0.6b3/nucleus/validate}/errors.py +1 -1
  63. scale-nucleus-0.6b3/nucleus/validate/eval_functions/__init__.py +0 -0
  64. {scale-nucleus-0.4.2/nucleus/modelci → scale-nucleus-0.6b3/nucleus/validate}/eval_functions/available_eval_functions.py +23 -2
  65. {scale-nucleus-0.4.2/nucleus/modelci → scale-nucleus-0.6b3/nucleus/validate}/eval_functions/base_eval_function.py +0 -0
  66. scale-nucleus-0.4.2/nucleus/modelci/unit_test.py → scale-nucleus-0.6b3/nucleus/validate/scenario_test.py +45 -44
  67. scale-nucleus-0.4.2/nucleus/modelci/unit_test_evaluation.py → scale-nucleus-0.6b3/nucleus/validate/scenario_test_evaluation.py +29 -29
  68. scale-nucleus-0.6b3/nucleus/validate/scenario_test_metric.py +14 -0
  69. {scale-nucleus-0.4.2/nucleus/modelci → scale-nucleus-0.6b3/nucleus/validate}/utils.py +0 -0
  70. {scale-nucleus-0.4.2 → scale-nucleus-0.6b3}/pyproject.toml +12 -4
  71. scale-nucleus-0.6b3/setup.py +55 -0
  72. scale-nucleus-0.4.2/PKG-INFO +0 -101
  73. scale-nucleus-0.4.2/README.md +0 -74
  74. scale-nucleus-0.4.2/nucleus/modelci/__init__.py +0 -20
  75. scale-nucleus-0.4.2/nucleus/modelci/client.py +0 -162
  76. scale-nucleus-0.4.2/nucleus/modelci/data_transfer_objects/eval_function.py +0 -46
  77. scale-nucleus-0.4.2/nucleus/modelci/unit_test_metric.py +0 -14
  78. scale-nucleus-0.4.2/setup.py +0 -44
scale-nucleus-0.6b3/PKG-INFO
@@ -0,0 +1,213 @@
+ Metadata-Version: 2.1
+ Name: scale-nucleus
+ Version: 0.6b3
+ Summary: The official Python client library for Nucleus, the Data Platform for AI
+ Home-page: https://scale.com/nucleus
+ License: MIT
+ Author: Scale AI Nucleus Team
+ Author-email: nucleusapi@scaleapi.com
+ Requires-Python: >=3.6.2,<4.0.0
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.7
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Requires-Dist: aiohttp (>=3.7.4,<4.0.0)
+ Requires-Dist: click (>=7.1.2,<9.0)
+ Requires-Dist: dataclasses (>=0.7,<0.8); python_full_version >= "3.6.1" and python_version < "3.7"
+ Requires-Dist: nest-asyncio (>=1.5.1,<2.0.0)
+ Requires-Dist: numpy (>=1.19.5,<2.0.0)
+ Requires-Dist: pydantic (>=1.8.2,<2.0.0)
+ Requires-Dist: requests (>=2.23.0,<3.0.0)
+ Requires-Dist: rich (>=10.15.2,<11.0.0)
+ Requires-Dist: scikit-learn (>=0.24.0)
+ Requires-Dist: scipy (>=1.4.1)
+ Requires-Dist: shellingham (>=1.4.0,<2.0.0)
+ Requires-Dist: tqdm (>=4.41.0,<5.0.0)
+ Project-URL: Documentation, https://dashboard.scale.com/nucleus/docs/api
+ Project-URL: Repository, https://github.com/scaleapi/nucleus-python-client
+ Description-Content-Type: text/markdown
+
+ # Nucleus
+
+ https://dashboard.scale.com/nucleus
+
+ Aggregate metrics in ML are not good enough. To improve production ML, you need to understand your models' qualitative failure modes, fix them by gathering more data, and curate diverse scenarios.
+
+ Scale Nucleus helps you:
+
+ - Visualize your data
+ - Curate interesting slices within your dataset
+ - Review and manage annotations
+ - Measure and debug your model performance
+
+ Nucleus is a new way—the right way—to develop ML models, helping us move away from the concept of one dataset and towards a paradigm of collections of scenarios.
+
+ ## Installation
+
+ `$ pip install scale-nucleus`
+
+
+ ## CLI installation
+ We recommend installing the CLI via `pipx` (https://pypa.github.io/pipx/installation/). This makes sure that
+ the CLI does not interfere with your system packages and is accessible from your favorite terminal.
+
+ For macOS:
+ ```bash
+ brew install pipx
+ pipx ensurepath
+ pipx install scale-nucleus
+ # Optional installation of shell completion (for bash, zsh or fish)
+ nu install-completions
+ ```
+
+ Otherwise, install via pip (requires pip 19.0 or later):
+ ```bash
+ python3 -m pip install --user pipx
+ python3 -m pipx ensurepath
+ python3 -m pipx install scale-nucleus
+ # Optional installation of shell completion (for bash, zsh or fish)
+ nu install-completions
+ ```
+
+ ## Common issues/FAQ
+
+ ### Outdated Client
+
+ Nucleus is iterating rapidly and, as a result, we do not always perfectly preserve backwards compatibility with older versions of the client. If you run into any unexpected error, it's a good idea to upgrade your version of the client by running
+ ```
+ pip install --upgrade scale-nucleus
+ ```
+
+ ## Usage
+
+ For the most up-to-date documentation, see: https://dashboard.scale.com/nucleus/docs/api?language=python.
+
+ ## For Developers
+
+ Clone from GitHub and install as editable:
+
+ ```
+ git clone git@github.com:scaleapi/nucleus-python-client.git
+ cd nucleus-python-client
+ pip3 install poetry
+ poetry install
+ ```
+
+ Please install the pre-commit hooks by running the following command:
+
+ ```bash
+ poetry run pre-commit install
+ ```
+
+ When releasing a new version, please add release notes to the changelog in `CHANGELOG.md`.
+
+ **Best practices for testing:**
+ (1) Please run pytest from the root directory of the repo, e.g.
+
+ ```
+ poetry run pytest tests/test_dataset.py
+ ```
+
+ (2) To skip slow integration tests that have to wait for an async job to start:
+
+ ```
+ poetry run pytest -m "not integration"
+ ```
+
+ ## Pydantic Models
+
+ Prefer using [Pydantic](https://pydantic-docs.helpmanual.io/usage/models/) models rather than creating raw dictionaries
+ or dataclasses to send or receive over the wire as JSON. Pydantic is created with data validation in mind and provides very clear error
+ messages when it encounters a problem with the payload.
+
+ The Pydantic model(s) should mirror the payload to send. A JSON payload that looks like this:
+ ```json
+ {
+   "example_json_with_info": {
+     "metadata": {
+       "frame": 0
+     },
+     "reference_id": "frame0",
+     "url": "s3://example/scale_nucleus/2021/lidar/0038711321865000.json",
+     "type": "pointcloud"
+   },
+   "example_image_with_info": {
+     "metadata": {
+       "author": "Picasso"
+     },
+     "reference_id": "frame0",
+     "url": "s3://bucket/0038711321865000.jpg",
+     "type": "image"
+   }
+ }
+ ```
+
+ could be represented with the following structure. Note how the field names map to the JSON keys and how field
+ validators (`@validator`) are used.
+
+ ```python
+ import os.path
+ from pydantic import BaseModel, validator
+ from typing import Literal
+
+
+ class JsonWithInfo(BaseModel):
+     metadata: dict  # any dict is valid
+     reference_id: str
+     url: str
+     type: Literal["pointcloud", "recipe"]
+
+     @validator("url")
+     def has_json_extension(cls, v):
+         if not v.endswith(".json"):
+             raise ValueError(f"Expected '.json' extension got {v}")
+         return v
+
+
+ class ImageWithInfo(BaseModel):
+     metadata: dict  # any dict is valid
+     reference_id: str
+     url: str
+     type: Literal["image", "mask"]
+
+     @validator("url")
+     def has_valid_extension(cls, v):
+         valid_extensions = {".jpg", ".jpeg", ".png", ".tiff"}
+         _, extension = os.path.splitext(v)
+         if extension not in valid_extensions:
+             raise ValueError(f"Expected extension in {valid_extensions} got {v}")
+         return v
+
+
+ class ExampleNestedModel(BaseModel):
+     example_json_with_info: JsonWithInfo
+     example_image_with_info: ImageWithInfo
+
+ # Usage:
+ import requests
+ payload = requests.get("https://example.com/example")
+ parsed_model = ExampleNestedModel.parse_obj(payload.json())
+ requests.post("https://example.com/post_to", json=parsed_model.dict())
+ ```
+
+
+ ### Migrating to Pydantic
+ - When migrating an interface from a dictionary, use `nucleus.pydantic_base.DictCompatibleModel`. That allows you to get
+ the benefits of Pydantic but maintains backwards compatibility with a Python dictionary by delegating `__getitem__` to
+ fields.
+ - When migrating a frozen dataclass, use `nucleus.pydantic_base.ImmutableModel`. That is a base class set up to be
+ immutable after initialization.
+
+ **Updating documentation:**
+ We use [Sphinx](https://www.sphinx-doc.org/en/master/) to autogenerate our API Reference from docstrings.
+
+ To test your local docstring changes, run the following commands from the repository's root directory:
+ ```
+ poetry shell
+ cd docs
+ sphinx-autobuild . ./_build/html --watch ../nucleus
+ ```
+ `sphinx-autobuild` will spin up a server on localhost (port 8000 by default) that watches your local docstring changes and automatically rebuilds the API reference.
+
scale-nucleus-0.6b3/README.md
@@ -0,0 +1,181 @@
+ # Nucleus
+
+ https://dashboard.scale.com/nucleus
+
+ Aggregate metrics in ML are not good enough. To improve production ML, you need to understand your models' qualitative failure modes, fix them by gathering more data, and curate diverse scenarios.
+
+ Scale Nucleus helps you:
+
+ - Visualize your data
+ - Curate interesting slices within your dataset
+ - Review and manage annotations
+ - Measure and debug your model performance
+
+ Nucleus is a new way—the right way—to develop ML models, helping us move away from the concept of one dataset and towards a paradigm of collections of scenarios.
+
+ ## Installation
+
+ `$ pip install scale-nucleus`
+
+
+ ## CLI installation
+ We recommend installing the CLI via `pipx` (https://pypa.github.io/pipx/installation/). This makes sure that
+ the CLI does not interfere with your system packages and is accessible from your favorite terminal.
+
+ For macOS:
+ ```bash
+ brew install pipx
+ pipx ensurepath
+ pipx install scale-nucleus
+ # Optional installation of shell completion (for bash, zsh or fish)
+ nu install-completions
+ ```
+
+ Otherwise, install via pip (requires pip 19.0 or later):
+ ```bash
+ python3 -m pip install --user pipx
+ python3 -m pipx ensurepath
+ python3 -m pipx install scale-nucleus
+ # Optional installation of shell completion (for bash, zsh or fish)
+ nu install-completions
+ ```
+
+ ## Common issues/FAQ
+
+ ### Outdated Client
+
+ Nucleus is iterating rapidly and, as a result, we do not always perfectly preserve backwards compatibility with older versions of the client. If you run into any unexpected error, it's a good idea to upgrade your version of the client by running
+ ```
+ pip install --upgrade scale-nucleus
+ ```
+
+ ## Usage
+
+ For the most up-to-date documentation, see: https://dashboard.scale.com/nucleus/docs/api?language=python.
+
+ ## For Developers
+
+ Clone from GitHub and install as editable:
+
+ ```
+ git clone git@github.com:scaleapi/nucleus-python-client.git
+ cd nucleus-python-client
+ pip3 install poetry
+ poetry install
+ ```
+
+ Please install the pre-commit hooks by running the following command:
+
+ ```bash
+ poetry run pre-commit install
+ ```
+
+ When releasing a new version, please add release notes to the changelog in `CHANGELOG.md`.
+
+ **Best practices for testing:**
+ (1) Please run pytest from the root directory of the repo, e.g.
+
+ ```
+ poetry run pytest tests/test_dataset.py
+ ```
+
+ (2) To skip slow integration tests that have to wait for an async job to start:
+
+ ```
+ poetry run pytest -m "not integration"
+ ```
+
+ ## Pydantic Models
+
+ Prefer using [Pydantic](https://pydantic-docs.helpmanual.io/usage/models/) models rather than creating raw dictionaries
+ or dataclasses to send or receive over the wire as JSON. Pydantic is created with data validation in mind and provides very clear error
+ messages when it encounters a problem with the payload.
+
+ The Pydantic model(s) should mirror the payload to send. A JSON payload that looks like this:
+ ```json
+ {
+   "example_json_with_info": {
+     "metadata": {
+       "frame": 0
+     },
+     "reference_id": "frame0",
+     "url": "s3://example/scale_nucleus/2021/lidar/0038711321865000.json",
+     "type": "pointcloud"
+   },
+   "example_image_with_info": {
+     "metadata": {
+       "author": "Picasso"
+     },
+     "reference_id": "frame0",
+     "url": "s3://bucket/0038711321865000.jpg",
+     "type": "image"
+   }
+ }
+ ```
+
+ could be represented with the following structure. Note how the field names map to the JSON keys and how field
+ validators (`@validator`) are used.
+
+ ```python
+ import os.path
+ from pydantic import BaseModel, validator
+ from typing import Literal
+
+
+ class JsonWithInfo(BaseModel):
+     metadata: dict  # any dict is valid
+     reference_id: str
+     url: str
+     type: Literal["pointcloud", "recipe"]
+
+     @validator("url")
+     def has_json_extension(cls, v):
+         if not v.endswith(".json"):
+             raise ValueError(f"Expected '.json' extension got {v}")
+         return v
+
+
+ class ImageWithInfo(BaseModel):
+     metadata: dict  # any dict is valid
+     reference_id: str
+     url: str
+     type: Literal["image", "mask"]
+
+     @validator("url")
+     def has_valid_extension(cls, v):
+         valid_extensions = {".jpg", ".jpeg", ".png", ".tiff"}
+         _, extension = os.path.splitext(v)
+         if extension not in valid_extensions:
+             raise ValueError(f"Expected extension in {valid_extensions} got {v}")
+         return v
+
+
+ class ExampleNestedModel(BaseModel):
+     example_json_with_info: JsonWithInfo
+     example_image_with_info: ImageWithInfo
+
+ # Usage:
+ import requests
+ payload = requests.get("https://example.com/example")
+ parsed_model = ExampleNestedModel.parse_obj(payload.json())
+ requests.post("https://example.com/post_to", json=parsed_model.dict())
+ ```
+
+
+ ### Migrating to Pydantic
+ - When migrating an interface from a dictionary, use `nucleus.pydantic_base.DictCompatibleModel`. That allows you to get
+ the benefits of Pydantic but maintains backwards compatibility with a Python dictionary by delegating `__getitem__` to
+ fields.
+ - When migrating a frozen dataclass, use `nucleus.pydantic_base.ImmutableModel`. That is a base class set up to be
+ immutable after initialization.
+
+ **Updating documentation:**
+ We use [Sphinx](https://www.sphinx-doc.org/en/master/) to autogenerate our API Reference from docstrings.
+
+ To test your local docstring changes, run the following commands from the repository's root directory:
+ ```
+ poetry shell
+ cd docs
+ sphinx-autobuild . ./_build/html --watch ../nucleus
+ ```
+ `sphinx-autobuild` will spin up a server on localhost (port 8000 by default) that watches your local docstring changes and automatically rebuilds the API reference.
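Editor's note: the "Migrating to Pydantic" bullets above describe `nucleus.pydantic_base.DictCompatibleModel` only in prose. A minimal sketch of what such a dict-compatible base might look like, assuming only what the bullets state (delegating `__getitem__` to fields); the real `nucleus.pydantic_base` implementation may differ:

```python
from pydantic import BaseModel  # pydantic v1, as pinned in the metadata above


class DictCompatibleModel(BaseModel):
    """Hypothetical sketch of a Pydantic model that still supports dict-style reads."""

    def __getitem__(self, key: str):
        # Delegate item access to the model's fields so legacy
        # `payload["field"]` call sites keep working after migration.
        return getattr(self, key)


class ExampleRecord(DictCompatibleModel):  # illustrative model, not from the package
    reference_id: str
    url: str


record = ExampleRecord(reference_id="frame0", url="s3://bucket/0038711321865000.jpg")
assert record["url"] == record.url  # both access styles work
```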
scale-nucleus-0.6b3/cli/client.py
@@ -0,0 +1,14 @@
+ import functools
+ import os
+
+ import nucleus
+
+
+ @functools.lru_cache()
+ def init_client():
+     api_key = os.environ.get("NUCLEUS_API_KEY", None)
+     if api_key:
+         client = nucleus.NucleusClient(api_key)
+     else:
+         raise RuntimeError("No NUCLEUS_API_KEY set")
+     return client
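A quick usage sketch for the cached `init_client` helper above (the key value is a placeholder):

```python
import os

from cli.client import init_client

os.environ["NUCLEUS_API_KEY"] = "your-api-key"  # placeholder, not a real key

client = init_client()
# functools.lru_cache() means repeated calls reuse the same NucleusClient.
assert init_client() is client
```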
scale-nucleus-0.6b3/cli/datasets.py
@@ -0,0 +1,77 @@
+ import click
+ from rich.console import Console
+ from rich.table import Column, Table
+
+ from cli.client import init_client
+ from cli.helpers.nucleus_url import nucleus_url
+ from cli.helpers.web_helper import launch_web_or_invoke
+
+
+ @click.group("datasets", invoke_without_command=True)
+ @click.option("--web", is_flag=True, help="Launch browser")
+ @click.pass_context
+ def datasets(ctx, web):
+     """Datasets are the base collections of items in Nucleus
+
+     https://dashboard.scale.com/nucleus/datasets
+     """
+     launch_web_or_invoke(
+         sub_url="datasets", ctx=ctx, launch_browser=web, command=list_datasets
+     )
+
+
+ @datasets.command("list")
+ @click.option(
+     "-m", "--machine-readable", is_flag=True, help="Removes pretty printing"
+ )
+ def list_datasets(machine_readable):
+     """List all available Datasets"""
+     console = Console()
+     with console.status("Finding your Datasets!", spinner="dots4"):
+         client = init_client()
+         all_datasets = client.datasets
+         if machine_readable:
+             table_params = {"box": None, "pad_edge": False}
+         else:
+             table_params = {
+                 "title": ":fire: Datasets",
+                 "title_justify": "left",
+             }
+
+         table = Table(
+             "id", "Name", Column("url", overflow="fold"), **table_params
+         )
+         for ds in all_datasets:
+             table.add_row(ds.id, ds.name, nucleus_url(ds.id))
+     console.print(table)
+
+
+ @datasets.command("delete")
+ @click.option("--id", prompt=True)
+ @click.option(
+     "--no-confirm-deletion",
+     is_flag=True,
+     help="WARNING: No confirmation for deletion",
+ )
+ @click.pass_context
+ def delete_dataset(ctx, id, no_confirm_deletion):
+     """Delete a Dataset"""
+     console = Console()
+     client = init_client()
+     id = id.strip()
+     dataset = client.get_dataset(id)
+     delete_string = ""
+     if not no_confirm_deletion:
+         delete_string = click.prompt(
+             click.style(
+                 f"Type 'DELETE' to delete dataset: {dataset}", fg="red"
+             )
+         )
+     if no_confirm_deletion or delete_string == "DELETE":
+         client.delete_dataset(dataset.id)
+         console.print(f":fire: :anguished: Deleted {id}")
+     else:
+         console.print(
+             f":rotating_light: Refusing to delete {id}. Received '{delete_string}' instead of 'DELETE'"
+         )
+         ctx.abort()
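These commands can also be exercised in-process with Click's built-in test runner; a small sketch, assuming `NUCLEUS_API_KEY` is set in the environment:

```python
from click.testing import CliRunner

from cli.datasets import datasets

runner = CliRunner()

# Equivalent to `nu datasets list --machine-readable` on the command line.
result = runner.invoke(datasets, ["list", "-m"])
print(result.exit_code, result.output)
```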
scale-nucleus-0.6b3/cli/helpers/nucleus_url.py
@@ -0,0 +1,10 @@
+ import os
+ from urllib.parse import urljoin
+
+
+ def nucleus_url(sub_path: str):
+     nucleus_base = os.environ.get(
+         "NUCLEUS_DASHBOARD", "https://dashboard.scale.com/nucleus/"
+     )
+     extra_params = os.environ.get("NUCLEUS_DASH_PARAMS", "")
+     return urljoin(nucleus_base, sub_path.lstrip("/") + extra_params)
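One subtlety in `nucleus_url` above: `urljoin` only appends the sub-path cleanly because the default base URL ends with a trailing slash. A short demonstration using only the standard library:

```python
from urllib.parse import urljoin

base = "https://dashboard.scale.com/nucleus/"

# With the trailing slash, the sub-path lands under /nucleus/.
assert urljoin(base, "datasets") == "https://dashboard.scale.com/nucleus/datasets"

# Without it, urljoin replaces the last path segment instead.
assert urljoin(base.rstrip("/"), "datasets") == "https://dashboard.scale.com/datasets"
```

The same holds for any `NUCLEUS_DASHBOARD` override: it should end with a slash.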
scale-nucleus-0.6b3/cli/helpers/web_helper.py
@@ -0,0 +1,40 @@
+ import click
+
+ from cli.helpers.nucleus_url import nucleus_url
+
+
+ def launch_web_or_show_help(
+     sub_url: str, ctx: click.Context, launch_browser: bool
+ ):
+     """Launches the sub_url (composed with nucleus_url(sub_url)) in the browser if requested"""
+     if not ctx.invoked_subcommand:
+         if launch_browser:
+             url = nucleus_url(sub_url)
+             click.launch(url)
+         else:
+             click.echo(ctx.get_help())
+     else:
+         if launch_browser:
+             click.echo(click.style("--web does not work with sub-commands"))
+             ctx.abort()
+
+
+ def launch_web_or_invoke(
+     sub_url: str,
+     ctx: click.Context,
+     launch_browser: bool,
+     command: click.BaseCommand,
+ ):
+     """Launches the sub_url (composed with nucleus_url(sub_url)) in the browser if requested, otherwise invokes
+     the passed command
+     """
+     if not ctx.invoked_subcommand:
+         if launch_browser:
+             url = nucleus_url(sub_url)
+             click.launch(url)
+         else:
+             ctx.invoke(command)
+     else:
+         if launch_browser:
+             click.echo(click.style("--web does not work with sub-commands"))
+             ctx.abort()
scale-nucleus-0.6b3/cli/install_completion.py
@@ -0,0 +1,33 @@
+ import os
+ import shutil
+
+ import click
+ from shellingham import detect_shell
+
+
+ @click.command("install-completion")
+ def install_completion():
+     """Install shell completion script to your rc file"""
+     shell, _ = detect_shell()
+     if shell == "zsh":
+         rc_path = "~/.zshrc"
+         append_to_file = 'eval "$(_NU_COMPLETE=zsh_source nu)"'
+     elif shell == "bash":
+         rc_path = "~/.bashrc"
+         append_to_file = 'eval "$(_NU_COMPLETE=bash_source nu)"'
+     elif shell == "fish":
+         rc_path = "~/.config/fish/completions/nu.fish"
+         append_to_file = "eval (env _NU_COMPLETE=fish_source nu)"
+     else:
+         raise RuntimeError(f"Unsupported shell {shell} for completions")
+
+     rc_path_expanded = os.path.expanduser(rc_path)
+     rc_bak = f"{rc_path_expanded}.bak"
+     shutil.copy(rc_path_expanded, rc_bak)
+     click.echo(f"Backed up {rc_path} to {rc_bak}")
+     with open(rc_path_expanded, mode="a") as rc_file:
+         rc_file.write("\n")
+         rc_file.write("# Shell completion for nu\n")
+         rc_file.write(append_to_file)
+     click.echo(f"Completion script added to {rc_path}")
+     click.echo(f"Don't forget to `source {rc_path}`")
scale-nucleus-0.6b3/cli/jobs.py
@@ -0,0 +1,42 @@
+ import click
+ from rich.live import Live
+ from rich.spinner import Spinner
+ from rich.table import Column, Table
+
+ from cli.client import init_client
+ from cli.helpers.web_helper import launch_web_or_invoke
+
+
+ @click.group("jobs", invoke_without_command=True)
+ @click.option("--web", is_flag=True, help="Launch browser")
+ @click.pass_context
+ def jobs(ctx, web):
+     """Jobs are a wrapper around various long-running tasks within Nucleus
+
+     https://dashboard.scale.com/nucleus/jobs
+     """
+     launch_web_or_invoke("jobs", ctx, web, list_jobs)
+
+
+ @jobs.command("list")
+ def list_jobs():
+     """List all of your Jobs"""
+     client = init_client()
+     table = Table(
+         Column("id", overflow="fold", min_width=24),
+         "status",
+         "type",
+         "created at",
+         title=":satellite: Jobs",
+         title_justify="left",
+     )
+     with Live(Spinner("dots4", text="Finding your Jobs!")) as live:
+         all_jobs = client.jobs
+         for job in all_jobs:
+             table.add_row(
+                 job.job_id,
+                 job.job_last_known_status,
+                 job.job_type,
+                 job.job_creation_time,
+             )
+         live.update(table)
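`list_jobs` uses a `rich` pattern worth noting: keep a `Spinner` in the `Live` region while the slow `client.jobs` call runs, then swap in the finished table. A minimal standalone sketch of the same pattern, with a `sleep` standing in for the API call:

```python
import time

from rich.live import Live
from rich.spinner import Spinner
from rich.table import Table

table = Table("id", "status")

with Live(Spinner("dots4", text="Finding your Jobs!")) as live:
    time.sleep(1)  # stand-in for the slow client.jobs call
    table.add_row("job_123", "Running")
    live.update(table)  # the spinner is replaced by the finished table
```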
scale-nucleus-0.6b3/cli/models.py
@@ -0,0 +1,35 @@
+ import click
+ from rich.console import Console
+ from rich.table import Column, Table
+
+ from cli.client import init_client
+ from cli.helpers.nucleus_url import nucleus_url
+ from cli.helpers.web_helper import launch_web_or_invoke
+
+
+ @click.group("models", invoke_without_command=True)
+ @click.option("--web", is_flag=True, help="Launch browser")
+ @click.pass_context
+ def models(ctx, web):
+     """Models help you store and access your ML model data
+
+     https://dashboard.scale.com/nucleus/models
+     """
+     launch_web_or_invoke("models", ctx, web, list_models)
+
+
+ @models.command("list")
+ def list_models():
+     """List your Models"""
+     console = Console()
+     with console.status("Finding your Models!", spinner="dots4"):
+         client = init_client()
+         table = Table(
+             Column("id", overflow="fold", min_width=24),
+             "name",
+             Column("url", overflow="fold"),
+         )
+         models = client.models
+         for m in models:
+             table.add_row(m.id, m.name, nucleus_url(m.id))
+     console.print(table)