craft-ai-sdk 0.65.3__py3-none-any.whl → 0.66.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of craft-ai-sdk might be problematic. Click here for more details.

craft_ai_sdk/__init__.py CHANGED
@@ -13,4 +13,4 @@ from .io import ( # noqa: F401
13
13
  )
14
14
  from .sdk import CraftAiSdk # noqa: F401
15
15
 
16
- __version__ = "0.65.3"
16
+ __version__ = "0.66.0"
@@ -115,6 +115,7 @@ def create_deployment(
115
115
  wait_for_completion=True,
116
116
  timeout_s: Union[int, None] = None,
117
117
  execution_timeout_s: Union[int, None] = None,
118
+ endpoint_token: Union[str, None] = None,
118
119
  ):
119
120
  """Create a deployment associated with a given pipeline.
120
121
 
@@ -241,6 +242,11 @@ def create_deployment(
241
242
  timeout_s (:obj:`int`): Maximum time (in seconds) to wait for the deployment to
242
243
  be ready. Set to None to wait indefinitely. Defaults to None.
243
244
  Only applicable if ``wait_for_completion`` is ``True``.
245
+ endpoint_token (:obj:`str`, optional): Applicable only if ``execution_rule``
246
+ is ``"endpoint"``. The token to access the endpoint. If not set, a token
247
+ will be generated. This token is different from the SDK token; you can find it
248
+ in the result of this function as "endpoint_token". It is used as a secret
249
+ to trigger the endpoint.
244
250
 
245
251
  Returns:
246
252
  :obj:`dict[str, str]`: Created deployment represented as a dict with the
@@ -449,6 +455,14 @@ def create_deployment(
449
455
  )
450
456
  else:
451
457
  data["endpoint_url_path"] = endpoint_url_path
458
+ if endpoint_token is not None:
459
+ if execution_rule != "endpoint":
460
+ raise ValueError(
461
+ "'endpoint_token' can only be specified if 'execution_rule' is \
462
+ 'endpoint'."
463
+ )
464
+ else:
465
+ data["endpoint_token"] = endpoint_token
452
466
 
453
467
  data["inputs_mapping"] = _validate_inputs_mapping(inputs_mapping)
454
468
  data["outputs_mapping"] = _validate_outputs_mapping(outputs_mapping)
@@ -4,6 +4,8 @@ from urllib.parse import urlencode
4
4
 
5
5
  import requests
6
6
 
7
+ from craft_ai_sdk.utils.dict_utils import remove_none_values
8
+
7
9
  from ..sdk import BaseCraftAiSdk
8
10
  from ..shared.authentication import use_authentication
9
11
  from ..shared.logger import log_func_result
@@ -204,12 +206,14 @@ def retrieve_endpoint_results(
204
206
 
205
207
 
206
208
  def generate_new_endpoint_token(
207
- sdk: BaseCraftAiSdk, endpoint_name: str
209
+ sdk: BaseCraftAiSdk, endpoint_name: str, endpoint_token: Union[str, None] = None
208
210
  ) -> EndpointNewToken:
209
211
  """Generate a new endpoint token for an endpoint.
210
212
 
211
213
  Args:
212
214
  endpoint_name (:obj:`str`): Name of the endpoint.
215
+ endpoint_token (:obj:`str`, optional): New endpoint token to set. If not set,
216
+ a new token will be generated.
213
217
 
214
218
  Returns:
215
219
  :obj:`dict[str, str]`: New endpoint token represented as :obj:`dict` with
@@ -218,4 +222,4 @@ def generate_new_endpoint_token(
218
222
  * ``"endpoint_token"`` (:obj:`str`): New endpoint token.
219
223
  """
220
224
  url = f"{sdk.base_environment_api_url}/endpoints/{endpoint_name}/generate-new-token"
221
- return sdk._post(url)
225
+ return sdk._post(url, data=remove_none_values({"endpoint_token": endpoint_token}))
craft_ai_sdk/io.py CHANGED
@@ -1,4 +1,3 @@
1
- import warnings
2
1
  from typing import Any, TypedDict, cast
3
2
 
4
3
  from strenum import LowercaseStrEnum
@@ -19,19 +18,19 @@ class INPUT_OUTPUT_TYPES(LowercaseStrEnum):
19
18
 
20
19
 
21
20
  class Input:
22
- """Class to specify a step input when creating a step
23
- (cf. :meth:`.CraftAiSdk.create_step`).
21
+ """Class to specify a pipeline input when creating a pipeline
22
+ (cf. :meth:`.CraftAiSdk.create_pipeline`).
24
23
 
25
24
  Args:
26
25
  name (:obj:`str`): Name of the input. This corresponds to the name of a
27
- parameter of a step function.
26
+ parameter of a pipeline function.
28
27
  data_type (:obj:`str`): Type of the input: It could be one of "string",
29
28
  "number","boolean", "json", "array" or "file". For convenience, members of
30
29
  the enumeration :class:`INPUT_OUTPUT_TYPES` could be used too.
31
30
  description (:obj:`str`, optional): Description. Defaults to None.
32
31
  is_required (:obj:`bool`, optional): Specify if a value should be provided at
33
32
  execution time. Defaults to None.
34
- default_value (:obj:`Any`, optional): A default value for the step input at
33
+ default_value (:obj:`Any`, optional): A default value for the pipeline input at
35
34
  execution time. The type for `default_value` should match the type specified
36
35
  by `data_type`. Defaults to None.
37
36
  """
@@ -57,12 +56,12 @@ class Input:
57
56
 
58
57
 
59
58
  class Output:
60
- """Class to specify a step output when creating a step
61
- (cf. :meth:`.CraftAiSdk.create_step`).
59
+ """Class to specify a pipeline output when creating a pipeline
60
+ (cf. :meth:`.CraftAiSdk.create_pipeline`).
62
61
 
63
62
  Args:
64
63
  name (:obj:`str`): Name of the output. This corresponds to the key of the `dict`
65
- returned by the step function.
64
+ returned by the pipeline function.
66
65
  data_type (:obj:`str`): Type of the output. It could be one of "string",
67
66
  "number", "boolean", "json", "array" or "file". For convenience, members of
68
67
  the enumeration :class:`INPUT_OUTPUT_TYPES` could be used too.
@@ -97,7 +96,7 @@ class InputSourceDict(TypedDict):
97
96
 
98
97
 
99
98
  class InputSource:
100
- """Class to specify to which source a step input should be mapped when creating
99
+ """Class to specify to which source a pipeline input should be mapped when creating
101
100
  a deployment (cf. :meth:`.CraftAiSdk.create_deployment`). The different sources can
102
101
  be one of:
103
102
 
@@ -125,16 +124,16 @@ class InputSource:
125
124
  value at execution time.
126
125
  datastore_path (:obj:`str`, optional): Path of the input file in the datastore.
127
126
  If you want to use a file from the datastore as input, this file will then
128
- be accessible as if you passed the file path as an argument to the step.
127
+ be accessible as if you passed the file path as an argument to the pipeline.
129
128
  The resulting input will be a :obj:`dict` with `"path"` as key and the
130
129
  file path as value. The file will be downloaded in the execution environment
131
- before the step is executed. You can then use the file as you would use any
132
- other file in the execution environment. Here is an example of how to use
133
- this feature in the step code:
130
+ before the pipeline is executed. You can then use the file as you would
131
+ use any other file in the execution environment. Here is an example of
132
+ how to use this feature in the pipeline code:
134
133
 
135
134
  .. code-block:: python
136
135
 
137
- def step_function(input):
136
+ def pipeline_function(input):
138
137
  with open(input["path"]) as f:
139
138
  content = f.read()
140
139
  print(content)
@@ -156,21 +155,7 @@ class InputSource:
156
155
  constant_value=None,
157
156
  is_null=None,
158
157
  datastore_path=None,
159
- step_input_name=None,
160
158
  ):
161
- if pipeline_input_name is not None and step_input_name is not None:
162
- raise ValueError(
163
- "Both pipeline_input_name and step_input_name cannot be specified."
164
- )
165
- if pipeline_input_name is None and step_input_name is None:
166
- raise ValueError('missing "pipeline_input_name" argument.')
167
- if step_input_name is not None:
168
- warnings.warn(
169
- "Providing the step_input_name argument is deprecated and will "
170
- "be removed in a future version. Please use the pipeline_input_name keyword argument instead.", # noqa: E501
171
- FutureWarning,
172
- stacklevel=2,
173
- )
174
159
  self.pipeline_input_name = pipeline_input_name
175
160
  self.endpoint_input_name = endpoint_input_name
176
161
  self.environment_variable_name = environment_variable_name
@@ -179,7 +164,6 @@ class InputSource:
179
164
  self.constant_value = constant_value
180
165
  self.is_null = is_null
181
166
  self.datastore_path = datastore_path
182
- self.step_input_name = step_input_name
183
167
 
184
168
  def to_dict(self) -> InputSourceDict:
185
169
  input_mapping_dict = {
@@ -191,7 +175,6 @@ class InputSource:
191
175
  "constant_value": self.constant_value,
192
176
  "is_null": self.is_null,
193
177
  "datastore_path": self.datastore_path,
194
- "step_input_name": self.step_input_name,
195
178
  }
196
179
 
197
180
  return cast(InputSourceDict, remove_none_values(input_mapping_dict))
@@ -202,11 +185,10 @@ class OutputDestinationDict(TypedDict):
202
185
  endpoint_output_name: NotRequired[str]
203
186
  is_null: NotRequired[bool]
204
187
  datastore_path: NotRequired[str]
205
- step_output_name: NotRequired[str]
206
188
 
207
189
 
208
190
  class OutputDestination:
209
- """Class to specify to which destination a step output should be mapped when
191
+ """Class to specify to which destination a pipeline output should be mapped when
210
192
  creating a deployment (cf. :meth:`.CraftAiSdk.create_deployment`). If the execution
211
193
  rule of the deployment is endpoint, an output could either be exposed as an output
212
194
  of the endpoint (via `endpoint_output_name` parameter) or not (via `is_null`
@@ -221,16 +203,17 @@ class OutputDestination:
221
203
  deployment output.
222
204
  datastore_path (:obj:`str`, optional): Path of the output file in the datastore.
223
205
  If you want to upload a file to the datastore as output, you can specify
224
- this parameter. The file will be uploaded to the datastore after the step
225
- is executed. In order to pass the file to be uploaded in the datastore, you
226
- will have to do the same as if you were passing a file as output. You will
227
- have to return a :obj:`dict` with `"path"` as key and the file path as
228
- value. The file will be uploaded to the datastore after the step is
229
- executed. Here is an example of how to use this feature in the step code:
206
+ this parameter. The file will be uploaded to the datastore after the
207
+ pipeline is executed. In order to pass the file to be uploaded in the
208
+ datastore, you will have to do the same as if you were passing a file
209
+ as output. You will have to return a :obj:`dict` with `"path"` as key
210
+ and the file path as value. The file will be uploaded to the datastore
211
+ after the pipeline is executed. Here is an example of how to use this
212
+ feature in the pipeline code:
230
213
 
231
214
  .. code-block:: python
232
215
 
233
- def step_function():
216
+ def pipeline_function():
234
217
  file_path = "path/to/file"
235
218
  with open(file_path, "w") as f:
236
219
  f.write("content")
@@ -253,26 +236,11 @@ class OutputDestination:
253
236
  endpoint_output_name=None,
254
237
  is_null=None,
255
238
  datastore_path=None,
256
- step_output_name=None,
257
239
  ):
258
- if pipeline_output_name is not None and step_output_name is not None:
259
- raise ValueError(
260
- "Both pipeline_output_name and step_output_name cannot be specified."
261
- )
262
- if pipeline_output_name is None and step_output_name is None:
263
- raise ValueError('missing "pipeline_output_name" argument.')
264
- if step_output_name is not None:
265
- warnings.warn(
266
- "Providing the 'step_output_name' argument is deprecated and will "
267
- "be removed in a future version. Please use the 'pipeline_output_name' keyword argument instead.", # noqa: E501
268
- FutureWarning,
269
- stacklevel=2,
270
- )
271
240
  self.pipeline_output_name = pipeline_output_name
272
241
  self.endpoint_output_name = endpoint_output_name
273
242
  self.is_null = is_null
274
243
  self.datastore_path = datastore_path
275
- self.step_output_name = step_output_name
276
244
 
277
245
  def to_dict(self) -> OutputDestinationDict:
278
246
  output_mapping_dict = {
@@ -280,7 +248,6 @@ class OutputDestination:
280
248
  "endpoint_output_name": self.endpoint_output_name,
281
249
  "is_null": self.is_null,
282
250
  "datastore_path": self.datastore_path,
283
- "step_output_name": self.step_output_name,
284
251
  }
285
252
 
286
253
  return cast(OutputDestinationDict, remove_none_values(output_mapping_dict))
craft_ai_sdk/sdk.py CHANGED
@@ -138,7 +138,7 @@ class CraftAiSdk(BaseCraftAiSdk):
138
138
  os.environ.get("CRAFT_AI__MULTIPART_PART_SIZE__B", str(38 * 256 * 1024))
139
139
  )
140
140
  _access_token_margin = timedelta(seconds=30)
141
- _version = "0.65.3" # Would be better to share it somewhere
141
+ _version = "0.66.0" # Would be better to share it somewhere
142
142
 
143
143
  def __init__(
144
144
  self,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: craft-ai-sdk
3
- Version: 0.65.3
3
+ Version: 0.66.0
4
4
  Summary: Craft AI MLOps platform SDK
5
5
  License: Apache-2.0
6
6
  Author: Craft AI
@@ -1,8 +1,8 @@
1
- craft_ai_sdk/__init__.py,sha256=ihzipaTQTB6IBVmQA5o4Pl_vAND4Fv8tHTXbgxspNds,363
1
+ craft_ai_sdk/__init__.py,sha256=veuzN4isifu8Hhhi6X3JJy6t5zCuJmvf7cogoeNkfWY,363
2
2
  craft_ai_sdk/constants.py,sha256=rH4JrGlTpbjjjNRrKhk5oScbj5G5INrcVza6Bb6kIzY,980
3
3
  craft_ai_sdk/core/data_store.py,sha256=dlVZajXGwcI_4mzqFctHpzKO-uySIP5lvQaJy6CKwY0,9131
4
- craft_ai_sdk/core/deployments.py,sha256=8IijSJjomEtiLNeYgVuwMKqqs3MD5rCMqoGKOYHGOtc,36915
5
- craft_ai_sdk/core/endpoints.py,sha256=nZgkxM69cJjLpO8GHgT-yat5DkGvi2s0AfOKYbN7F7w,7355
4
+ craft_ai_sdk/core/deployments.py,sha256=O4JjH8CjfA4Dy-Y7H_ABVEPS3TbMhO9j66F2mKf22K4,37617
5
+ craft_ai_sdk/core/endpoints.py,sha256=smNLSDG4T6ZMNyu494Ip52SURBOqmmx3oSWtz8GzCCc,7648
6
6
  craft_ai_sdk/core/environment_variables.py,sha256=CpkyneLf8UF-1QXe36Ay_RZJ8grSfbZZWBvGSQzKlLs,2221
7
7
  craft_ai_sdk/core/pipeline_executions.py,sha256=p_vjsOMR92_6UUG5S-pgyen3V7yYHTjHBJ7pNfANe5I,18661
8
8
  craft_ai_sdk/core/pipeline_metrics.py,sha256=IJFzAptwqjanb2Jl-XQCh0FtashEd4I2Pb51ky51cyY,7166
@@ -12,8 +12,8 @@ craft_ai_sdk/core/steps.py,sha256=JptHia3zHRdbQ-lXxg-aCgoN2S2L10AxU8D3A2Mlxl0,23
12
12
  craft_ai_sdk/core/users.py,sha256=q5et87q0SOMpRTBOiLX026oaEOTaY1aGqKdbUC51zbA,618
13
13
  craft_ai_sdk/core/vector_database.py,sha256=S3h68Ej1FnHYJdc5LdLlNjobYYdmaCloSq2UaylPWko,2334
14
14
  craft_ai_sdk/exceptions.py,sha256=IC-JfZmmmaTsbMCgirOEByRmWnatQLjKe8BErRkuwM0,1075
15
- craft_ai_sdk/io.py,sha256=jeCH5nVBpaSPX3SRwj6UahhOw9z52iRTqDgX6DuSEkY,13468
16
- craft_ai_sdk/sdk.py,sha256=YIXMPWabAwOSa54qxvgFYA5u8ElRTJ_Xu6FNlL_bJBE,10812
15
+ craft_ai_sdk/io.py,sha256=x1G7ga10W_SeTDLHe-FrhU26_WNwWW32-yprMmCZ5Ck,11836
16
+ craft_ai_sdk/sdk.py,sha256=m3eZtexqGPqdfJA2zIfjzd0qQG81YFE5ncG_Dc7lDHs,10812
17
17
  craft_ai_sdk/shared/authentication.py,sha256=OdwtAH47tOUS-u_HhxlI8JdjZT5REr5B5cGSrX7sz00,885
18
18
  craft_ai_sdk/shared/environments.py,sha256=LbpRK-ACpwFfN7WTuo0nrtbbi2NAXhuMRKf7YkktEYI,510
19
19
  craft_ai_sdk/shared/execution_context.py,sha256=B2Ghq-wiUvq81q5mhsm79Oc59c8c00uQxMIpApFD03o,585
@@ -26,8 +26,8 @@ craft_ai_sdk/utils/__init__.py,sha256=A0sLCXSPD1Z3q2GP1uLDjvif4ivOr__Hzg9RQysEuq
26
26
  craft_ai_sdk/utils/datetime_utils.py,sha256=yYP5HVdI879WXxQCajPTnas1pWrwInOxMux-mxqQNQM,734
27
27
  craft_ai_sdk/utils/dict_utils.py,sha256=1HQ3A14SN48XPFDmQleujGAgksmkjIs3hyPLpwhwh24,748
28
28
  craft_ai_sdk/utils/file_utils.py,sha256=o10-CDt4qzgCJNPykvlNrL6WTouhVLY8C8BVpHJYt18,2795
29
- craft_ai_sdk-0.65.3.dist-info/LICENSE,sha256=_2oYRJic9lZK05LceuJ9aZZw5mPHYc1WQhJiVS-oGFU,10754
30
- craft_ai_sdk-0.65.3.dist-info/METADATA,sha256=OcxCuOinh8qkU4GaVfBzVX4xK2mBniL6p4v_DmhOxRg,1676
31
- craft_ai_sdk-0.65.3.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
32
- craft_ai_sdk-0.65.3.dist-info/entry_points.txt,sha256=QC96WcXvvUfLMRgFD-l_y7_TgC9SqZybLs9EQ8dsGiQ,417
33
- craft_ai_sdk-0.65.3.dist-info/RECORD,,
29
+ craft_ai_sdk-0.66.0.dist-info/LICENSE,sha256=_2oYRJic9lZK05LceuJ9aZZw5mPHYc1WQhJiVS-oGFU,10754
30
+ craft_ai_sdk-0.66.0.dist-info/METADATA,sha256=2BLk5MjWIqsFg3bIdXnyeRMP5bibtaQ_xF4_m_zpePM,1676
31
+ craft_ai_sdk-0.66.0.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
32
+ craft_ai_sdk-0.66.0.dist-info/entry_points.txt,sha256=QC96WcXvvUfLMRgFD-l_y7_TgC9SqZybLs9EQ8dsGiQ,417
33
+ craft_ai_sdk-0.66.0.dist-info/RECORD,,