craft-ai-sdk 0.60.0__tar.gz → 0.61.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of craft-ai-sdk might be problematic. Click here for more details.
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/PKG-INFO +1 -1
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/__init__.py +5 -6
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/constants.py +11 -1
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/data_store.py +6 -8
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/deployments.py +7 -15
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/endpoints.py +4 -2
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/environment_variables.py +1 -1
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/pipeline_executions.py +8 -12
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/pipeline_metrics.py +3 -1
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/pipelines.py +10 -14
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/resource_metrics.py +3 -3
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/steps.py +31 -16
- craft_ai_sdk-0.61.0/craft_ai_sdk/core/vector_database.py +21 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/io.py +1 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/sdk.py +5 -2
- craft_ai_sdk-0.61.0/craft_ai_sdk/shared/authentication.py +24 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/shared/environments.py +16 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/shared/execution_context.py +21 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/shared/helpers.py +27 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/shared/logger.py +39 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/shared/request_response_handler.py +113 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/utils/__init__.py +14 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/utils/datetime_utils.py +24 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/utils/dict_utils.py +24 -0
- craft_ai_sdk-0.61.0/craft_ai_sdk/utils/file_utils.py +69 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/documentation.pdf +0 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/pyproject.toml +1 -1
- craft_ai_sdk-0.60.0/craft_ai_sdk/utils.py +0 -362
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/LICENSE +0 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/README.md +0 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/core/users.py +0 -0
- {craft_ai_sdk-0.60.0 → craft_ai_sdk-0.61.0}/craft_ai_sdk/exceptions.py +0 -0
- {craft_ai_sdk-0.60.0/craft_ai_sdk → craft_ai_sdk-0.61.0/craft_ai_sdk/shared}/warnings.py +2 -2
|
@@ -1,17 +1,16 @@
|
|
|
1
|
-
from craft_ai_sdk.utils import CREATION_PARAMETER_VALUE # noqa: F401
|
|
2
|
-
from .sdk import CraftAiSdk # noqa: F401
|
|
3
|
-
from .exceptions import SdkException # noqa: F401
|
|
4
1
|
from .constants import ( # noqa: F401
|
|
2
|
+
CREATION_PARAMETER_VALUE,
|
|
5
3
|
DEPLOYMENT_EXECUTION_RULES,
|
|
6
4
|
DEPLOYMENT_MODES,
|
|
7
5
|
)
|
|
6
|
+
from .exceptions import SdkException # noqa: F401
|
|
8
7
|
from .io import ( # noqa: F401
|
|
9
8
|
INPUT_OUTPUT_TYPES,
|
|
10
9
|
Input,
|
|
11
|
-
Output,
|
|
12
10
|
InputSource,
|
|
11
|
+
Output,
|
|
13
12
|
OutputDestination,
|
|
14
13
|
)
|
|
14
|
+
from .sdk import CraftAiSdk # noqa: F401
|
|
15
15
|
|
|
16
|
-
|
|
17
|
-
__version__ = "0.60.0"
|
|
16
|
+
__version__ = "0.61.0"
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from enum import auto
|
|
1
|
+
from enum import Enum, auto
|
|
2
2
|
|
|
3
3
|
from strenum import LowercaseStrEnum
|
|
4
4
|
|
|
@@ -29,3 +29,13 @@ class DEPLOYMENT_STATUS(LowercaseStrEnum):
|
|
|
29
29
|
|
|
30
30
|
|
|
31
31
|
CREATION_REQUESTS_RETRY_INTERVAL = 10
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class CREATION_PARAMETER_VALUE(Enum):
|
|
35
|
+
"""Enumeration for creation parameters special values."""
|
|
36
|
+
|
|
37
|
+
#: Special value to indicate that the parameter should be set to the
|
|
38
|
+
#: project information value.
|
|
39
|
+
FALLBACK_PROJECT = "FALLBACK_PROJECT"
|
|
40
|
+
#: Special value to indicate that the parameter should be set to `None`.
|
|
41
|
+
NULL = "NULL"
|
|
@@ -1,14 +1,12 @@
|
|
|
1
1
|
import io
|
|
2
|
-
import requests
|
|
3
2
|
import urllib.parse
|
|
3
|
+
|
|
4
|
+
import requests
|
|
5
|
+
|
|
4
6
|
from ..sdk import BaseCraftAiSdk
|
|
5
|
-
from ..
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
handle_data_store_response,
|
|
9
|
-
log_action,
|
|
10
|
-
log_func_result,
|
|
11
|
-
)
|
|
7
|
+
from ..shared.logger import log_action, log_func_result
|
|
8
|
+
from ..shared.request_response_handler import handle_data_store_response
|
|
9
|
+
from ..utils import chunk_buffer, convert_size
|
|
12
10
|
|
|
13
11
|
|
|
14
12
|
def get_data_store_object_information(sdk: BaseCraftAiSdk, object_path_in_datastore):
|
|
@@ -1,18 +1,10 @@
|
|
|
1
|
-
from ..constants import
|
|
2
|
-
DEPLOYMENT_EXECUTION_RULES,
|
|
3
|
-
DEPLOYMENT_MODES,
|
|
4
|
-
DEPLOYMENT_STATUS,
|
|
5
|
-
)
|
|
6
|
-
from ..io import (
|
|
7
|
-
_validate_inputs_mapping,
|
|
8
|
-
_validate_outputs_mapping,
|
|
9
|
-
)
|
|
10
|
-
from ..sdk import BaseCraftAiSdk
|
|
1
|
+
from ..constants import DEPLOYMENT_EXECUTION_RULES, DEPLOYMENT_MODES, DEPLOYMENT_STATUS
|
|
11
2
|
from ..exceptions import SdkException
|
|
3
|
+
from ..io import _validate_inputs_mapping, _validate_outputs_mapping
|
|
4
|
+
from ..sdk import BaseCraftAiSdk
|
|
5
|
+
from ..shared.logger import log_action, log_func_result
|
|
12
6
|
from ..utils import (
|
|
13
|
-
|
|
14
|
-
log_func_result,
|
|
15
|
-
log_action,
|
|
7
|
+
datetime_to_timestamp_in_ms,
|
|
16
8
|
remove_keys_from_dict,
|
|
17
9
|
remove_none_values,
|
|
18
10
|
)
|
|
@@ -660,9 +652,9 @@ def get_deployment_logs(
|
|
|
660
652
|
url = f"{sdk.base_environment_api_url}/deployments/{deployment_name}/logs"
|
|
661
653
|
data = {}
|
|
662
654
|
if from_datetime is not None:
|
|
663
|
-
data["from"] =
|
|
655
|
+
data["from"] = datetime_to_timestamp_in_ms(from_datetime)
|
|
664
656
|
if to_datetime is not None:
|
|
665
|
-
data["to"] =
|
|
657
|
+
data["to"] = datetime_to_timestamp_in_ms(to_datetime)
|
|
666
658
|
if limit is not None:
|
|
667
659
|
data["limit"] = limit
|
|
668
660
|
|
|
@@ -1,9 +1,11 @@
|
|
|
1
1
|
import io
|
|
2
|
-
import requests
|
|
3
2
|
from urllib.parse import urlencode
|
|
4
3
|
|
|
5
|
-
|
|
4
|
+
import requests
|
|
5
|
+
|
|
6
6
|
from ..sdk import BaseCraftAiSdk
|
|
7
|
+
from ..shared.logger import log_func_result
|
|
8
|
+
from ..shared.request_response_handler import handle_http_response
|
|
7
9
|
from .deployments import get_deployment
|
|
8
10
|
|
|
9
11
|
|
|
@@ -1,23 +1,19 @@
|
|
|
1
|
-
import json
|
|
2
1
|
import io
|
|
3
|
-
|
|
2
|
+
import json
|
|
4
3
|
|
|
5
4
|
from ..exceptions import SdkException
|
|
6
5
|
from ..io import (
|
|
7
6
|
INPUT_OUTPUT_TYPES,
|
|
8
|
-
_format_execution_output,
|
|
9
7
|
_format_execution_input,
|
|
8
|
+
_format_execution_output,
|
|
10
9
|
_validate_inputs_mapping,
|
|
11
10
|
_validate_outputs_mapping,
|
|
12
11
|
)
|
|
13
12
|
from ..sdk import BaseCraftAiSdk
|
|
14
|
-
from ..
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
use_authentication,
|
|
19
|
-
handle_http_response,
|
|
20
|
-
)
|
|
13
|
+
from ..shared.authentication import use_authentication
|
|
14
|
+
from ..shared.logger import log_action, log_func_result
|
|
15
|
+
from ..shared.request_response_handler import handle_http_response
|
|
16
|
+
from ..utils import datetime_to_timestamp_in_ms
|
|
21
17
|
|
|
22
18
|
|
|
23
19
|
@log_func_result("Pipeline execution startup")
|
|
@@ -441,9 +437,9 @@ def get_pipeline_execution_logs(
|
|
|
441
437
|
|
|
442
438
|
data = {}
|
|
443
439
|
if from_datetime is not None:
|
|
444
|
-
data["from"] =
|
|
440
|
+
data["from"] = datetime_to_timestamp_in_ms(from_datetime)
|
|
445
441
|
if to_datetime is not None:
|
|
446
|
-
data["to"] =
|
|
442
|
+
data["to"] = datetime_to_timestamp_in_ms(to_datetime)
|
|
447
443
|
if limit is not None:
|
|
448
444
|
data["limit"] = limit
|
|
449
445
|
|
|
@@ -1,7 +1,9 @@
|
|
|
1
1
|
import warnings
|
|
2
2
|
|
|
3
|
-
from ..utils import log_func_result, remove_none_values, get_execution_id
|
|
4
3
|
from ..sdk import BaseCraftAiSdk
|
|
4
|
+
from ..shared.execution_context import get_execution_id
|
|
5
|
+
from ..shared.logger import log_func_result
|
|
6
|
+
from ..utils import remove_none_values
|
|
5
7
|
|
|
6
8
|
|
|
7
9
|
@log_func_result("Pipeline metrics definition", get_execution_id)
|
|
@@ -1,21 +1,17 @@
|
|
|
1
|
-
import warnings
|
|
2
1
|
import os
|
|
2
|
+
import warnings
|
|
3
|
+
|
|
3
4
|
import requests
|
|
4
5
|
|
|
5
6
|
from ..sdk import BaseCraftAiSdk
|
|
6
|
-
from ..
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
handle_data_store_response,
|
|
11
|
-
remove_keys_from_dict,
|
|
12
|
-
_datetime_to_timestamp_in_ms,
|
|
13
|
-
_wait_create_until_ready,
|
|
14
|
-
)
|
|
7
|
+
from ..shared.helpers import wait_create_until_ready
|
|
8
|
+
from ..shared.logger import log_action, log_func_result
|
|
9
|
+
from ..shared.request_response_handler import handle_data_store_response
|
|
10
|
+
from ..utils import datetime_to_timestamp_in_ms, multipartify, remove_keys_from_dict
|
|
15
11
|
from .steps import (
|
|
16
|
-
_validate_create_step_parameters,
|
|
17
12
|
_prepare_create_step_data,
|
|
18
13
|
_prepare_create_step_files,
|
|
14
|
+
_validate_create_step_parameters,
|
|
19
15
|
)
|
|
20
16
|
|
|
21
17
|
|
|
@@ -313,7 +309,7 @@ def get_pipeline(
|
|
|
313
309
|
"""
|
|
314
310
|
base_url = f"{sdk.base_environment_api_url}/pipelines/{pipeline_name}"
|
|
315
311
|
if wait_for_completion:
|
|
316
|
-
pipeline =
|
|
312
|
+
pipeline = wait_create_until_ready(
|
|
317
313
|
sdk,
|
|
318
314
|
pipeline_name,
|
|
319
315
|
lambda sdk, _: sdk._get(
|
|
@@ -452,9 +448,9 @@ def get_pipeline_logs(
|
|
|
452
448
|
|
|
453
449
|
data = {}
|
|
454
450
|
if from_datetime is not None:
|
|
455
|
-
data["from"] =
|
|
451
|
+
data["from"] = datetime_to_timestamp_in_ms(from_datetime)
|
|
456
452
|
if to_datetime is not None:
|
|
457
|
-
data["to"] =
|
|
453
|
+
data["to"] = datetime_to_timestamp_in_ms(to_datetime)
|
|
458
454
|
if limit is not None:
|
|
459
455
|
data["limit"] = limit
|
|
460
456
|
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
from ..sdk import BaseCraftAiSdk
|
|
2
|
-
from ..utils import
|
|
2
|
+
from ..utils import datetime_to_timestamp_in_ms
|
|
3
3
|
|
|
4
4
|
|
|
5
5
|
def get_resource_metrics(sdk: BaseCraftAiSdk, start_date, end_date, csv=False):
|
|
@@ -48,8 +48,8 @@ def get_resource_metrics(sdk: BaseCraftAiSdk, start_date, end_date, csv=False):
|
|
|
48
48
|
|
|
49
49
|
url = (
|
|
50
50
|
f"{sdk.base_environment_api_url}/resource-metrics"
|
|
51
|
-
f"?start={
|
|
52
|
-
f"&end={
|
|
51
|
+
f"?start={datetime_to_timestamp_in_ms(start_date)}"
|
|
52
|
+
f"&end={datetime_to_timestamp_in_ms(end_date)}"
|
|
53
53
|
f"&download={csv}"
|
|
54
54
|
)
|
|
55
55
|
|
|
@@ -1,20 +1,16 @@
|
|
|
1
|
-
import tarfile
|
|
2
1
|
import io
|
|
3
2
|
import os
|
|
3
|
+
import tarfile
|
|
4
|
+
|
|
4
5
|
import requests
|
|
5
6
|
|
|
6
|
-
from ..
|
|
7
|
+
from ..constants import CREATION_PARAMETER_VALUE
|
|
7
8
|
from ..io import Input, Output
|
|
8
|
-
from ..
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
remove_none_values,
|
|
14
|
-
handle_data_store_response,
|
|
15
|
-
_datetime_to_timestamp_in_ms,
|
|
16
|
-
_wait_create_until_ready,
|
|
17
|
-
)
|
|
9
|
+
from ..sdk import BaseCraftAiSdk
|
|
10
|
+
from ..shared.helpers import wait_create_until_ready
|
|
11
|
+
from ..shared.logger import log_action, log_func_result
|
|
12
|
+
from ..shared.request_response_handler import handle_data_store_response
|
|
13
|
+
from ..utils import datetime_to_timestamp_in_ms, multipartify, remove_none_values
|
|
18
14
|
|
|
19
15
|
|
|
20
16
|
def _compress_folder_to_memory(local_folder, include):
|
|
@@ -41,6 +37,25 @@ def _validate_create_step_parameters(inputs, outputs, timeout_s):
|
|
|
41
37
|
raise ValueError("'outputs' must be a list of instances of Output.")
|
|
42
38
|
|
|
43
39
|
|
|
40
|
+
def _map_container_config_step_parameter(container_config):
|
|
41
|
+
"""
|
|
42
|
+
Maps container config with :obj:`CREATION_PARAMETER_VALUE` enum values to final
|
|
43
|
+
container config. `None` is considered to be equivalent to
|
|
44
|
+
:obj:`CREATION_PARAMETER_VALUE.FALLBACK_PROJECT`, and should not be projected to
|
|
45
|
+
output
|
|
46
|
+
"""
|
|
47
|
+
ret = {}
|
|
48
|
+
for key in container_config:
|
|
49
|
+
if key == "local_folder":
|
|
50
|
+
continue
|
|
51
|
+
val = container_config[key]
|
|
52
|
+
if val is CREATION_PARAMETER_VALUE.NULL:
|
|
53
|
+
ret[key] = None
|
|
54
|
+
elif val is not CREATION_PARAMETER_VALUE.FALLBACK_PROJECT and val is not None:
|
|
55
|
+
ret[key] = val
|
|
56
|
+
return ret
|
|
57
|
+
|
|
58
|
+
|
|
44
59
|
def _prepare_create_step_data(
|
|
45
60
|
function_path,
|
|
46
61
|
function_name,
|
|
@@ -59,7 +74,7 @@ def _prepare_create_step_data(
|
|
|
59
74
|
"function_path": function_path,
|
|
60
75
|
"function_name": function_name,
|
|
61
76
|
"description": description,
|
|
62
|
-
"container_config":
|
|
77
|
+
"container_config": _map_container_config_step_parameter(container_config),
|
|
63
78
|
}
|
|
64
79
|
)
|
|
65
80
|
|
|
@@ -361,7 +376,7 @@ def get_step(sdk: BaseCraftAiSdk, step_name, wait_for_completion=False, timeout_
|
|
|
361
376
|
base_url = f"{sdk.base_environment_api_url}/steps/{step_name}"
|
|
362
377
|
|
|
363
378
|
if wait_for_completion:
|
|
364
|
-
step =
|
|
379
|
+
step = wait_create_until_ready(
|
|
365
380
|
sdk,
|
|
366
381
|
step_name,
|
|
367
382
|
lambda sdk, _: sdk._get(
|
|
@@ -479,9 +494,9 @@ def get_step_logs(
|
|
|
479
494
|
|
|
480
495
|
data = {}
|
|
481
496
|
if from_datetime is not None:
|
|
482
|
-
data["from"] =
|
|
497
|
+
data["from"] = datetime_to_timestamp_in_ms(from_datetime)
|
|
483
498
|
if to_datetime is not None:
|
|
484
|
-
data["to"] =
|
|
499
|
+
data["to"] = datetime_to_timestamp_in_ms(to_datetime)
|
|
485
500
|
if limit is not None:
|
|
486
501
|
data["limit"] = limit
|
|
487
502
|
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from craft_ai_sdk.shared.environments import get_environment_id
|
|
2
|
+
|
|
3
|
+
from ..sdk import BaseCraftAiSdk
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def get_vector_database_credentials(sdk: BaseCraftAiSdk):
|
|
7
|
+
"""Get the credentials of the vector database.
|
|
8
|
+
|
|
9
|
+
Returns:
|
|
10
|
+
:obj:`dict`: The vector database credentials, with the following keys:
|
|
11
|
+
* ``"vector_database_url"`` (:obj:`str`): URL of the vector database.
|
|
12
|
+
* ``"vector_database_token"`` (:obj:`str`): Token to connect to the vector
|
|
13
|
+
database.
|
|
14
|
+
"""
|
|
15
|
+
environment_id = get_environment_id(sdk)
|
|
16
|
+
|
|
17
|
+
vector_database_url = (
|
|
18
|
+
f"{sdk.base_control_api_url}/environments/{environment_id}/vector-database"
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
return sdk._get(vector_database_url)
|
|
@@ -8,7 +8,8 @@ from datetime import timedelta
|
|
|
8
8
|
import jwt
|
|
9
9
|
import requests
|
|
10
10
|
|
|
11
|
-
from .
|
|
11
|
+
from .shared.authentication import use_authentication
|
|
12
|
+
from .shared.request_response_handler import handle_http_request
|
|
12
13
|
|
|
13
14
|
warnings.simplefilter("always", DeprecationWarning)
|
|
14
15
|
|
|
@@ -20,6 +21,7 @@ class BaseCraftAiSdk(ABC):
|
|
|
20
21
|
base_control_api_url: str
|
|
21
22
|
_MULTIPART_THRESHOLD: int
|
|
22
23
|
_MULTIPART_PART_SIZE: int
|
|
24
|
+
_version: str
|
|
23
25
|
|
|
24
26
|
@abstractmethod
|
|
25
27
|
def _get(self, url, params=None, **kwargs):
|
|
@@ -114,6 +116,7 @@ class CraftAiSdk(BaseCraftAiSdk):
|
|
|
114
116
|
list_steps,
|
|
115
117
|
)
|
|
116
118
|
from .core.users import get_user
|
|
119
|
+
from .core.vector_database import get_vector_database_credentials
|
|
117
120
|
|
|
118
121
|
# Size (in bytes) from which datastore upload will switch to multipart
|
|
119
122
|
# AWS: minimum part size is 5MiB
|
|
@@ -129,7 +132,7 @@ class CraftAiSdk(BaseCraftAiSdk):
|
|
|
129
132
|
os.environ.get("CRAFT_AI__MULTIPART_PART_SIZE__B", str(38 * 256 * 1024))
|
|
130
133
|
)
|
|
131
134
|
_access_token_margin = timedelta(seconds=30)
|
|
132
|
-
_version = "0.
|
|
135
|
+
_version = "0.61.0" # Would be better to share it somewhere
|
|
133
136
|
|
|
134
137
|
def __init__(
|
|
135
138
|
self,
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def use_authentication(action_func):
|
|
6
|
+
@functools.wraps(action_func)
|
|
7
|
+
def wrapper(sdk, *args, headers=None, **kwargs):
|
|
8
|
+
actual_headers = None
|
|
9
|
+
if (
|
|
10
|
+
sdk._access_token_data is None
|
|
11
|
+
or sdk._access_token_data["exp"]
|
|
12
|
+
< (datetime.now() + sdk._access_token_margin).timestamp()
|
|
13
|
+
):
|
|
14
|
+
sdk._refresh_access_token()
|
|
15
|
+
actual_headers = {"Authorization": f"Bearer {sdk._access_token}"}
|
|
16
|
+
if headers is not None:
|
|
17
|
+
actual_headers.update(headers)
|
|
18
|
+
|
|
19
|
+
response = action_func(sdk, *args, headers=actual_headers, **kwargs)
|
|
20
|
+
if response.status_code == 401:
|
|
21
|
+
sdk._clear_access_token()
|
|
22
|
+
return response
|
|
23
|
+
|
|
24
|
+
return wrapper
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
|
|
3
|
+
from craft_ai_sdk.sdk import BaseCraftAiSdk
|
|
4
|
+
from craft_ai_sdk.shared.request_response_handler import handle_http_response
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_environment_id(sdk: BaseCraftAiSdk):
|
|
8
|
+
health_url = f"{sdk.base_environment_api_url}/health"
|
|
9
|
+
health_result = requests.get(
|
|
10
|
+
health_url,
|
|
11
|
+
headers={
|
|
12
|
+
"craft-ai-client": f"craft-ai-sdk@{sdk._version}",
|
|
13
|
+
},
|
|
14
|
+
)
|
|
15
|
+
handle_http_response(health_result)
|
|
16
|
+
return health_result.json().get("environment_id", "")
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import os
|
|
2
|
+
|
|
3
|
+
_execution_context = None
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def get_execution_id():
|
|
7
|
+
global _execution_context
|
|
8
|
+
if _execution_context is None:
|
|
9
|
+
try:
|
|
10
|
+
# File injected in steps
|
|
11
|
+
import __craft_internal_execution_context # type: ignore
|
|
12
|
+
|
|
13
|
+
_execution_context = __craft_internal_execution_context
|
|
14
|
+
except ImportError:
|
|
15
|
+
_execution_context = False
|
|
16
|
+
if _execution_context:
|
|
17
|
+
try:
|
|
18
|
+
return _execution_context.current_execution_id.get()
|
|
19
|
+
except LookupError:
|
|
20
|
+
pass
|
|
21
|
+
return os.environ.get("CRAFT_AI_EXECUTION_ID")
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from craft_ai_sdk.exceptions import SdkException
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def wait_create_until_ready(sdk, name, get_func, timeout_s, start_time, get_log_func):
|
|
5
|
+
elapsed_time = sdk._get_time() - start_time
|
|
6
|
+
status = "creation_pending"
|
|
7
|
+
while status == "creation_pending" and (
|
|
8
|
+
timeout_s is None or elapsed_time < timeout_s
|
|
9
|
+
):
|
|
10
|
+
created_obj = get_func(sdk, name)
|
|
11
|
+
status = created_obj.get("creation_info", {}).get("status", None)
|
|
12
|
+
elapsed_time = sdk._get_time() - start_time
|
|
13
|
+
|
|
14
|
+
if status == "creation_failed":
|
|
15
|
+
raise SdkException(
|
|
16
|
+
f'The creation of "{name}" has failed. You can check the logs with '
|
|
17
|
+
f'the "{get_log_func.__name__}" function.',
|
|
18
|
+
name="CreationFailed",
|
|
19
|
+
)
|
|
20
|
+
if status != "ready":
|
|
21
|
+
raise SdkException(
|
|
22
|
+
f'The creation of "{name}" was not ready in time. It is still being '
|
|
23
|
+
"created but this function stopped trying. Please check its status with "
|
|
24
|
+
f'"{get_func.__name__}".',
|
|
25
|
+
name="TimeoutException",
|
|
26
|
+
)
|
|
27
|
+
return created_obj
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import functools
|
|
2
|
+
import sys
|
|
3
|
+
from typing import Callable, Union
|
|
4
|
+
|
|
5
|
+
from craft_ai_sdk.exceptions import SdkException
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def log_action(sdk, message: str, should_log: Union[bool, Callable[[], bool]] = True):
|
|
9
|
+
if sdk.verbose_log and (should_log() if callable(should_log) else should_log):
|
|
10
|
+
print(message, file=sys.stderr)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def log_func_result(message: str, should_log: Union[bool, Callable[[], bool]] = True):
|
|
14
|
+
def decorator_log_func_result(action_func):
|
|
15
|
+
@functools.wraps(action_func)
|
|
16
|
+
def wrapper_log_func_result(*args, **kwargs):
|
|
17
|
+
sdk = args[0]
|
|
18
|
+
try:
|
|
19
|
+
res = action_func(*args, **kwargs)
|
|
20
|
+
log_action(sdk, "{:s} succeeded".format(message), should_log)
|
|
21
|
+
return res
|
|
22
|
+
except SdkException as error:
|
|
23
|
+
log_action(
|
|
24
|
+
sdk,
|
|
25
|
+
"{:s} failed ! {}".format(message, error),
|
|
26
|
+
should_log,
|
|
27
|
+
)
|
|
28
|
+
raise error
|
|
29
|
+
except Exception as error:
|
|
30
|
+
log_action(
|
|
31
|
+
sdk,
|
|
32
|
+
"{:s} failed for unexpected reason ! {}".format(message, error),
|
|
33
|
+
should_log,
|
|
34
|
+
)
|
|
35
|
+
raise error
|
|
36
|
+
|
|
37
|
+
return wrapper_log_func_result
|
|
38
|
+
|
|
39
|
+
return decorator_log_func_result
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import xml.etree.ElementTree as ET
|
|
2
|
+
from json import JSONDecodeError
|
|
3
|
+
|
|
4
|
+
from requests import RequestException, Response
|
|
5
|
+
|
|
6
|
+
from craft_ai_sdk.exceptions import SdkException
|
|
7
|
+
from craft_ai_sdk.shared.execution_context import get_execution_id
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def handle_data_store_response(response):
|
|
11
|
+
"""Return the content of a response received from the datastore
|
|
12
|
+
or parse the send error and raise it.
|
|
13
|
+
|
|
14
|
+
Args:
|
|
15
|
+
response (requests.Response): A response from the data store.
|
|
16
|
+
|
|
17
|
+
Raises:
|
|
18
|
+
SdkException: When the response contains an error.
|
|
19
|
+
|
|
20
|
+
Returns:
|
|
21
|
+
:obj:`str`: Content of the response.
|
|
22
|
+
"""
|
|
23
|
+
if 200 <= response.status_code < 300:
|
|
24
|
+
return response.content
|
|
25
|
+
|
|
26
|
+
try:
|
|
27
|
+
# Parse XML error returned by the data store before raising it
|
|
28
|
+
xml_error_node = ET.fromstring(response.text)
|
|
29
|
+
error_infos = {node.tag: node.text for node in xml_error_node}
|
|
30
|
+
error_code = error_infos.pop("Code")
|
|
31
|
+
error_message = error_infos.pop("Message")
|
|
32
|
+
raise SdkException(
|
|
33
|
+
message=error_message,
|
|
34
|
+
status_code=response.status_code,
|
|
35
|
+
name=error_code,
|
|
36
|
+
additional_data=error_infos,
|
|
37
|
+
)
|
|
38
|
+
except ET.ParseError:
|
|
39
|
+
raise SdkException(
|
|
40
|
+
"Unable to decode response from the data store: "
|
|
41
|
+
f"Content being:\n'{response.text}'",
|
|
42
|
+
status_code=response.status_code,
|
|
43
|
+
)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _parse_json_response(response):
|
|
47
|
+
if response.status_code == 204 or response.text == "OK":
|
|
48
|
+
return
|
|
49
|
+
try:
|
|
50
|
+
response_json = response.json()
|
|
51
|
+
except JSONDecodeError:
|
|
52
|
+
raise SdkException(
|
|
53
|
+
f"Unable to decode response data into json. Data being:\n'{response.text}'",
|
|
54
|
+
status_code=response.status_code,
|
|
55
|
+
) from None
|
|
56
|
+
return response_json
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _raise_craft_ai_error_from_response(response: Response):
|
|
60
|
+
try:
|
|
61
|
+
error_content = response.json()
|
|
62
|
+
error_message = error_content.get("message", "The server returned an error")
|
|
63
|
+
|
|
64
|
+
# Permission denied inside a running execution
|
|
65
|
+
if response.status_code == 403 and get_execution_id() is not None:
|
|
66
|
+
error_message = (
|
|
67
|
+
"Insufficient permissions. This is probably because "
|
|
68
|
+
"you called an SDK function that is not permitted from "
|
|
69
|
+
"inside a running deployment or execution, even if it "
|
|
70
|
+
"works from your computer. Original error: " + error_message
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
raise SdkException(
|
|
74
|
+
message=error_message,
|
|
75
|
+
status_code=response.status_code,
|
|
76
|
+
name=error_content.get("name"),
|
|
77
|
+
request_id=error_content.get("request_id"),
|
|
78
|
+
additional_data=error_content.get("additional_data"),
|
|
79
|
+
)
|
|
80
|
+
except JSONDecodeError:
|
|
81
|
+
raise SdkException(
|
|
82
|
+
"The server returned an invalid response content. "
|
|
83
|
+
f"Content being:\n'{response.text}'",
|
|
84
|
+
status_code=response.status_code,
|
|
85
|
+
) from None
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def handle_http_response(response: Response):
|
|
89
|
+
if 200 <= response.status_code < 400:
|
|
90
|
+
if "application/octet-stream" in response.headers.get(
|
|
91
|
+
"content-type", ""
|
|
92
|
+
) or "text/csv" in response.headers.get("content-type", ""):
|
|
93
|
+
return response.content
|
|
94
|
+
return _parse_json_response(response)
|
|
95
|
+
_raise_craft_ai_error_from_response(response)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def handle_http_request(request_func):
|
|
99
|
+
def wrapper(*args, **kwargs):
|
|
100
|
+
get_response = kwargs.pop("get_response", False)
|
|
101
|
+
try:
|
|
102
|
+
response = request_func(*args, **kwargs)
|
|
103
|
+
except RequestException as error:
|
|
104
|
+
raise SdkException(
|
|
105
|
+
"Unable to perform the request", name="RequestError"
|
|
106
|
+
) from error
|
|
107
|
+
|
|
108
|
+
content = handle_http_response(response)
|
|
109
|
+
if get_response:
|
|
110
|
+
return content, response
|
|
111
|
+
return content
|
|
112
|
+
|
|
113
|
+
return wrapper
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from .datetime_utils import datetime_to_timestamp_in_ms, parse_isodate
|
|
2
|
+
from .dict_utils import remove_keys_from_dict, remove_none_values
|
|
3
|
+
from .file_utils import chunk_buffer, convert_size, merge_paths, multipartify
|
|
4
|
+
|
|
5
|
+
__all__ = [
|
|
6
|
+
"datetime_to_timestamp_in_ms",
|
|
7
|
+
"parse_isodate",
|
|
8
|
+
"remove_keys_from_dict",
|
|
9
|
+
"remove_none_values",
|
|
10
|
+
"merge_paths",
|
|
11
|
+
"multipartify",
|
|
12
|
+
"chunk_buffer",
|
|
13
|
+
"convert_size",
|
|
14
|
+
]
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def datetime_to_timestamp_in_ms(dt):
|
|
6
|
+
if not isinstance(dt, datetime):
|
|
7
|
+
raise ValueError("Parameter must be a datetime.datetime object.")
|
|
8
|
+
return int(1_000 * dt.timestamp())
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def parse_isodate(date_string):
|
|
12
|
+
"""_summary_
|
|
13
|
+
|
|
14
|
+
Args:
|
|
15
|
+
date_string (str): date in ISO 8601 format potentially ending with
|
|
16
|
+
"Z" specific character.
|
|
17
|
+
|
|
18
|
+
Returns:
|
|
19
|
+
:obj:`datetime.datetime`: A `datetime` corresponding to `date_string`.
|
|
20
|
+
"""
|
|
21
|
+
if date_string[-1] == "Z":
|
|
22
|
+
date_string = date_string.rstrip("Z")
|
|
23
|
+
|
|
24
|
+
return datetime.fromisoformat(re.sub(r"\.\d+", "", date_string))
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
from typing import Union
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def remove_none_values(obj):
|
|
5
|
+
return {key: value for key, value in obj.items() if value is not None}
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def remove_keys_from_dict(dictionnary: dict, paths_to_remove: Union[set, None] = None):
|
|
9
|
+
if dictionnary is None:
|
|
10
|
+
return None
|
|
11
|
+
|
|
12
|
+
paths_to_remove = paths_to_remove or set()
|
|
13
|
+
returned_dictionnary = dictionnary.copy()
|
|
14
|
+
|
|
15
|
+
for path in paths_to_remove:
|
|
16
|
+
key, _, subpath = path.partition(".")
|
|
17
|
+
if subpath == "":
|
|
18
|
+
returned_dictionnary.pop(key, None)
|
|
19
|
+
elif isinstance(returned_dictionnary.get(key), dict):
|
|
20
|
+
returned_dictionnary[key] = remove_keys_from_dict(
|
|
21
|
+
returned_dictionnary[key], {subpath}
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
return returned_dictionnary
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from io import BytesIO, IOBase, StringIO
|
|
2
|
+
from typing import Iterable, Union
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def merge_paths(prefix, path):
|
|
6
|
+
components = (value for value in path.split("/") if value != "")
|
|
7
|
+
return prefix + "/".join(components)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# From https://stackoverflow.com/a/58767245/4839162
|
|
11
|
+
def chunk_buffer(buffer: IOBase, size: int) -> Iterable[Union[BytesIO, StringIO]]:
|
|
12
|
+
size_int = int(size)
|
|
13
|
+
b = buffer.read(size_int)
|
|
14
|
+
next_data = None
|
|
15
|
+
while b:
|
|
16
|
+
chunk = StringIO() if isinstance(b, str) else BytesIO()
|
|
17
|
+
previous_data = next_data
|
|
18
|
+
if previous_data:
|
|
19
|
+
chunk.write(next_data)
|
|
20
|
+
chunk.write(b)
|
|
21
|
+
chunk.seek(0)
|
|
22
|
+
|
|
23
|
+
next_data = buffer.read(1)
|
|
24
|
+
|
|
25
|
+
data = {
|
|
26
|
+
"chunk": chunk,
|
|
27
|
+
"len": len(b) + (len(previous_data) if previous_data else 0),
|
|
28
|
+
"lastChunk": len(next_data) == 0,
|
|
29
|
+
}
|
|
30
|
+
yield data
|
|
31
|
+
chunk.close()
|
|
32
|
+
b = buffer.read(size_int - 1)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def convert_size(size_in_bytes):
|
|
36
|
+
"""
|
|
37
|
+
Convert a size in bytes to a human readable string.
|
|
38
|
+
"""
|
|
39
|
+
units = ["B", "KB", "MB", "GB", "TB"]
|
|
40
|
+
for unit in units:
|
|
41
|
+
if size_in_bytes < 1024.0:
|
|
42
|
+
break
|
|
43
|
+
size_in_bytes /= 1024.0
|
|
44
|
+
return "{:.2f} {}".format(size_in_bytes, unit)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# Adapted from
|
|
48
|
+
# https://gist.github.com/kazqvaizer/4cebebe5db654a414132809f9f88067b#file-multipartify-py-L13-L33
|
|
49
|
+
def multipartify(data, parent_key=None) -> dict:
|
|
50
|
+
def formatter(v):
|
|
51
|
+
return (None, v if v is not None else "")
|
|
52
|
+
|
|
53
|
+
if type(data) is not dict:
|
|
54
|
+
return {parent_key: formatter(data)}
|
|
55
|
+
|
|
56
|
+
converted = []
|
|
57
|
+
|
|
58
|
+
for key, value in data.items():
|
|
59
|
+
current_key = key if parent_key is None else f"{parent_key}[{key}]"
|
|
60
|
+
if type(value) is dict:
|
|
61
|
+
converted.extend(multipartify(value, current_key).items())
|
|
62
|
+
elif type(value) is list:
|
|
63
|
+
for ind, list_value in enumerate(value):
|
|
64
|
+
iter_key = f"{current_key}[{ind}]"
|
|
65
|
+
converted.extend(multipartify(list_value, iter_key).items())
|
|
66
|
+
else:
|
|
67
|
+
converted.append((current_key, formatter(value)))
|
|
68
|
+
|
|
69
|
+
return dict(converted)
|
|
Binary file
|
|
@@ -1,362 +0,0 @@
|
|
|
1
|
-
from datetime import datetime
|
|
2
|
-
from enum import Enum
|
|
3
|
-
import functools
|
|
4
|
-
from io import BytesIO, IOBase, StringIO
|
|
5
|
-
import re
|
|
6
|
-
import sys
|
|
7
|
-
from typing import Callable, Iterable, Union
|
|
8
|
-
import xml.etree.ElementTree as ET
|
|
9
|
-
from requests import RequestException, Response
|
|
10
|
-
from json import JSONDecodeError
|
|
11
|
-
import os
|
|
12
|
-
|
|
13
|
-
from .exceptions import SdkException
|
|
14
|
-
|
|
15
|
-
_execution_context = None
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
def get_execution_id():
|
|
19
|
-
global _execution_context
|
|
20
|
-
if _execution_context is None:
|
|
21
|
-
try:
|
|
22
|
-
# File injected in steps
|
|
23
|
-
import __craft_internal_execution_context # type: ignore
|
|
24
|
-
|
|
25
|
-
_execution_context = __craft_internal_execution_context
|
|
26
|
-
except ImportError:
|
|
27
|
-
_execution_context = False
|
|
28
|
-
if _execution_context:
|
|
29
|
-
try:
|
|
30
|
-
return _execution_context.current_execution_id.get()
|
|
31
|
-
except LookupError:
|
|
32
|
-
pass
|
|
33
|
-
return os.environ.get("CRAFT_AI_EXECUTION_ID")
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
def handle_data_store_response(response):
    """Return the content of a response received from the datastore
    or parse the send error and raise it.

    Args:
        response (requests.Response): A response from the data store.

    Raises:
        SdkException: When the response contains an error.

    Returns:
        :obj:`str`: Content of the response.
    """
    success = 200 <= response.status_code < 300
    if success:
        return response.content

    try:
        # Failures come back as an XML document; turn its nodes into a
        # structured SdkException.
        root = ET.fromstring(response.text)
        details = {child.tag: child.text for child in root}
        code = details.pop("Code")
        message = details.pop("Message")
    except ET.ParseError:
        raise SdkException(
            "Unable to decode response from the data store: "
            f"Content being:\n'{response.text}'",
            status_code=response.status_code,
        )
    raise SdkException(
        message=message,
        status_code=response.status_code,
        name=code,
        additional_data=details,
    )
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
def _parse_json_response(response):
|
|
73
|
-
if response.status_code == 204 or response.text == "OK":
|
|
74
|
-
return
|
|
75
|
-
try:
|
|
76
|
-
response_json = response.json()
|
|
77
|
-
except JSONDecodeError:
|
|
78
|
-
raise SdkException(
|
|
79
|
-
f"Unable to decode response data into json. Data being:\n'{response.text}'",
|
|
80
|
-
status_code=response.status_code,
|
|
81
|
-
) from None
|
|
82
|
-
return response_json
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
def _raise_craft_ai_error_from_response(response: Response):
    """Raise an SdkException built from a Craft AI API error response."""
    try:
        payload = response.json()
        message = payload.get("message", "The server returned an error")

        # Inside a running deployment/execution the platform answers some
        # SDK calls with 403; make that situation explicit for the user.
        if response.status_code == 403 and get_execution_id() is not None:
            message = (
                "Insufficient permissions. This is probably because "
                "you called an SDK function that is not permitted from "
                "inside a running deployment or execution, even if it "
                "works from your computer. Original error: " + message
            )

        raise SdkException(
            message=message,
            status_code=response.status_code,
            name=payload.get("name"),
            request_id=payload.get("request_id"),
            additional_data=payload.get("additional_data"),
        )
    except JSONDecodeError:
        raise SdkException(
            "The server returned an invalid response content. "
            f"Content being:\n'{response.text}'",
            status_code=response.status_code,
        ) from None
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
def handle_http_response(response: Response):
    """Return the decoded body of a successful response, or raise an error."""
    if not (200 <= response.status_code < 400):
        # Delegates to the error parser, which always raises.
        _raise_craft_ai_error_from_response(response)
    content_type = response.headers.get("content-type", "")
    if "application/octet-stream" in content_type or "text/csv" in content_type:
        # Binary and CSV payloads are handed back untouched.
        return response.content
    return _parse_json_response(response)
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
def handle_http_request(request_func):
    """Decorator that performs a request and normalizes its outcome.

    Network-level failures are wrapped into an :class:`SdkException`;
    the HTTP response is decoded by ``handle_http_response``. Callers may
    pass ``get_response=True`` to also receive the raw response object.
    """

    # Fix: preserve the wrapped function's metadata, consistently with the
    # other decorators in this module (log_func_result, use_authentication).
    @functools.wraps(request_func)
    def wrapper(*args, **kwargs):
        get_response = kwargs.pop("get_response", False)
        try:
            response = request_func(*args, **kwargs)
        except RequestException as error:
            raise SdkException(
                "Unable to perform the request", name="RequestError"
            ) from error

        content = handle_http_response(response)
        if get_response:
            return content, response
        return content

    return wrapper
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
def log_action(sdk, message: str, should_log: Union[bool, Callable[[], bool]] = True):
    """Write *message* to stderr when the SDK is verbose and logging is enabled."""
    if sdk.verbose_log:
        # should_log may be a flag or a zero-argument predicate; it is only
        # evaluated when the SDK is verbose, as in the original short-circuit.
        enabled = should_log() if callable(should_log) else should_log
        if enabled:
            print(message, file=sys.stderr)
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
def log_func_result(message: str, should_log: Union[bool, Callable[[], bool]] = True):
    """Decorator factory logging the success or failure of an SDK action.

    The decorated function must take the SDK instance as its first
    positional argument; *message* prefixes every log line.
    """

    def decorator_log_func_result(action_func):
        @functools.wraps(action_func)
        def wrapper_log_func_result(*args, **kwargs):
            sdk = args[0]
            try:
                result = action_func(*args, **kwargs)
                log_action(sdk, "{:s} succeeded".format(message), should_log)
                return result
            except SdkException as error:
                # Known SDK failures are logged with the original message.
                log_action(
                    sdk,
                    "{:s} failed ! {}".format(message, error),
                    should_log,
                )
                raise error
            except Exception as error:
                # Anything else is flagged as unexpected before re-raising.
                log_action(
                    sdk,
                    "{:s} failed for unexpected reason ! {}".format(message, error),
                    should_log,
                )
                raise error

        return wrapper_log_func_result

    return decorator_log_func_result
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
def _datetime_to_timestamp_in_ms(dt):
|
|
177
|
-
if not isinstance(dt, datetime):
|
|
178
|
-
raise ValueError("Parameter must be a datetime.datetime object.")
|
|
179
|
-
return int(1_000 * dt.timestamp())
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
def parse_isodate(date_string):
    """Parse an ISO 8601 date string into a :obj:`datetime.datetime`.

    A trailing "Z" (UTC marker, not accepted by ``datetime.fromisoformat``
    before Python 3.11) is removed, and fractional seconds are discarded.

    Args:
        date_string (str): date in ISO 8601 format potentially ending with
            "Z" specific character.

    Returns:
        :obj:`datetime.datetime`: A `datetime` corresponding to `date_string`.
    """
    if date_string.endswith("Z"):
        # Drop exactly the final "Z"; rstrip("Z") would also eat repeated
        # ones, and endswith avoids an IndexError on an empty string.
        date_string = date_string[:-1]

    # Strip fractional seconds before parsing.
    return datetime.fromisoformat(re.sub(r"\.\d+", "", date_string))
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
def use_authentication(action_func):
    """Decorator ensuring a valid access token is attached to the request.

    Refreshes the token when it is missing or close to expiry, injects the
    ``Authorization`` header, and clears the cached token on a 401 reply.
    """

    @functools.wraps(action_func)
    def wrapper(sdk, *args, headers=None, **kwargs):
        token_data = sdk._access_token_data
        if token_data is None or token_data["exp"] < (
            datetime.now() + sdk._access_token_margin
        ).timestamp():
            # Token absent or expiring within the safety margin: renew it.
            sdk._refresh_access_token()

        actual_headers = {"Authorization": f"Bearer {sdk._access_token}"}
        if headers is not None:
            # Caller-supplied headers win over the injected one.
            actual_headers.update(headers)

        response = action_func(sdk, *args, headers=actual_headers, **kwargs)
        if response.status_code == 401:
            sdk._clear_access_token()
        return response

    return wrapper
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
def remove_none_values(obj):
    """Return a copy of the mapping *obj* without the keys mapped to None."""
    return {k: v for k, v in obj.items() if v is not None}
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
def remove_keys_from_dict(dictionnary: dict, paths_to_remove: set = None):
    """Return a copy of *dictionnary* with the given dotted paths removed.

    Each path is a key name, optionally followed by ``.sub.key`` segments
    addressing nested dictionaries. ``None`` input yields ``None``.
    """
    if dictionnary is None:
        return None

    result = dict(dictionnary)
    for path in paths_to_remove or set():
        key, _, remainder = path.partition(".")
        if not remainder:
            # Leaf of the path: drop the key if present.
            result.pop(key, None)
        elif isinstance(result.get(key), dict):
            # Recurse into the nested dictionary with the remaining path.
            result[key] = remove_keys_from_dict(result[key], {remainder})

    return result
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
def merge_paths(prefix, path):
    """Append *path* to *prefix*, collapsing empty segments of *path*."""
    segments = [segment for segment in path.split("/") if segment]
    return prefix + "/".join(segments)
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
class CREATION_PARAMETER_VALUE(Enum):
    """Enumeration for creation parameters special values.

    These sentinels let callers distinguish between "fall back to the
    project-level value" and "explicitly no value" when building a
    container configuration (see ``map_container_config_step_parameter``).
    """

    #: Special value to indicate that the parameter should be set to the
    #: project information value.
    FALLBACK_PROJECT = "FALLBACK_PROJECT"
    #: Special value to indicate that the parameter should be set to `None`.
    NULL = "NULL"
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
def map_container_config_step_parameter(container_config):
    """
    Maps container config with :obj:`CREATION_PARAMETER_VALUE` enum values to final
    container config. `None` is considered to be equivalent to
    :obj:`CREATION_PARAMETER_VALUE.FALLBACK_PROJECT`, and should not be projected to
    output
    """
    mapped = {}
    for key, value in container_config.items():
        if key == "local_folder":
            # Client-side only notion: never forwarded.
            continue
        if value is CREATION_PARAMETER_VALUE.NULL:
            mapped[key] = None
        elif value is not None and value is not CREATION_PARAMETER_VALUE.FALLBACK_PROJECT:
            mapped[key] = value
    return mapped
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
# From https://stackoverflow.com/a/58767245/4839162
|
|
278
|
-
# From https://stackoverflow.com/a/58767245/4839162
def chunk_buffer(buffer: IOBase, size: int) -> Iterable[Union[BytesIO, StringIO]]:
    """Yield successive chunks of *buffer*, flagging the last one.

    Each yielded item is a dict with keys:
      * ``chunk``: a BytesIO/StringIO holding at most *size* units (bytes or
        characters, matching the stream type) — only valid until the next
        iteration, as it is closed right after being yielded;
      * ``len``: the number of units in the chunk;
      * ``lastChunk``: True when the buffer is exhausted.

    A one-unit read-ahead detects the end of the buffer, which is why
    subsequent reads fetch ``size - 1`` units.

    Raises:
        ValueError: if *size* is lower than 2 (the read-ahead scheme would
            then read 0 units per iteration and silently drop data).
    """
    size_int = int(size)
    if size_int < 2:
        raise ValueError("chunk size must be at least 2")
    b = buffer.read(size_int)
    next_data = None
    while b:
        # Match the stream type: text buffers yield StringIO chunks.
        chunk = StringIO() if isinstance(b, str) else BytesIO()
        previous_data = next_data
        if previous_data:
            # Prepend the unit read ahead during the previous iteration.
            chunk.write(previous_data)
        chunk.write(b)
        chunk.seek(0)

        # Read one unit ahead to know whether more data follows.
        next_data = buffer.read(1)

        data = {
            "chunk": chunk,
            "len": len(b) + (len(previous_data) if previous_data else 0),
            "lastChunk": len(next_data) == 0,
        }
        yield data
        chunk.close()
        b = buffer.read(size_int - 1)

    if next_data:
        # Fix: when EOF falls exactly after a read-ahead (input length
        # congruent to 1 modulo size), the loop above exits with one unit
        # still pending; previously it was silently dropped and lastChunk
        # was never reported True. Emit it as the final chunk.
        chunk = StringIO() if isinstance(next_data, str) else BytesIO()
        chunk.write(next_data)
        chunk.seek(0)
        yield {"chunk": chunk, "len": len(next_data), "lastChunk": True}
        chunk.close()
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
def convert_size(size_in_bytes):
    """
    Convert a size in bytes to a human readable string.
    """
    amount = size_in_bytes
    unit = "B"
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if amount < 1024.0:
            break
        amount /= 1024.0
    return "{:.2f} {}".format(amount, unit)
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
# Adapted from
|
|
315
|
-
# https://gist.github.com/kazqvaizer/4cebebe5db654a414132809f9f88067b#file-multipartify-py-L13-L33
|
|
316
|
-
# Adapted from
# https://gist.github.com/kazqvaizer/4cebebe5db654a414132809f9f88067b#file-multipartify-py-L13-L33
def multipartify(data, parent_key=None) -> dict:
    """Flatten *data* into multipart form fields.

    Nested dicts and lists are encoded with bracketed keys (``a[b]``,
    ``a[0]``) and every scalar is wrapped into the ``(filename, value)``
    tuple shape expected by ``requests``.
    """

    def as_field(value):
        # requests expects (filename, value); None becomes an empty string.
        return (None, value if value is not None else "")

    if type(data) is not dict:
        return {parent_key: as_field(data)}

    fields = {}
    for key, value in data.items():
        full_key = key if parent_key is None else f"{parent_key}[{key}]"
        if type(value) is dict:
            fields.update(multipartify(value, full_key))
        elif type(value) is list:
            for index, item in enumerate(value):
                fields.update(multipartify(item, f"{full_key}[{index}]"))
        else:
            fields[full_key] = as_field(value)

    return fields
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
def _wait_create_until_ready(sdk, name, get_func, timeout_s, start_time, get_log_func):
|
|
340
|
-
elapsed_time = sdk._get_time() - start_time
|
|
341
|
-
status = "creation_pending"
|
|
342
|
-
while status == "creation_pending" and (
|
|
343
|
-
timeout_s is None or elapsed_time < timeout_s
|
|
344
|
-
):
|
|
345
|
-
created_obj = get_func(sdk, name)
|
|
346
|
-
status = created_obj.get("creation_info", {}).get("status", None)
|
|
347
|
-
elapsed_time = sdk._get_time() - start_time
|
|
348
|
-
|
|
349
|
-
if status == "creation_failed":
|
|
350
|
-
raise SdkException(
|
|
351
|
-
f'The creation of "{name}" has failed. You can check the logs with '
|
|
352
|
-
f'the "{get_log_func.__name__}" function.',
|
|
353
|
-
name="CreationFailed",
|
|
354
|
-
)
|
|
355
|
-
if status != "ready":
|
|
356
|
-
raise SdkException(
|
|
357
|
-
f'The creation of "{name}" was not ready in time. It is still being '
|
|
358
|
-
"created but this function stopped trying. Please check its status with "
|
|
359
|
-
f'"{get_func.__name__}".',
|
|
360
|
-
name="TimeoutException",
|
|
361
|
-
)
|
|
362
|
-
return created_obj
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|