qwak-core 0.4.271__py3-none-any.whl → 0.4.273__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- _qwak_proto/qwak/model_group/model_group_repository_details_pb2.py +86 -0
- _qwak_proto/qwak/model_group/model_group_repository_details_pb2.pyi +107 -0
- _qwak_proto/qwak/model_group/model_group_repository_details_pb2_grpc.py +4 -0
- _qwak_proto/qwak/models/models_pb2.py +102 -81
- _qwak_proto/qwak/models/models_pb2.pyi +57 -0
- _qwak_proto/qwak/models/models_pb2_grpc.py +34 -0
- _qwak_proto/qwak/models/models_query_pb2.py +77 -0
- _qwak_proto/qwak/models/models_query_pb2.pyi +157 -0
- _qwak_proto/qwak/models/models_query_pb2_grpc.py +4 -0
- frogml_storage/__init__.py +1 -0
- frogml_storage/artifactory/__init__.py +1 -0
- frogml_storage/artifactory/_artifactory_api.py +315 -0
- frogml_storage/authentication/login/__init__.py +1 -0
- frogml_storage/authentication/login/_login_cli.py +239 -0
- frogml_storage/authentication/login/_login_command.py +74 -0
- frogml_storage/authentication/models/__init__.py +3 -0
- frogml_storage/authentication/models/_auth.py +24 -0
- frogml_storage/authentication/models/_auth_config.py +70 -0
- frogml_storage/authentication/models/_login.py +22 -0
- frogml_storage/authentication/utils/__init__.py +17 -0
- frogml_storage/authentication/utils/_authentication_utils.py +281 -0
- frogml_storage/authentication/utils/_login_checks_utils.py +114 -0
- frogml_storage/base_storage.py +140 -0
- frogml_storage/constants.py +56 -0
- frogml_storage/exceptions/checksum_verification_error.py +3 -0
- frogml_storage/exceptions/validation_error.py +4 -0
- frogml_storage/frog_ml.py +668 -0
- frogml_storage/http/__init__.py +1 -0
- frogml_storage/http/http_client.py +83 -0
- frogml_storage/logging/__init__.py +1 -0
- frogml_storage/logging/_log_config.py +45 -0
- frogml_storage/logging/log_utils.py +21 -0
- frogml_storage/models/__init__.py +1 -0
- frogml_storage/models/_download_context.py +54 -0
- frogml_storage/models/dataset_manifest.py +13 -0
- frogml_storage/models/entity_manifest.py +93 -0
- frogml_storage/models/frogml_dataset_version.py +21 -0
- frogml_storage/models/frogml_entity_type_info.py +50 -0
- frogml_storage/models/frogml_entity_version.py +34 -0
- frogml_storage/models/frogml_model_version.py +21 -0
- frogml_storage/models/model_manifest.py +60 -0
- frogml_storage/models/serialization_metadata.py +15 -0
- frogml_storage/utils/__init__.py +12 -0
- frogml_storage/utils/_environment.py +21 -0
- frogml_storage/utils/_input_checks_utility.py +104 -0
- frogml_storage/utils/_storage_utils.py +15 -0
- frogml_storage/utils/_url_utils.py +27 -0
- qwak/__init__.py +1 -1
- qwak/clients/instance_template/client.py +6 -4
- qwak/clients/prompt_manager/model_descriptor_mapper.py +21 -19
- qwak/feature_store/_common/artifact_utils.py +3 -3
- qwak/feature_store/data_sources/base.py +4 -4
- qwak/feature_store/data_sources/batch/athena.py +3 -3
- qwak/feature_store/feature_sets/streaming.py +3 -3
- qwak/feature_store/feature_sets/streaming_backfill.py +1 -1
- qwak/feature_store/online/client.py +6 -6
- qwak/feature_store/sinks/streaming/factory.py +1 -1
- qwak/inner/build_logic/phases/phase_010_fetch_model/fetch_strategy_manager/strategy/git/git_strategy.py +3 -3
- qwak/inner/di_configuration/account.py +23 -24
- qwak/inner/tool/auth.py +2 -2
- qwak/llmops/provider/openai/provider.py +3 -3
- qwak/model/tools/adapters/output.py +1 -1
- qwak/model/utils/feature_utils.py +12 -8
- qwak/model_loggers/artifact_logger.py +7 -7
- qwak/tools/logger/logger.py +1 -1
- qwak_core-0.4.273.dist-info/METADATA +415 -0
- {qwak_core-0.4.271.dist-info → qwak_core-0.4.273.dist-info}/RECORD +68 -26
- _qwak_proto/__init__.py +0 -0
- _qwak_proto/qwak/__init__.py +0 -0
- qwak_core-0.4.271.dist-info/METADATA +0 -53
- {qwak_core-0.4.271.dist-info → qwak_core-0.4.273.dist-info}/WHEEL +0 -0
@@ -0,0 +1,77 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: qwak/models/models_query.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# Serialized FileDescriptor for qwak/models/models_query.proto, registered
# with the process-wide default descriptor pool. Regenerate from the .proto
# file instead of editing this blob by hand.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1eqwak/models/models_query.proto\x12\x16qwak.models.management\"\x99\x01\n\nSortModels\x12\x31\n\x07sort_by\x18\x01 \x01(\x0e\x32\x1e.qwak.models.management.SortByH\x00\x12\x37\n\nsort_order\x18\x02 \x01(\x0e\x32!.qwak.models.management.SortOrderH\x01\x42\r\n\x0bsort_by_optB\x10\n\x0esort_order_opt\"\xc8\x01\n\x06\x46ilter\x12\x1e\n\x14model_name_substring\x18\x01 \x01(\tH\x00\x12\x19\n\x11model_group_names\x18\x02 \x03(\t\x12V\n\x17\x64\x65ployment_model_status\x18\x03 \x01(\x0e\x32\x33.qwak.models.management.DeploymentModelStatusFilterH\x01\x42\x0c\n\nsearch_optB\x1d\n\x1b\x64\x65ployment_model_status_opt\"<\n\x12PaginationMetadata\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x13\n\x0bnext_cursor\x18\x02 \x01(\t*J\n\x06SortBy\x12\x13\n\x0fSORT_BY_INVALID\x10\x00\x12\x10\n\x0cSORT_BY_NAME\x10\x01\x12\x19\n\x15SORT_BY_CREATION_DATE\x10\x02*L\n\tSortOrder\x12\x16\n\x12SORT_ORDER_INVALID\x10\x00\x12\x12\n\x0eSORT_ORDER_ASC\x10\x01\x12\x13\n\x0fSORT_ORDER_DESC\x10\x02*\xa5\x01\n\x1b\x44\x65ploymentModelStatusFilter\x12*\n&DEPLOYMENT_MODEL_STATUS_FILTER_INVALID\x10\x00\x12+\n\'DEPLOYMENT_MODEL_STATUS_FILTER_DEPLOYED\x10\x01\x12-\n)DEPLOYMENT_MODEL_STATUS_FILTER_UNDEPLOYED\x10\x02\x42\x36\n!com.qwak.ai.management.models.apiB\x0fModelQueryProtoP\x01\x62\x06proto3')

# Enum descriptors wrapped and re-exported at module level, with each enum
# value also exported as a module constant (standard protoc output shape).
_SORTBY = DESCRIPTOR.enum_types_by_name['SortBy']
SortBy = enum_type_wrapper.EnumTypeWrapper(_SORTBY)
_SORTORDER = DESCRIPTOR.enum_types_by_name['SortOrder']
SortOrder = enum_type_wrapper.EnumTypeWrapper(_SORTORDER)
_DEPLOYMENTMODELSTATUSFILTER = DESCRIPTOR.enum_types_by_name['DeploymentModelStatusFilter']
DeploymentModelStatusFilter = enum_type_wrapper.EnumTypeWrapper(_DEPLOYMENTMODELSTATUSFILTER)
SORT_BY_INVALID = 0
SORT_BY_NAME = 1
SORT_BY_CREATION_DATE = 2
SORT_ORDER_INVALID = 0
SORT_ORDER_ASC = 1
SORT_ORDER_DESC = 2
DEPLOYMENT_MODEL_STATUS_FILTER_INVALID = 0
DEPLOYMENT_MODEL_STATUS_FILTER_DEPLOYED = 1
DEPLOYMENT_MODEL_STATUS_FILTER_UNDEPLOYED = 2


# Message classes built dynamically from their descriptors and registered
# with the symbol database so the runtime can resolve them by full name.
_SORTMODELS = DESCRIPTOR.message_types_by_name['SortModels']
_FILTER = DESCRIPTOR.message_types_by_name['Filter']
_PAGINATIONMETADATA = DESCRIPTOR.message_types_by_name['PaginationMetadata']
SortModels = _reflection.GeneratedProtocolMessageType('SortModels', (_message.Message,), {
  'DESCRIPTOR' : _SORTMODELS,
  '__module__' : 'qwak.models.models_query_pb2'
  # @@protoc_insertion_point(class_scope:qwak.models.management.SortModels)
  })
_sym_db.RegisterMessage(SortModels)

Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), {
  'DESCRIPTOR' : _FILTER,
  '__module__' : 'qwak.models.models_query_pb2'
  # @@protoc_insertion_point(class_scope:qwak.models.management.Filter)
  })
_sym_db.RegisterMessage(Filter)

PaginationMetadata = _reflection.GeneratedProtocolMessageType('PaginationMetadata', (_message.Message,), {
  'DESCRIPTOR' : _PAGINATIONMETADATA,
  '__module__' : 'qwak.models.models_query_pb2'
  # @@protoc_insertion_point(class_scope:qwak.models.management.PaginationMetadata)
  })
_sym_db.RegisterMessage(PaginationMetadata)

# When the pure-Python descriptor implementation is in use, fill in the byte
# offsets of each descriptor within the serialized file blob above.
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n!com.qwak.ai.management.models.apiB\017ModelQueryProtoP\001'
  _SORTBY._serialized_start=479
  _SORTBY._serialized_end=553
  _SORTORDER._serialized_start=555
  _SORTORDER._serialized_end=631
  _DEPLOYMENTMODELSTATUSFILTER._serialized_start=634
  _DEPLOYMENTMODELSTATUSFILTER._serialized_end=799
  _SORTMODELS._serialized_start=59
  _SORTMODELS._serialized_end=212
  _FILTER._serialized_start=215
  _FILTER._serialized_end=415
  _PAGINATIONMETADATA._serialized_start=417
  _PAGINATIONMETADATA._serialized_end=477
# @@protoc_insertion_point(module_scope)
|
@@ -0,0 +1,157 @@
|
|
1
|
+
"""
|
2
|
+
@generated by mypy-protobuf. Do not edit manually!
|
3
|
+
isort:skip_file
|
4
|
+
"""
|
5
|
+
import builtins
|
6
|
+
import collections.abc
|
7
|
+
import google.protobuf.descriptor
|
8
|
+
import google.protobuf.internal.containers
|
9
|
+
import google.protobuf.internal.enum_type_wrapper
|
10
|
+
import google.protobuf.message
|
11
|
+
import sys
|
12
|
+
import typing
|
13
|
+
|
14
|
+
if sys.version_info >= (3, 10):
|
15
|
+
import typing as typing_extensions
|
16
|
+
else:
|
17
|
+
import typing_extensions
|
18
|
+
|
19
|
+
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
20
|
+
|
21
|
+
class _SortBy:
|
22
|
+
ValueType = typing.NewType("ValueType", builtins.int)
|
23
|
+
V: typing_extensions.TypeAlias = ValueType
|
24
|
+
|
25
|
+
class _SortByEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SortBy.ValueType], builtins.type): # noqa: F821
|
26
|
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
27
|
+
SORT_BY_INVALID: _SortBy.ValueType # 0
|
28
|
+
SORT_BY_NAME: _SortBy.ValueType # 1
|
29
|
+
"""Sort models by name (default)."""
|
30
|
+
SORT_BY_CREATION_DATE: _SortBy.ValueType # 2
|
31
|
+
"""Sort models by their creation date."""
|
32
|
+
|
33
|
+
class SortBy(_SortBy, metaclass=_SortByEnumTypeWrapper): ...
|
34
|
+
|
35
|
+
SORT_BY_INVALID: SortBy.ValueType # 0
|
36
|
+
SORT_BY_NAME: SortBy.ValueType # 1
|
37
|
+
"""Sort models by name (default)."""
|
38
|
+
SORT_BY_CREATION_DATE: SortBy.ValueType # 2
|
39
|
+
"""Sort models by their creation date."""
|
40
|
+
global___SortBy = SortBy
|
41
|
+
|
42
|
+
class _SortOrder:
|
43
|
+
ValueType = typing.NewType("ValueType", builtins.int)
|
44
|
+
V: typing_extensions.TypeAlias = ValueType
|
45
|
+
|
46
|
+
class _SortOrderEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SortOrder.ValueType], builtins.type): # noqa: F821
|
47
|
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
48
|
+
SORT_ORDER_INVALID: _SortOrder.ValueType # 0
|
49
|
+
SORT_ORDER_ASC: _SortOrder.ValueType # 1
|
50
|
+
"""Sort in ascending order (default)."""
|
51
|
+
SORT_ORDER_DESC: _SortOrder.ValueType # 2
|
52
|
+
"""Sort in descending order."""
|
53
|
+
|
54
|
+
class SortOrder(_SortOrder, metaclass=_SortOrderEnumTypeWrapper): ...
|
55
|
+
|
56
|
+
SORT_ORDER_INVALID: SortOrder.ValueType # 0
|
57
|
+
SORT_ORDER_ASC: SortOrder.ValueType # 1
|
58
|
+
"""Sort in ascending order (default)."""
|
59
|
+
SORT_ORDER_DESC: SortOrder.ValueType # 2
|
60
|
+
"""Sort in descending order."""
|
61
|
+
global___SortOrder = SortOrder
|
62
|
+
|
63
|
+
class _DeploymentModelStatusFilter:
|
64
|
+
ValueType = typing.NewType("ValueType", builtins.int)
|
65
|
+
V: typing_extensions.TypeAlias = ValueType
|
66
|
+
|
67
|
+
class _DeploymentModelStatusFilterEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DeploymentModelStatusFilter.ValueType], builtins.type): # noqa: F821
|
68
|
+
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
69
|
+
DEPLOYMENT_MODEL_STATUS_FILTER_INVALID: _DeploymentModelStatusFilter.ValueType # 0
|
70
|
+
DEPLOYMENT_MODEL_STATUS_FILTER_DEPLOYED: _DeploymentModelStatusFilter.ValueType # 1
|
71
|
+
DEPLOYMENT_MODEL_STATUS_FILTER_UNDEPLOYED: _DeploymentModelStatusFilter.ValueType # 2
|
72
|
+
|
73
|
+
class DeploymentModelStatusFilter(_DeploymentModelStatusFilter, metaclass=_DeploymentModelStatusFilterEnumTypeWrapper): ...
|
74
|
+
|
75
|
+
DEPLOYMENT_MODEL_STATUS_FILTER_INVALID: DeploymentModelStatusFilter.ValueType # 0
|
76
|
+
DEPLOYMENT_MODEL_STATUS_FILTER_DEPLOYED: DeploymentModelStatusFilter.ValueType # 1
|
77
|
+
DEPLOYMENT_MODEL_STATUS_FILTER_UNDEPLOYED: DeploymentModelStatusFilter.ValueType # 2
|
78
|
+
global___DeploymentModelStatusFilter = DeploymentModelStatusFilter
|
79
|
+
|
80
|
+
class SortModels(google.protobuf.message.Message):
|
81
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
82
|
+
|
83
|
+
SORT_BY_FIELD_NUMBER: builtins.int
|
84
|
+
SORT_ORDER_FIELD_NUMBER: builtins.int
|
85
|
+
sort_by: global___SortBy.ValueType
|
86
|
+
sort_order: global___SortOrder.ValueType
|
87
|
+
def __init__(
|
88
|
+
self,
|
89
|
+
*,
|
90
|
+
sort_by: global___SortBy.ValueType = ...,
|
91
|
+
sort_order: global___SortOrder.ValueType = ...,
|
92
|
+
) -> None: ...
|
93
|
+
def HasField(self, field_name: typing_extensions.Literal["sort_by", b"sort_by", "sort_by_opt", b"sort_by_opt", "sort_order", b"sort_order", "sort_order_opt", b"sort_order_opt"]) -> builtins.bool: ...
|
94
|
+
def ClearField(self, field_name: typing_extensions.Literal["sort_by", b"sort_by", "sort_by_opt", b"sort_by_opt", "sort_order", b"sort_order", "sort_order_opt", b"sort_order_opt"]) -> None: ...
|
95
|
+
@typing.overload
|
96
|
+
def WhichOneof(self, oneof_group: typing_extensions.Literal["sort_by_opt", b"sort_by_opt"]) -> typing_extensions.Literal["sort_by"] | None: ...
|
97
|
+
@typing.overload
|
98
|
+
def WhichOneof(self, oneof_group: typing_extensions.Literal["sort_order_opt", b"sort_order_opt"]) -> typing_extensions.Literal["sort_order"] | None: ...
|
99
|
+
|
100
|
+
global___SortModels = SortModels
|
101
|
+
|
102
|
+
class Filter(google.protobuf.message.Message):
|
103
|
+
"""Filter models by specific properties using a filter expression.
|
104
|
+
The following fields are used to filter the models returned by the query. An AND operation is applied between each of these fields.
|
105
|
+
"""
|
106
|
+
|
107
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
108
|
+
|
109
|
+
MODEL_NAME_SUBSTRING_FIELD_NUMBER: builtins.int
|
110
|
+
MODEL_GROUP_NAMES_FIELD_NUMBER: builtins.int
|
111
|
+
DEPLOYMENT_MODEL_STATUS_FIELD_NUMBER: builtins.int
|
112
|
+
model_name_substring: builtins.str
|
113
|
+
@property
|
114
|
+
def model_group_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
115
|
+
"""Filter models by one or more model group names.
|
116
|
+
Return models that belong to one of the specified model groups.
|
117
|
+
Optional field. If not specified, models from all groups are included.
|
118
|
+
"""
|
119
|
+
deployment_model_status: global___DeploymentModelStatusFilter.ValueType
|
120
|
+
def __init__(
|
121
|
+
self,
|
122
|
+
*,
|
123
|
+
model_name_substring: builtins.str = ...,
|
124
|
+
model_group_names: collections.abc.Iterable[builtins.str] | None = ...,
|
125
|
+
deployment_model_status: global___DeploymentModelStatusFilter.ValueType = ...,
|
126
|
+
) -> None: ...
|
127
|
+
def HasField(self, field_name: typing_extensions.Literal["deployment_model_status", b"deployment_model_status", "deployment_model_status_opt", b"deployment_model_status_opt", "model_name_substring", b"model_name_substring", "search_opt", b"search_opt"]) -> builtins.bool: ...
|
128
|
+
def ClearField(self, field_name: typing_extensions.Literal["deployment_model_status", b"deployment_model_status", "deployment_model_status_opt", b"deployment_model_status_opt", "model_group_names", b"model_group_names", "model_name_substring", b"model_name_substring", "search_opt", b"search_opt"]) -> None: ...
|
129
|
+
@typing.overload
|
130
|
+
def WhichOneof(self, oneof_group: typing_extensions.Literal["deployment_model_status_opt", b"deployment_model_status_opt"]) -> typing_extensions.Literal["deployment_model_status"] | None: ...
|
131
|
+
@typing.overload
|
132
|
+
def WhichOneof(self, oneof_group: typing_extensions.Literal["search_opt", b"search_opt"]) -> typing_extensions.Literal["model_name_substring"] | None: ...
|
133
|
+
|
134
|
+
global___Filter = Filter
|
135
|
+
|
136
|
+
class PaginationMetadata(google.protobuf.message.Message):
|
137
|
+
"""Metadata for pagination."""
|
138
|
+
|
139
|
+
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
140
|
+
|
141
|
+
PAGE_SIZE_FIELD_NUMBER: builtins.int
|
142
|
+
NEXT_CURSOR_FIELD_NUMBER: builtins.int
|
143
|
+
page_size: builtins.int
|
144
|
+
"""Number of items per page."""
|
145
|
+
next_cursor: builtins.str
|
146
|
+
"""Cursor for the next page.
|
147
|
+
If there are no more pages, this field will be empty.
|
148
|
+
"""
|
149
|
+
def __init__(
|
150
|
+
self,
|
151
|
+
*,
|
152
|
+
page_size: builtins.int = ...,
|
153
|
+
next_cursor: builtins.str = ...,
|
154
|
+
) -> None: ...
|
155
|
+
def ClearField(self, field_name: typing_extensions.Literal["next_cursor", b"next_cursor", "page_size", b"page_size"]) -> None: ...
|
156
|
+
|
157
|
+
global___PaginationMetadata = PaginationMetadata
|
@@ -0,0 +1 @@
|
|
1
|
+
# Distribution version string (matches the released 0.4.273 wheel).
__version__ = "0.4.273"
|
@@ -0,0 +1 @@
|
|
1
|
+
from ._artifactory_api import ArtifactoryApi, StartTransactionResponse
|
@@ -0,0 +1,315 @@
|
|
1
|
+
import json
import os
from dataclasses import dataclass
from typing import Optional

from requests import Response
from tqdm.auto import tqdm
from tqdm.utils import CallbackIOWrapper
from urllib3 import Retry

from frogml_storage.logging import logger
from frogml_storage.utils import join_url
from frogml_storage.constants import CHECKSUM_SHA2_HEADER
from frogml_storage.models.entity_manifest import Checksums, EntityManifest
from frogml_storage.models.frogml_entity_type_info import FrogMLEntityTypeInfo
from frogml_storage.http import HTTPClient
from frogml_storage.models import DownloadContext
|
17
|
+
|
18
|
+
|
19
|
+
@dataclass
class StartTransactionResponse:
    """Response of the start-transaction endpoint.

    Carries the per-part upload paths (files, lead, dependencies, code)
    together with the server-issued transaction id that groups them.

    The original hand-written ``__init__`` duplicated the five declared
    fields verbatim; ``@dataclass`` generates the identical constructor
    (same field order, same keyword names) without the boilerplate, and
    adds ``__repr__``/``__eq__`` for free.
    """

    files_upload_path: str
    lead_upload_path: str
    dependencies_upload_path: str
    code_upload_path: str
    transaction_id: str
|
39
|
+
|
40
|
+
|
41
|
+
class ArtifactoryApi:
    """Client for Artifactory's machine-learning REST endpoints.

    Builds the repository/entity URLs used to upload, download and query
    FrogML entities (models / datasets) and delegates all HTTP traffic to
    an ``HTTPClient``.
    """

    def __init__(self, uri, auth=None, http_client=None):
        # An explicitly supplied http_client wins; otherwise one is built
        # from ``auth``. NOTE(review): ``self.auth`` is only assigned on the
        # fallback path — preserved as-is in case callers rely on it.
        self.uri = uri
        if http_client is not None:
            self.http_client = http_client
        else:
            self.auth = auth
            self.http_client = HTTPClient(auth=auth)

    def start_transaction(
        self,
        entity_type_info: FrogMLEntityTypeInfo,
        repository: str,
        entity_name: str,
        version: Optional[str],
    ) -> StartTransactionResponse:
        """Initialize an upload. Returns transaction ID and upload paths.

        When ``version`` is None the server is left to pick one; otherwise it
        is appended to the start-transaction URL. Re-raises any HTTP error
        after logging it.
        """
        if version is None:
            start_transaction_url = (
                f"{self.uri}/api/machinelearning/{repository}/"
                f"{entity_type_info.entity_type}/{entity_name}/start-transaction"
            )
        else:
            start_transaction_url = (
                f"{self.uri}/api/machinelearning/{repository}/{entity_type_info.entity_type}"
                f"/{entity_name}/start-transaction/{version}"
            )
        try:
            response = self.http_client.post(start_transaction_url)
            response.raise_for_status()
            # Fix: decode the JSON body once instead of once per field.
            body = response.json()
            files_upload_path = body["filesUploadPath"]
            lead_upload_path = body["leadUploadPath"]
            dependencies_upload_path = body["dependenciesUploadPath"]
            code_upload_path = body["codeUploadPath"]
            transaction_id = body["transactionId"]
        except Exception as exception:
            err = (
                f"Error occurred while trying to start an upload transaction for "
                f"{entity_type_info.entity_type}: '{entity_name}'"
                f" Error: '{exception}'"
            )
            logger.error(err, exc_info=False)
            raise exception
        return StartTransactionResponse(
            files_upload_path=files_upload_path,
            lead_upload_path=lead_upload_path,
            dependencies_upload_path=dependencies_upload_path,
            code_upload_path=code_upload_path,
            transaction_id=transaction_id,
        )

    def end_transaction(
        self,
        entity_type: FrogMLEntityTypeInfo,
        repository: str,
        entity_name: str,
        entity_manifest: EntityManifest,
        transaction_id: str,
        version: str,
        properties: Optional[dict[str, str]],
    ) -> None:
        """
        Upload model-manifest.json | dataset-manifest.json file, makes the model | dataset available in the repository
        """
        filename = entity_type.metadata_file_name

        url = join_url(
            self.uri,
            "api",
            "machinelearning",
            repository,
            entity_type.entity_type,
            "entity-manifest",
            entity_name,
            version,
            transaction_id,
        )

        json_entity_manifest = entity_manifest.to_json()
        self.upload_entity_manifest(
            entity_type=entity_type,
            filename=filename,
            payload=json_entity_manifest,
            url=url,
            properties=properties,
        )

    def download_file(self, args: DownloadContext) -> None:
        """Stream an artifact to ``args.target_path`` with a progress bar.

        On any failure the partially written file is removed and the
        exception re-raised (see ``__handle_download_exception``).
        """
        filename = os.path.basename(args.target_path)
        try:
            url = f"{self.uri}/{args.repo_key}/{args.source_url}"
            with self.http_client.get(url=url, stream=True) as response:
                response.raise_for_status()
                # content-length may be absent; tqdm treats 0 as "unknown total".
                total_size = int(response.headers.get("content-length", 0))
                with open(args.target_path, "wb") as file:
                    with self.__initialize_progress_bar(total_size, filename) as pbar:
                        for chunk in response.iter_content(chunk_size=8192):
                            if chunk:
                                file.write(chunk)
                                pbar.update(len(chunk))

        except Exception as exception:
            self.__handle_download_exception(exception, args.target_path, filename)

    def get_entity_manifest(
        self,
        entity_type_info: FrogMLEntityTypeInfo,
        repository: str,
        entity_name: str,
        namespace: Optional[str],
        version: Optional[str],
    ) -> dict:
        """Fetch the entity manifest JSON for a model/dataset version."""
        url = join_url(
            self.uri,
            "api",
            "machinelearning",
            repository,
            entity_type_info.entity_type,
            "entity-manifest",
            namespace,
            entity_name,
            version,
        )
        try:
            with self.http_client.get(url=url) as r:
                r.raise_for_status()
                return r.json()
        except Exception as exception:
            err = f"Error occurred while trying to get {entity_type_info.entity_type} info file. Error: '{exception}'"
            logger.error(err, exc_info=False)
            raise exception

    @staticmethod
    def __handle_download_exception(
        exception: Exception, target_path: str, filename: str
    ) -> None:
        # Remove the partial file so a failed download cannot leave a corrupt
        # artifact on disk.
        if os.path.exists(target_path):
            os.remove(target_path)
        # Fix: report the actual filename (the message previously contained a
        # literal "(unknown)" placeholder).
        err = f"Error occurred while trying to download file: '{filename}' Error: '{exception}'"
        logger.error(err, exc_info=False)
        raise exception

    def get_artifact_checksum(self, download_context: DownloadContext) -> Optional[str]:
        """Return the artifact's SHA-256 checksum header via a HEAD request.

        Fix: annotated ``Optional[str]`` — ``headers.get`` returns None when
        the header is absent.
        """
        url = f"{self.uri}/{download_context.repo_key}/{download_context.source_url}"
        try:
            with self.http_client.head(url=url, stream=True) as response:
                response.raise_for_status()
                return response.headers.get(CHECKSUM_SHA2_HEADER)

        except Exception as exception:
            logger.error(exception.__cause__, exc_info=False)
            raise exception

    def upload_entity_manifest(
        self,
        entity_type: FrogMLEntityTypeInfo,
        filename: str,
        payload: str,
        url: str,
        properties: Optional[dict[str, str]],
        stream: bool = False,
    ) -> None:
        """PUT the manifest as multipart: the manifest stream plus an
        ``additionalData`` part carrying the user properties as JSON."""
        body_part_name = f"{entity_type.body_part_stream}"

        try:
            files = {
                f"{body_part_name}": (
                    f"{body_part_name}",
                    payload,
                    "application/octet-stream",
                ),  # Include the InputStream
                "additionalData": (
                    "additionalData",
                    json.dumps(properties),
                    "application/octet-stream",
                ),  # Include the object
            }
            with self.http_client.put(url=url, files=files, stream=stream) as response:
                response.raise_for_status()
        except Exception as exception:
            # Fix: report the actual filename (was a literal "(unknown)").
            err = f"Error occurred while trying to upload file: '{filename}' Error: '{exception}'"
            logger.error(err, exc_info=False)
            raise exception

    def upload_file(self, file_path: str, url: str) -> None:
        """Upload a local file with a progress bar; wraps failures in the
        same exception type with the path appended."""
        wrapped_file = None
        try:
            file_size = os.stat(file_path).st_size
            with (
                self.__initialize_progress_bar(file_size, file_path) as pbar,
                open(file_path, "rb") as file,
            ):
                wrapped_file = CallbackIOWrapper(pbar.update, file, "read")
                with self.http_client.put(url=url, payload=wrapped_file) as response:
                    response.raise_for_status()
        except Exception as exception:
            err = f"Error occurred while trying to upload file: '{file_path}' Error: '{exception}'"
            logger.error(err, exc_info=False)
            # NOTE(review): re-raising ``type(exception)(msg)`` assumes every
            # exception type accepts a single message argument — confirm.
            raise type(exception)(f"{err} File: {file_path}") from exception
        finally:
            if wrapped_file is not None:
                wrapped_file.close()

    def checksum_deployment(
        self,
        checksum: Checksums,
        url: str,
        full_path: str,
        stream: bool = False,
    ) -> bool:
        """Attempt a checksum-only deploy (X-Checksum-Deploy).

        Returns True when Artifactory already has the content (HTTP 200/201)
        and shows a completed progress bar; False otherwise.
        """
        response = self.http_client.put(
            url=url,
            headers={"X-Checksum-Sha256": checksum.sha2, "X-Checksum-Deploy": "true"},
            stream=stream,
        )
        if response.status_code not in (200, 201):
            return False
        # Content already present server-side: render a fully completed bar.
        file_size = os.path.getsize(full_path)
        pbar = self.__initialize_progress_bar(file_size, full_path)
        pbar.update(file_size)
        pbar.close()
        return True

    @staticmethod
    def __initialize_progress_bar(total_size: int, filename: str) -> tqdm:
        # Byte-scaled tqdm bar labelled with the file being transferred.
        return tqdm(
            total=total_size, unit="B", unit_scale=True, desc=filename, initial=0
        )

    def encrypt_password(self) -> Response:
        """
        returns encrypted password as text
        """
        return self.http_client.get(
            url=join_url(self.uri, "/api/security/encryptedPassword")
        )

    def ping(self) -> Response:
        """
        Sends a ping to Artifactory to validate login status
        """
        url = join_url(self.uri, "api/system/ping")
        return self.http_client.get(url=url)

    def get_artifactory_version(self) -> Response:
        """Return the Artifactory version endpoint response."""
        return self.http_client.get(url=join_url(self.uri, "/api/system/version"))

    def create_machinelearning_local_repo(self, repo_name: str) -> Response:
        """Create a local repository of package type 'machinelearning'."""
        data = {
            "rclass": "local",
            "packageType": "machinelearning",
        }
        return self.http_client.put(
            url=join_url(self.uri, "/api/repositories/" + repo_name), json=data
        )

    def delete_frogml_local_repo(self, repo_name: str) -> Response:
        """Delete the given local repository."""
        return self.http_client.delete(
            url=join_url(self.uri, "/api/repositories/" + repo_name)
        )
|
304
|
+
|
305
|
+
|
306
|
+
class RetryWithLog(Retry):
    """
    Adding extra logs before making a retry request
    """

    def __init__(self, *args, **kwargs):
        history = kwargs.get("history")
        # Fix 1: guard with truthiness — ``Retry.new()`` forwards ``history``
        # even when it is still an empty tuple, and the old
        # ``is not None`` check then crashed on ``history[-1]``.
        # Fix 2: the old message used ``${...}``, printing a literal '$'
        # before the interpolated error.
        if history:
            logger.debug(f"Error: {history[-1].error}\nretrying...")
        super().__init__(*args, **kwargs)
|
@@ -0,0 +1 @@
|
|
1
|
+
from ._login_cli import login as frogml_login
|