digitalhub 0.9.1__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of digitalhub might be problematic. Click here for more details.

Files changed (121)
  1. digitalhub/__init__.py +2 -3
  2. digitalhub/client/_base/api_builder.py +1 -1
  3. digitalhub/client/_base/client.py +25 -2
  4. digitalhub/client/_base/params_builder.py +16 -0
  5. digitalhub/client/dhcore/api_builder.py +10 -4
  6. digitalhub/client/dhcore/client.py +30 -398
  7. digitalhub/client/dhcore/configurator.py +361 -0
  8. digitalhub/client/dhcore/error_parser.py +107 -0
  9. digitalhub/client/dhcore/models.py +13 -23
  10. digitalhub/client/dhcore/params_builder.py +178 -0
  11. digitalhub/client/dhcore/utils.py +4 -44
  12. digitalhub/client/local/api_builder.py +13 -18
  13. digitalhub/client/local/client.py +18 -2
  14. digitalhub/client/local/enums.py +11 -0
  15. digitalhub/client/local/params_builder.py +116 -0
  16. digitalhub/configurator/api.py +31 -0
  17. digitalhub/configurator/configurator.py +195 -0
  18. digitalhub/configurator/credentials_store.py +65 -0
  19. digitalhub/configurator/ini_module.py +74 -0
  20. digitalhub/entities/_base/_base/entity.py +2 -2
  21. digitalhub/entities/_base/context/entity.py +4 -4
  22. digitalhub/entities/_base/entity/builder.py +5 -5
  23. digitalhub/entities/_base/executable/entity.py +2 -2
  24. digitalhub/entities/_base/material/entity.py +12 -12
  25. digitalhub/entities/_base/material/status.py +1 -1
  26. digitalhub/entities/_base/material/utils.py +2 -2
  27. digitalhub/entities/_base/unversioned/entity.py +2 -2
  28. digitalhub/entities/_base/versioned/entity.py +2 -2
  29. digitalhub/entities/_commons/enums.py +2 -0
  30. digitalhub/entities/_commons/metrics.py +164 -0
  31. digitalhub/entities/_commons/types.py +5 -0
  32. digitalhub/entities/_commons/utils.py +2 -2
  33. digitalhub/entities/_processors/base.py +527 -0
  34. digitalhub/entities/{_operations/processor.py → _processors/context.py} +212 -837
  35. digitalhub/entities/_processors/utils.py +158 -0
  36. digitalhub/entities/artifact/artifact/spec.py +3 -1
  37. digitalhub/entities/artifact/crud.py +13 -12
  38. digitalhub/entities/artifact/utils.py +1 -1
  39. digitalhub/entities/builders.py +6 -18
  40. digitalhub/entities/dataitem/_base/entity.py +0 -41
  41. digitalhub/entities/dataitem/crud.py +27 -15
  42. digitalhub/entities/dataitem/table/entity.py +49 -35
  43. digitalhub/entities/dataitem/table/models.py +4 -3
  44. digitalhub/{utils/data_utils.py → entities/dataitem/table/utils.py} +46 -54
  45. digitalhub/entities/dataitem/utils.py +58 -10
  46. digitalhub/entities/function/crud.py +9 -9
  47. digitalhub/entities/model/_base/entity.py +120 -0
  48. digitalhub/entities/model/_base/spec.py +6 -17
  49. digitalhub/entities/model/_base/status.py +10 -0
  50. digitalhub/entities/model/crud.py +13 -12
  51. digitalhub/entities/model/huggingface/spec.py +9 -4
  52. digitalhub/entities/model/mlflow/models.py +2 -2
  53. digitalhub/entities/model/mlflow/spec.py +7 -7
  54. digitalhub/entities/model/mlflow/utils.py +44 -5
  55. digitalhub/entities/project/_base/entity.py +317 -9
  56. digitalhub/entities/project/_base/spec.py +8 -6
  57. digitalhub/entities/project/crud.py +12 -11
  58. digitalhub/entities/run/_base/entity.py +103 -6
  59. digitalhub/entities/run/_base/spec.py +4 -2
  60. digitalhub/entities/run/_base/status.py +12 -0
  61. digitalhub/entities/run/crud.py +8 -8
  62. digitalhub/entities/secret/_base/entity.py +3 -3
  63. digitalhub/entities/secret/_base/spec.py +4 -2
  64. digitalhub/entities/secret/crud.py +11 -9
  65. digitalhub/entities/task/_base/entity.py +4 -4
  66. digitalhub/entities/task/_base/models.py +51 -40
  67. digitalhub/entities/task/_base/spec.py +2 -0
  68. digitalhub/entities/task/_base/utils.py +2 -2
  69. digitalhub/entities/task/crud.py +12 -8
  70. digitalhub/entities/workflow/crud.py +9 -9
  71. digitalhub/factory/utils.py +9 -9
  72. digitalhub/readers/{_base → data/_base}/builder.py +1 -1
  73. digitalhub/readers/{_base → data/_base}/reader.py +16 -4
  74. digitalhub/readers/{api.py → data/api.py} +2 -2
  75. digitalhub/readers/{factory.py → data/factory.py} +3 -3
  76. digitalhub/readers/{pandas → data/pandas}/builder.py +2 -2
  77. digitalhub/readers/{pandas → data/pandas}/reader.py +110 -30
  78. digitalhub/readers/query/__init__.py +0 -0
  79. digitalhub/stores/_base/store.py +59 -69
  80. digitalhub/stores/api.py +8 -33
  81. digitalhub/stores/builder.py +44 -161
  82. digitalhub/stores/local/store.py +106 -89
  83. digitalhub/stores/remote/store.py +86 -11
  84. digitalhub/stores/s3/configurator.py +108 -0
  85. digitalhub/stores/s3/enums.py +17 -0
  86. digitalhub/stores/s3/models.py +21 -0
  87. digitalhub/stores/s3/store.py +154 -70
  88. digitalhub/{utils/s3_utils.py → stores/s3/utils.py} +7 -3
  89. digitalhub/stores/sql/configurator.py +88 -0
  90. digitalhub/stores/sql/enums.py +16 -0
  91. digitalhub/stores/sql/models.py +24 -0
  92. digitalhub/stores/sql/store.py +106 -85
  93. digitalhub/{readers/_commons → utils}/enums.py +5 -1
  94. digitalhub/utils/exceptions.py +6 -0
  95. digitalhub/utils/file_utils.py +8 -7
  96. digitalhub/utils/generic_utils.py +28 -15
  97. digitalhub/utils/git_utils.py +16 -9
  98. digitalhub/utils/types.py +5 -0
  99. digitalhub/utils/uri_utils.py +2 -2
  100. {digitalhub-0.9.1.dist-info → digitalhub-0.10.0.dist-info}/METADATA +25 -31
  101. {digitalhub-0.9.1.dist-info → digitalhub-0.10.0.dist-info}/RECORD +108 -99
  102. {digitalhub-0.9.1.dist-info → digitalhub-0.10.0.dist-info}/WHEEL +1 -2
  103. digitalhub/client/dhcore/env.py +0 -23
  104. digitalhub/entities/_base/project/entity.py +0 -341
  105. digitalhub-0.9.1.dist-info/top_level.txt +0 -2
  106. test/local/CRUD/test_artifacts.py +0 -96
  107. test/local/CRUD/test_dataitems.py +0 -96
  108. test/local/CRUD/test_models.py +0 -95
  109. test/local/imports/test_imports.py +0 -66
  110. test/local/instances/test_validate.py +0 -55
  111. test/test_crud_functions.py +0 -109
  112. test/test_crud_runs.py +0 -86
  113. test/test_crud_tasks.py +0 -81
  114. test/testkfp.py +0 -37
  115. test/testkfp_pipeline.py +0 -22
  116. /digitalhub/{entities/_base/project → configurator}/__init__.py +0 -0
  117. /digitalhub/entities/{_operations → _processors}/__init__.py +0 -0
  118. /digitalhub/readers/{_base → data}/__init__.py +0 -0
  119. /digitalhub/readers/{_commons → data/_base}/__init__.py +0 -0
  120. /digitalhub/readers/{pandas → data/pandas}/__init__.py +0 -0
  121. {digitalhub-0.9.1.dist-info → digitalhub-0.10.0.dist-info/licenses}/LICENSE.txt +0 -0
@@ -1,39 +1,23 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import typing
3
4
  from pathlib import Path
4
5
  from typing import Any
5
6
 
6
7
  import pyarrow as pa
7
8
  import pyarrow.parquet as pq
8
- from sqlalchemy import MetaData, Table, create_engine
9
+ from sqlalchemy import MetaData, Table, create_engine, select
9
10
  from sqlalchemy.engine import Engine
10
- from sqlalchemy.engine.row import LegacyRow
11
11
  from sqlalchemy.exc import SQLAlchemyError
12
12
 
13
- from digitalhub.readers.api import get_reader_by_object
14
- from digitalhub.stores._base.store import Store, StoreConfig
13
+ from digitalhub.readers.data.api import get_reader_by_object
14
+ from digitalhub.stores._base.store import Store
15
+ from digitalhub.stores.sql.configurator import SqlStoreConfigurator
15
16
  from digitalhub.utils.exceptions import StoreError
17
+ from digitalhub.utils.types import SourcesOrListOfSources
16
18
 
17
-
18
- class SQLStoreConfig(StoreConfig):
19
- """
20
- SQL store configuration class.
21
- """
22
-
23
- host: str
24
- """SQL host."""
25
-
26
- port: int
27
- """SQL port."""
28
-
29
- user: str
30
- """SQL user."""
31
-
32
- password: str
33
- """SQL password."""
34
-
35
- database: str
36
- """SQL database name."""
19
+ if typing.TYPE_CHECKING:
20
+ from sqlalchemy.engine.row import Row
37
21
 
38
22
 
39
23
  class SqlStore(Store):
@@ -42,9 +26,10 @@ class SqlStore(Store):
42
26
  artifacts on SQL based storage.
43
27
  """
44
28
 
45
- def __init__(self, name: str, store_type: str, config: SQLStoreConfig) -> None:
46
- super().__init__(name, store_type)
47
- self.config = config
29
+ def __init__(self, config: dict | None = None) -> None:
30
+ super().__init__()
31
+ self._configurator = SqlStoreConfigurator()
32
+ self._configurator.configure(config)
48
33
 
49
34
  ##############################
50
35
  # I/O methods
@@ -101,7 +86,7 @@ class SqlStore(Store):
101
86
 
102
87
  def upload(
103
88
  self,
104
- src: str | list[str],
89
+ src: SourcesOrListOfSources,
105
90
  dst: str,
106
91
  ) -> list[tuple[str, str]]:
107
92
  """
@@ -135,50 +120,75 @@ class SqlStore(Store):
135
120
  return []
136
121
 
137
122
  ##############################
138
- # Private I/O methods
123
+ # Datastore methods
139
124
  ##############################
140
125
 
141
- def _download_table(self, schema: str, table: str, dst: str) -> str:
126
+ def read_df(
127
+ self,
128
+ path: SourcesOrListOfSources,
129
+ file_format: str | None = None,
130
+ engine: str | None = None,
131
+ **kwargs,
132
+ ) -> Any:
142
133
  """
143
- Download a table from SQL based storage.
134
+ Read DataFrame from path.
144
135
 
145
136
  Parameters
146
137
  ----------
147
- schema : str
148
- The origin schema.
149
- table : str
150
- The origin table.
151
- dst : str
152
- The destination path.
138
+ path : SourcesOrListOfSources
139
+ Path(s) to read DataFrame from.
140
+ file_format : str
141
+ Extension of the file.
142
+ engine : str
143
+ Dataframe engine (pandas, polars, etc.).
144
+ **kwargs : dict
145
+ Keyword arguments.
153
146
 
154
147
  Returns
155
148
  -------
156
- str
157
- The destination path.
149
+ Any
150
+ DataFrame.
158
151
  """
159
- engine = self._check_factory(schema=schema)
152
+ if isinstance(path, list):
153
+ raise StoreError("SQL store can only read a single DataFrame at a time.")
160
154
 
161
- # Read the table from the database
162
- sa_table = Table(table, MetaData(), autoload_with=engine)
163
- query = sa_table.select()
164
- with engine.begin() as conn:
165
- result: list[LegacyRow] = conn.execute(query).fetchall()
155
+ reader = self._get_reader(engine)
156
+ schema = self._get_schema(path)
157
+ table = self._get_table_name(path)
158
+ sql_engine = self._check_factory(schema=schema)
166
159
 
167
- # Parse the result
168
- data = self._parse_result(result)
160
+ sa_table = Table(table, MetaData(), autoload_with=sql_engine)
161
+ stm = select(sa_table)
169
162
 
170
- # Convert the result to a pyarrow table and
171
- # write the pyarrow table to a Parquet file
172
- arrow_table = pa.Table.from_pydict(data)
173
- pq.write_table(arrow_table, dst)
163
+ return reader.read_table(stm, sql_engine, **kwargs)
174
164
 
175
- engine.dispose()
165
+ def query(
166
+ self,
167
+ query: str,
168
+ path: str,
169
+ engine: str | None = None,
170
+ ) -> Any:
171
+ """
172
+ Query data from database.
176
173
 
177
- return dst
174
+ Parameters
175
+ ----------
176
+ query : str
177
+ The query to execute.
178
+ path : str
179
+ Path to the database.
180
+ engine : str
181
+ Dataframe engine (pandas, polars, etc.).
178
182
 
179
- ##############################
180
- # Datastore methods
181
- ##############################
183
+ Returns
184
+ -------
185
+ Any
186
+ DataFrame.
187
+ """
188
+ reader = self._get_reader(engine)
189
+ schema = self._get_schema(path)
190
+ sql_engine = self._check_factory(schema=schema)
191
+ return reader.read_table(query, sql_engine)
182
192
 
183
193
  def write_df(self, df: Any, dst: str, extension: str | None = None, **kwargs) -> str:
184
194
  """
@@ -203,9 +213,47 @@ class SqlStore(Store):
203
213
  return self._upload_table(df, schema, table, **kwargs)
204
214
 
205
215
  ##############################
206
- # Private Datastore methods
216
+ # Private I/O methods
207
217
  ##############################
208
218
 
219
+ def _download_table(self, schema: str, table: str, dst: str) -> str:
220
+ """
221
+ Download a table from SQL based storage.
222
+
223
+ Parameters
224
+ ----------
225
+ schema : str
226
+ The origin schema.
227
+ table : str
228
+ The origin table.
229
+ dst : str
230
+ The destination path.
231
+
232
+ Returns
233
+ -------
234
+ str
235
+ The destination path.
236
+ """
237
+ engine = self._check_factory(schema=schema)
238
+
239
+ # Read the table from the database
240
+ sa_table = Table(table, MetaData(), autoload_with=engine)
241
+ stm = select(sa_table)
242
+ with engine.begin() as conn:
243
+ result: list[Row] = conn.execute(stm).fetchall()
244
+
245
+ # Parse the result
246
+ data = {col: [row[idx] for row in result] for idx, col in enumerate(sa_table.columns.keys())}
247
+
248
+ # Convert the result to a pyarrow table and
249
+ # write the pyarrow table to a Parquet file
250
+ arrow_table = pa.Table.from_pydict(data)
251
+ pq.write_table(arrow_table, dst)
252
+
253
+ engine.dispose()
254
+
255
+ return dst
256
+
209
257
  def _upload_table(self, df: Any, schema: str, table: str, **kwargs) -> str:
210
258
  """
211
259
  Upload a table to SQL based storage.
@@ -245,10 +293,7 @@ class SqlStore(Store):
245
293
  str
246
294
  The connection string.
247
295
  """
248
- return (
249
- f"postgresql://{self.config.user}:{self.config.password}@"
250
- f"{self.config.host}:{self.config.port}/{self.config.database}"
251
- )
296
+ return self._configurator.get_sql_conn_string()
252
297
 
253
298
  def _get_engine(self, schema: str | None = None) -> Engine:
254
299
  """
@@ -377,27 +422,3 @@ class SqlStore(Store):
377
422
  except SQLAlchemyError:
378
423
  engine.dispose()
379
424
  raise StoreError("No access to db!")
380
-
381
- @staticmethod
382
- def _parse_result(result: list[LegacyRow]) -> dict:
383
- """
384
- Convert a list of list of tuples to a dict.
385
-
386
- Parameters
387
- ----------
388
- result : list[LegacyRow]
389
- The data to convert.
390
-
391
- Returns
392
- -------
393
- dict
394
- The converted data.
395
- """
396
- data_list = [row.items() for row in result]
397
- data = {}
398
- for row in data_list:
399
- for column_name, value in row:
400
- if column_name not in data:
401
- data[column_name] = []
402
- data[column_name].append(value)
403
- return data
@@ -3,11 +3,15 @@ from __future__ import annotations
3
3
  from enum import Enum
4
4
 
5
5
 
6
- class Extensions(Enum):
6
+ class FileExtensions(Enum):
7
7
  """
8
8
  Supported file extensions.
9
9
  """
10
10
 
11
11
  CSV = "csv"
12
12
  PARQUET = "parquet"
13
+ JSON = "json"
14
+ EXCEL_OLD = "xls"
15
+ EXCEL = "xlsx"
16
+ TXT = "txt"
13
17
  FILE = "file"
@@ -71,3 +71,9 @@ class ReaderError(Exception):
71
71
  """
72
72
  Raised when incontered errors on readers.
73
73
  """
74
+
75
+
76
+ class ClientError(Exception):
77
+ """
78
+ Raised when incontered errors on clients.
79
+ """
@@ -4,6 +4,7 @@ from datetime import datetime
4
4
  from hashlib import sha256
5
5
  from mimetypes import guess_type
6
6
  from pathlib import Path
7
+ from typing import Optional
7
8
 
8
9
  from pydantic import BaseModel
9
10
 
@@ -13,12 +14,12 @@ class FileInfo(BaseModel):
13
14
  File info class.
14
15
  """
15
16
 
16
- path: str = None
17
- name: str = None
18
- content_type: str = None
19
- size: int = None
20
- hash: str = None
21
- last_modified: str = None
17
+ path: Optional[str] = None
18
+ name: Optional[str] = None
19
+ content_type: Optional[str] = None
20
+ size: Optional[int] = None
21
+ hash: Optional[str] = None
22
+ last_modified: Optional[str] = None
22
23
 
23
24
  def to_dict(self):
24
25
  return self.model_dump()
@@ -60,7 +61,7 @@ def get_file_size(data_path: str) -> int:
60
61
  return Path(data_path).stat().st_size
61
62
 
62
63
 
63
- def get_file_mime_type(data_path: str) -> str:
64
+ def get_file_mime_type(data_path: str) -> str | None:
64
65
  """
65
66
  Get the mime type of a file.
66
67
 
@@ -3,9 +3,10 @@ from __future__ import annotations
3
3
  import base64
4
4
  import importlib.util as imputil
5
5
  import json
6
- from datetime import datetime
7
- from enum import Enum
6
+ from datetime import date, datetime, time
7
+ from enum import Enum, EnumMeta
8
8
  from pathlib import Path
9
+ from types import MappingProxyType
9
10
  from typing import Any, Callable
10
11
  from zipfile import ZipFile
11
12
 
@@ -120,14 +121,14 @@ def extract_archive(path: Path, filename: Path) -> None:
120
121
  zip_file.extractall(path)
121
122
 
122
123
 
123
- class MyEncoder(json.JSONEncoder):
124
+ class CustomJsonEncoder(json.JSONEncoder):
124
125
  """
125
- Custom JSON encoder to handle numpy types.
126
+ Custom JSON encoder to handle json dumps.
126
127
  """
127
128
 
128
129
  def default(self, obj: Any) -> Any:
129
130
  """
130
- Convert numpy types to json.
131
+ Convert an object to json.
131
132
 
132
133
  Parameters
133
134
  ----------
@@ -147,11 +148,12 @@ class MyEncoder(json.JSONEncoder):
147
148
  return float(obj)
148
149
  elif isinstance(obj, np.ndarray):
149
150
  return obj.tolist()
150
- else:
151
- return str(obj)
151
+ elif isinstance(obj, (datetime, date, time)):
152
+ return obj.isoformat()
153
+ return str(obj)
152
154
 
153
155
 
154
- def dict_to_json(struct: dict) -> str:
156
+ def dump_json(struct: Any) -> str:
155
157
  """
156
158
  Convert a dict to json.
157
159
 
@@ -165,7 +167,7 @@ def dict_to_json(struct: dict) -> str:
165
167
  str
166
168
  The json string.
167
169
  """
168
- return json.dumps(struct, cls=MyEncoder)
170
+ return json.dumps(struct, cls=CustomJsonEncoder)
169
171
 
170
172
 
171
173
  def slugify_string(filename: str) -> str:
@@ -202,23 +204,34 @@ def import_function(path: Path, handler: str) -> Callable:
202
204
  Function.
203
205
  """
204
206
  spec = imputil.spec_from_file_location(path.stem, path)
207
+ if spec is None:
208
+ raise RuntimeError(f"Error loading function source from {str(path)}.")
209
+
205
210
  mod = imputil.module_from_spec(spec)
211
+ if spec.loader is None:
212
+ raise RuntimeError(f"Error getting module loader from {str(path)}.")
213
+
206
214
  spec.loader.exec_module(mod)
207
- return getattr(mod, handler)
215
+ func = getattr(mod, handler)
216
+ if not callable(func):
217
+ raise RuntimeError(f"Handler '{handler}' is not a callable.")
218
+
219
+ return func
208
220
 
209
221
 
210
- def list_enum(enum: Enum) -> list:
222
+ def list_enum(enum: EnumMeta) -> list[Any]:
211
223
  """
212
224
  Get all values of an enum.
213
225
 
214
226
  Parameters
215
227
  ----------
216
- enum : Enum
217
- Enum to get values from.
228
+ enum : EnumMeta
229
+ Enum class to get values from.
218
230
 
219
231
  Returns
220
232
  -------
221
- list
233
+ list[Any]
222
234
  List of enum values.
223
235
  """
224
- return [e.value for e in enum]
236
+ vals: MappingProxyType[str, Enum] = enum.__members__
237
+ return [member.value for member in vals.values()]
@@ -3,6 +3,7 @@ from __future__ import annotations
3
3
  import os
4
4
  import shutil
5
5
  import warnings
6
+ from enum import Enum
6
7
  from pathlib import Path
7
8
  from urllib.parse import urlparse
8
9
 
@@ -13,6 +14,16 @@ except ImportError as e:
13
14
  warnings.warn("git is not installed. Please install git and try again.", RuntimeWarning)
14
15
 
15
16
 
17
+ class GitCredentialsType(Enum):
18
+ """
19
+ Supported git credentials types.
20
+ """
21
+
22
+ USERNAME = "GIT_USERNAME"
23
+ PASSWORD = "GIT_PASSWORD"
24
+ TOKEN = "GIT_TOKEN"
25
+
26
+
16
27
  def clone_repository(path: Path, url: str) -> None:
17
28
  """
18
29
  Clone git repository.
@@ -90,12 +101,8 @@ def get_git_username_password_from_token(token: str) -> tuple[str, str]:
90
101
  """
91
102
  # Mutued from mlrun
92
103
  if token.startswith("github_pat_") or token.startswith("glpat"):
93
- username = "oauth2"
94
- password = token
95
- else:
96
- username = token
97
- password = "x-oauth-basic"
98
- return username, password
104
+ return "oauth2", token
105
+ return token, "x-oauth-basic"
99
106
 
100
107
 
101
108
  def add_credentials_git_remote_url(url: str) -> str:
@@ -115,9 +122,9 @@ def add_credentials_git_remote_url(url: str) -> str:
115
122
  url_obj = urlparse(url)
116
123
 
117
124
  # Get credentials from environment variables
118
- username = os.getenv("GIT_USERNAME")
119
- password = os.getenv("GIT_PASSWORD")
120
- token = os.getenv("GIT_TOKEN")
125
+ username = os.getenv(GitCredentialsType.USERNAME.value)
126
+ password = os.getenv(GitCredentialsType.PASSWORD.value)
127
+ token = os.getenv(GitCredentialsType.TOKEN.value)
121
128
 
122
129
  # Get credentials from token. Override username and password
123
130
  if token is not None:
@@ -0,0 +1,5 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Union
4
+
5
+ SourcesOrListOfSources = Union[str, list[str]]
@@ -93,13 +93,13 @@ def map_uri_scheme(uri: str) -> str:
93
93
  Raises
94
94
  ------
95
95
  ValueError
96
- If the scheme is unknown.
96
+ If the scheme is unknown or invalid.
97
97
  """
98
98
  scheme = urlparse(uri).scheme
99
99
  if scheme in list_enum(LocalSchemes):
100
100
  return SchemeCategory.LOCAL.value
101
101
  if scheme in list_enum(InvalidLocalSchemes):
102
- raise ValueError("For local uri, do not use any scheme.")
102
+ raise ValueError("For local URI, do not use any scheme.")
103
103
  if scheme in list_enum(RemoteSchemes):
104
104
  return SchemeCategory.REMOTE.value
105
105
  if scheme in list_enum(S3Schemes):
@@ -1,7 +1,8 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: digitalhub
3
- Version: 0.9.1
3
+ Version: 0.10.0
4
4
  Summary: Python SDK for Digitalhub
5
+ Project-URL: Homepage, https://github.com/scc-digitalhub/digitalhub-sdk
5
6
  Author-email: Fondazione Bruno Kessler <dslab@fbk.eu>, Matteo Martini <mmartini@fbk.eu>
6
7
  License: Apache License
7
8
  Version 2.0, January 2004
@@ -219,48 +220,41 @@ License: Apache License
219
220
 
220
221
  (SIL OPEN FONT LICENSE Version 1.1) The Inter font family (https://github.com/rsms/inter)
221
222
  (SIL OPEN FONT LICENSE Version 1.1) The Fira Code font family (https://github.com/tonsky/FiraCode)
222
-
223
- Project-URL: Homepage, https://github.com/scc-digitalhub/digitalhub-sdk
223
+ License-File: LICENSE.txt
224
224
  Keywords: data,dataops,kubernetes
225
225
  Classifier: License :: OSI Approved :: Apache Software License
226
226
  Classifier: Programming Language :: Python :: 3.9
227
227
  Classifier: Programming Language :: Python :: 3.10
228
228
  Classifier: Programming Language :: Python :: 3.11
229
- Requires-Python: >=3.9
230
- Description-Content-Type: text/markdown
231
- License-File: LICENSE.txt
229
+ Requires-Python: <3.13,>=3.9
232
230
  Requires-Dist: boto3
233
- Requires-Dist: pydantic
234
- Requires-Dist: sqlalchemy<2
235
- Requires-Dist: pyarrow
231
+ Requires-Dist: gitpython>=3
236
232
  Requires-Dist: numpy<2
237
- Requires-Dist: requests
238
- Requires-Dist: PyYAML
239
- Requires-Dist: python-dotenv
240
- Requires-Dist: GitPython>=3
241
233
  Requires-Dist: psycopg2-binary
234
+ Requires-Dist: pyarrow
235
+ Requires-Dist: pydantic
236
+ Requires-Dist: python-dotenv
242
237
  Requires-Dist: python-slugify
238
+ Requires-Dist: pyyaml
239
+ Requires-Dist: requests
240
+ Requires-Dist: sqlalchemy
241
+ Provides-Extra: dev
242
+ Requires-Dist: bumpver; extra == 'dev'
243
+ Requires-Dist: jsonschema; extra == 'dev'
244
+ Requires-Dist: pytest; extra == 'dev'
245
+ Requires-Dist: pytest-cov; extra == 'dev'
243
246
  Provides-Extra: full
244
- Requires-Dist: pandas<2.2,>=1.2; extra == "full"
245
- Requires-Dist: mlflow; extra == "full"
246
- Provides-Extra: pandas
247
- Requires-Dist: pandas<2.2,>=1.2; extra == "pandas"
247
+ Requires-Dist: mlflow; extra == 'full'
248
+ Requires-Dist: pandas; extra == 'full'
248
249
  Provides-Extra: mlflow
249
- Requires-Dist: mlflow; extra == "mlflow"
250
- Provides-Extra: dev
251
- Requires-Dist: black; extra == "dev"
252
- Requires-Dist: pytest; extra == "dev"
253
- Requires-Dist: bumpver; extra == "dev"
254
- Requires-Dist: ruff; extra == "dev"
255
- Requires-Dist: moto; extra == "dev"
256
- Provides-Extra: docs
257
- Requires-Dist: Sphinx>=7; extra == "docs"
258
- Requires-Dist: pydata-sphinx-theme>=0.15; extra == "docs"
259
- Requires-Dist: numpydoc>=1.6; extra == "docs"
250
+ Requires-Dist: mlflow; extra == 'mlflow'
251
+ Provides-Extra: pandas
252
+ Requires-Dist: fsspec; extra == 'pandas'
253
+ Requires-Dist: pandas; extra == 'pandas'
254
+ Description-Content-Type: text/markdown
260
255
 
261
256
  # Digitalhub Library
262
257
 
263
- The Digitalhub SDK library is used to manage entities and executions in Digitalhub from Python.
264
- It comes with a suite of tools to help you manage your projects and executions. It exposes CRUD methods to create, read, update and delete entities, and objects methods to excute functions or workflows, collect or store execution results and data.
258
+ The Digitalhub library is a python tool for managing projects, entities and executions in Digitalhub. It exposes CRUD methods to create, read, update and delete entities, tools to execute functions or workflows, collect or store execution results and data.
265
259
 
266
260
  A more detailed description of the library can be found in the [official documentation](https://scc-digitalhub.github.io/sdk-docs/).