digitalhub 0.13.0b3__py3-none-any.whl → 0.14.0b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of digitalhub might be problematic.
Files changed (67)
  1. digitalhub/__init__.py +3 -8
  2. digitalhub/entities/_base/_base/entity.py +0 -11
  3. digitalhub/entities/_base/entity/builder.py +5 -5
  4. digitalhub/entities/_base/executable/entity.py +1 -1
  5. digitalhub/entities/_base/runtime_entity/builder.py +53 -18
  6. digitalhub/entities/_commons/metrics.py +64 -30
  7. digitalhub/entities/_commons/utils.py +100 -30
  8. digitalhub/entities/_processors/base.py +160 -81
  9. digitalhub/entities/_processors/context.py +424 -224
  10. digitalhub/entities/_processors/utils.py +77 -33
  11. digitalhub/entities/artifact/crud.py +20 -4
  12. digitalhub/entities/artifact/utils.py +29 -14
  13. digitalhub/entities/dataitem/crud.py +20 -4
  14. digitalhub/entities/dataitem/table/entity.py +0 -21
  15. digitalhub/entities/dataitem/utils.py +84 -34
  16. digitalhub/entities/function/_base/entity.py +1 -1
  17. digitalhub/entities/function/crud.py +15 -4
  18. digitalhub/entities/model/_base/entity.py +21 -1
  19. digitalhub/entities/model/crud.py +21 -5
  20. digitalhub/entities/model/utils.py +29 -14
  21. digitalhub/entities/project/_base/entity.py +65 -33
  22. digitalhub/entities/project/crud.py +8 -1
  23. digitalhub/entities/run/_base/entity.py +21 -1
  24. digitalhub/entities/run/crud.py +22 -5
  25. digitalhub/entities/secret/crud.py +22 -5
  26. digitalhub/entities/task/crud.py +22 -5
  27. digitalhub/entities/trigger/crud.py +20 -4
  28. digitalhub/entities/workflow/_base/entity.py +1 -1
  29. digitalhub/entities/workflow/crud.py +15 -4
  30. digitalhub/factory/enums.py +18 -0
  31. digitalhub/factory/factory.py +136 -57
  32. digitalhub/factory/utils.py +3 -54
  33. digitalhub/stores/client/api.py +6 -10
  34. digitalhub/stores/client/builder.py +3 -3
  35. digitalhub/stores/client/dhcore/client.py +104 -162
  36. digitalhub/stores/client/dhcore/configurator.py +92 -289
  37. digitalhub/stores/client/dhcore/enums.py +0 -16
  38. digitalhub/stores/client/dhcore/params_builder.py +41 -83
  39. digitalhub/stores/client/dhcore/utils.py +14 -22
  40. digitalhub/stores/client/local/client.py +77 -45
  41. digitalhub/stores/credentials/enums.py +1 -0
  42. digitalhub/stores/credentials/ini_module.py +0 -16
  43. digitalhub/stores/data/api.py +1 -1
  44. digitalhub/stores/data/builder.py +66 -4
  45. digitalhub/stores/data/local/store.py +0 -103
  46. digitalhub/stores/data/s3/configurator.py +60 -6
  47. digitalhub/stores/data/s3/store.py +44 -2
  48. digitalhub/stores/data/sql/configurator.py +57 -7
  49. digitalhub/stores/data/sql/store.py +184 -78
  50. digitalhub/utils/file_utils.py +0 -17
  51. digitalhub/utils/generic_utils.py +1 -2
  52. digitalhub/utils/store_utils.py +44 -0
  53. {digitalhub-0.13.0b3.dist-info → digitalhub-0.14.0b0.dist-info}/METADATA +3 -2
  54. {digitalhub-0.13.0b3.dist-info → digitalhub-0.14.0b0.dist-info}/RECORD +63 -65
  55. digitalhub/entities/_commons/types.py +0 -9
  56. digitalhub/entities/task/_base/utils.py +0 -22
  57. digitalhub/stores/client/dhcore/models.py +0 -40
  58. digitalhub/stores/data/s3/utils.py +0 -78
  59. /digitalhub/entities/{_base/entity/_constructors → _constructors}/__init__.py +0 -0
  60. /digitalhub/entities/{_base/entity/_constructors → _constructors}/metadata.py +0 -0
  61. /digitalhub/entities/{_base/entity/_constructors → _constructors}/name.py +0 -0
  62. /digitalhub/entities/{_base/entity/_constructors → _constructors}/spec.py +0 -0
  63. /digitalhub/entities/{_base/entity/_constructors → _constructors}/status.py +0 -0
  64. /digitalhub/entities/{_base/entity/_constructors → _constructors}/uuid.py +0 -0
  65. {digitalhub-0.13.0b3.dist-info → digitalhub-0.14.0b0.dist-info}/WHEEL +0 -0
  66. {digitalhub-0.13.0b3.dist-info → digitalhub-0.14.0b0.dist-info}/licenses/AUTHORS +0 -0
  67. {digitalhub-0.13.0b3.dist-info → digitalhub-0.14.0b0.dist-info}/licenses/LICENSE +0 -0

digitalhub/stores/data/sql/store.py

@@ -28,8 +28,17 @@ if typing.TYPE_CHECKING:
 
 class SqlStore(Store):
     """
-    SQL store class. It implements the Store interface and provides methods to fetch and persist
-    artifacts on SQL based storage.
+    SQL-based data store implementation for database operations.
+
+    Provides functionality for reading, writing, and managing data in SQL
+    databases. Implements the Store interface with SQL-specific operations
+    including table downloads, DataFrame operations, and query execution.
+
+    Attributes
+    ----------
+    _configurator : SqlStoreConfigurator
+        The configurator instance for managing SQL database credentials
+        and connection parameters.
     """
 
     def __init__(self, configurator: Configurator | None = None) -> None:
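For orientation, a minimal sketch of how this store is typically obtained through the data-store accessor (`get_store`, the same entry point used by the new `store_utils` helpers later in this diff). Credential configuration is assumed to be in place.

```python
# Minimal sketch: obtain the SQL data store via the public store accessor.
# Assumes SQL credentials are already configured (environment or ini file).
from digitalhub.stores.data.api import get_store

sql_store = get_store("sql://")
```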
@@ -47,21 +56,33 @@ class SqlStore(Store):
         overwrite: bool = False,
     ) -> str:
         """
-        Download artifacts from storage.
+        Download a SQL table as a Parquet file to local storage.
+
+        Retrieves data from a SQL table and saves it as a Parquet file
+        at the specified destination. The source path should be in the
+        format 'sql://database/schema/table'.
 
         Parameters
         ----------
         src : str
-            Path of the material entity.
-        dst : str
-            The destination of the material entity on local filesystem.
-        overwrite : bool
-            Specify if overwrite existing file(s).
+            The SQL URI path of the table to download in the format
+            'sql://database/schema/table' or 'sql://database/table'.
+        dst : Path
+            The destination path on the local filesystem where the
+            Parquet file will be saved.
+        overwrite : bool, default False
+            Whether to overwrite existing files at the destination path.
 
         Returns
         -------
         str
-            Destination path of the downloaded files.
+            The absolute path of the downloaded Parquet file.
+
+        Raises
+        ------
+        StoreError
+            If the destination path has an invalid extension or if
+            file operations fail.
         """
         table_name = self._get_table_name(src) + ".parquet"
         # Case where dst is not provided
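A hypothetical call matching the documented parameters above; the table URI and destination are illustrative values, not taken from this diff.

```python
# Hypothetical download of a table to a local Parquet file.
# sql_store obtained as in the earlier sketch via get_store("sql://").
local_path = sql_store.download(
    src="sql://mydb/public/mytable",  # source table as a sql:// URI
    dst="./mytable.parquet",          # local destination (Parquet)
    overwrite=True,                   # replace an existing file if present
)
```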
@@ -92,12 +113,12 @@ class SqlStore(Store):
         dst: str,
     ) -> list[tuple[str, str]]:
         """
-        Upload an artifact to storage.
+        Upload artifacts to SQL storage.
 
         Raises
         ------
         StoreError
-            This method is not implemented.
+            Always raised as SQL store does not support direct upload.
         """
         raise StoreError("SQL store does not support upload.")
 
@@ -107,17 +128,12 @@ class SqlStore(Store):
         paths: list[tuple[str, str]],
     ) -> list[dict]:
         """
-        Get file information from SQL based storage.
-
-        Parameters
-        ----------
-        paths : list[str]
-            List of source paths.
+        Get file metadata information from SQL storage.
 
         Returns
         -------
         list[dict]
-            Returns files metadata.
+            Empty list.
         """
         return []
 
@@ -133,23 +149,33 @@ class SqlStore(Store):
         **kwargs,
     ) -> Any:
         """
-        Read DataFrame from path.
+        Read a DataFrame from a SQL table.
+
+        Connects to the SQL database and reads data from the specified
+        table into a DataFrame using the specified engine (pandas, polars, etc.).
 
         Parameters
         ----------
         path : SourcesOrListOfSources
-            Path(s) to read DataFrame from.
-        file_format : str
-            Extension of the file.
-        engine : str
-            Dataframe engine (pandas, polars, etc.).
+            The SQL URI path to read from in the format
+            'sql://database/schema/table'. Only single paths are supported.
+        file_format : str, optional
+            File format specification (not used for SQL operations).
+        engine : str, optional
+            DataFrame engine to use (e.g., 'pandas', 'polars').
+            If None, uses the default engine.
         **kwargs : dict
-            Keyword arguments.
+            Additional keyword arguments passed to the reader.
 
         Returns
        -------
         Any
-            DataFrame.
+            DataFrame object containing the table data.
+
+        Raises
+        ------
+        StoreError
+            If a list of paths is provided (only single path supported).
         """
         if isinstance(path, list):
             raise StoreError("SQL store can only read a single DataFrame at a time.")
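A hypothetical read following the docstring above; the URI and engine name are illustrative.

```python
# Hypothetical read of a whole table into a pandas DataFrame.
# sql_store obtained as in the earlier sketch via get_store("sql://").
table_df = sql_store.read_df("sql://mydb/public/mytable", engine="pandas")
```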
@@ -171,21 +197,26 @@ class SqlStore(Store):
         engine: str | None = None,
     ) -> Any:
         """
-        Query data from database.
+        Execute a custom SQL query and return results as a DataFrame.
+
+        Runs a SQL query against the database specified in the path
+        and returns the results using the specified DataFrame engine.
 
         Parameters
         ----------
         query : str
-            The query to execute.
+            The SQL query string to execute against the database.
         path : str
-            Path to the database.
-        engine : str
-            Dataframe engine (pandas, polars, etc.).
+            The SQL URI path specifying the database connection
+            in the format 'sql://database/schema/table'.
+        engine : str, optional
+            DataFrame engine to use for result processing
+            (e.g., 'pandas', 'polars'). If None, uses the default.
 
         Returns
         -------
         Any
-            DataFrame.
+            DataFrame object containing the query results.
         """
         reader = self._get_reader(engine)
         schema = self._get_schema(path)
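A hypothetical ad-hoc query following the docstring above; the path only supplies the connection, and all values are illustrative.

```python
# Hypothetical custom query returning a pandas DataFrame.
# sql_store obtained as in the earlier sketch via get_store("sql://").
preview = sql_store.query(
    query="SELECT * FROM mytable LIMIT 10",
    path="sql://mydb/public/mytable",
    engine="pandas",
)
```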
@@ -194,47 +225,74 @@ class SqlStore(Store):
 
     def write_df(self, df: Any, dst: str, extension: str | None = None, **kwargs) -> str:
         """
-        Write a dataframe to a database. Kwargs are passed to df.to_sql().
+        Write a DataFrame to a SQL database table.
+
+        Takes a DataFrame and writes it to the specified SQL table.
+        The destination should be in SQL URI format. Additional
+        parameters are passed to the underlying to_sql() method.
 
         Parameters
         ----------
         df : Any
-            The dataframe to write.
+            The DataFrame object to write to the database.
         dst : str
-            The destination of the dataframe.
+            The destination SQL URI in the format
+            'sql://database/schema/table' or 'sql://database/table'.
+        extension : str, optional
+            File extension parameter (not used for SQL operations).
         **kwargs : dict
-            Keyword arguments.
+            Additional keyword arguments passed to the DataFrame's
+            to_sql() method for controlling write behavior.
 
         Returns
         -------
         str
-            Path of written dataframe.
+            The SQL URI path where the DataFrame was written.
         """
         schema = self._get_schema(dst)
         table = self._get_table_name(dst)
         return self._upload_table(df, schema, table, **kwargs)
 
+    ##############################
+    # Wrapper methods
+    ##############################
+
+    def get_engine(self, schema: str | None = None) -> Engine:
+        """
+        Get a SQLAlchemy engine connected to the database.
+
+        Returns
+        -------
+        Engine
+            A SQLAlchemy engine instance connected to the database.
+        """
+        return self._check_factory(schema=schema)
+
     ##############################
     # Private I/O methods
     ##############################
 
     def _download_table(self, schema: str, table: str, dst: str) -> str:
         """
-        Download a table from SQL based storage.
+        Download a specific table from SQL database to Parquet file.
+
+        Internal method that handles the actual table download process.
+        Connects to the database, retrieves all data from the specified
+        table, and writes it to a Parquet file using PyArrow.
 
         Parameters
         ----------
         schema : str
-            The origin schema.
+            The database schema name containing the table.
         table : str
-            The origin table.
+            The name of the table to download.
         dst : str
-            The destination path.
+            The local file path where the Parquet file will be saved.
 
         Returns
         -------
         str
-            The destination path.
+            The destination file path of the created Parquet file.
         """
         engine = self._check_factory(schema=schema)
 
@@ -258,23 +316,29 @@ class SqlStore(Store):
 
     def _upload_table(self, df: Any, schema: str, table: str, **kwargs) -> str:
         """
-        Upload a table to SQL based storage.
+        Upload a DataFrame to a SQL table.
+
+        Internal method that handles writing a DataFrame to a SQL database
+        table. Uses the appropriate reader based on the DataFrame type
+        and manages the database connection.
 
         Parameters
         ----------
-        df : DataFrame
-            The dataframe.
+        df : Any
+            The DataFrame object to upload to the database.
         schema : str
-            Destination schema.
+            The target database schema name.
         table : str
-            Destination table.
+            The target table name within the schema.
         **kwargs : dict
-            Keyword arguments.
+            Additional keyword arguments passed to the write operation,
+            such as if_exists, index, method, etc.
 
         Returns
         -------
         str
-            The SQL URI where the dataframe was saved.
+            The SQL URI where the DataFrame was saved in the format
+            'sql://database/schema/table'.
         """
         reader = get_reader_by_object(df)
         engine = self._check_factory()
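A hypothetical write following the write_df docstring above; extra keyword arguments such as `if_exists` are forwarded to the DataFrame's `to_sql()`.

```python
# Hypothetical write of a pandas DataFrame to a new table.
# sql_store obtained as in the earlier sketch via get_store("sql://").
import pandas as pd

df = pd.DataFrame({"id": [1, 2], "value": ["a", "b"]})
written_uri = sql_store.write_df(df, "sql://mydb/public/new_table", if_exists="replace")
```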
@@ -288,32 +352,43 @@ class SqlStore(Store):
 
     def _get_connection_string(self) -> str:
         """
-        Get the connection string.
+        Retrieve the database connection string from the configurator.
+
+        Gets the PostgreSQL connection string using the configured
+        database credentials (username, password, host, port, database).
 
         Returns
         -------
         str
-            The connection string.
+            The PostgreSQL connection string in the format
+            'postgresql://username:password@host:port/database'.
         """
         return self._configurator.get_sql_conn_string()
 
-    def _get_engine(self, origin: str, schema: str | None = None) -> Engine:
+    def _get_engine(self, schema: str | None = None) -> Engine:
         """
-        Create engine from connection string.
+        Create a SQLAlchemy engine from the connection string.
+
+        Establishes a database engine using the configured connection
+        string with appropriate connection parameters and schema settings.
 
         Parameters
         ----------
-        origin : str
-            The origin of the credentials.
-        schema : str
-            The schema.
+        schema : str, optional
+            The database schema to set in the search path.
+            If provided, sets the PostgreSQL search_path option.
 
         Returns
         -------
         Engine
-            An SQLAlchemy engine.
+            A configured SQLAlchemy engine instance.
+
+        Raises
+        ------
+        StoreError
+            If the connection string is invalid or engine creation fails.
         """
-        connection_string = self._get_connection_string(origin)
+        connection_string = self._get_connection_string()
         if not isinstance(connection_string, str):
             raise StoreError("Connection string must be a string.")
         try:
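As an illustration of the search_path behavior described in the new `_get_engine` docstring, this is the common SQLAlchemy/psycopg2 pattern for schema-scoped PostgreSQL engines; it is a sketch of the general technique, not necessarily the exact implementation in this release.

```python
# Common pattern: scope a PostgreSQL engine to a schema via the search_path
# connection option. Credentials below are placeholders.
from sqlalchemy import create_engine

conn_str = "postgresql://username:password@host:5432/database"
engine = create_engine(conn_str, connect_args={"options": "-csearch_path=public"})
```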
@@ -326,19 +401,29 @@ class SqlStore(Store):
 
     def _check_factory(self, retry: bool = True, schema: str | None = None) -> Engine:
         """
-        Check if the database is accessible and return the engine.
+        Validate database accessibility and return a working engine.
+
+        Creates and tests a database engine, with retry capability if
+        the initial connection fails. Handles configuration changes
+        and ensures the database is accessible before returning.
 
         Parameters
         ----------
-        retry : bool
-            Whether to retry if the database is not accessible.
-        schema : str
-            The schema.
+        retry : bool, default True
+            Whether to attempt a retry with different configuration
+            if the initial connection fails.
+        schema : str, optional
+            The database schema to configure in the engine.
 
         Returns
         -------
         Engine
-            The database engine.
+            A validated SQLAlchemy engine with confirmed database access.
+
+        Raises
+        ------
+        ConfigError
+            If database access fails and retry is exhausted or disabled.
         """
         try:
             engine = self._get_engine(schema)
@@ -353,17 +438,29 @@ class SqlStore(Store):
     @staticmethod
     def _parse_path(path: str) -> dict:
         """
-        Parse the path and return the components.
+        Parse a SQL URI path into its component parts.
+
+        Breaks down a SQL URI into database, schema, and table components.
+        Supports both full three-part paths and simplified two-part paths
+        (using 'public' as default schema).
 
         Parameters
         ----------
         path : str
-            The path.
+            The SQL URI path to parse in the format
+            'sql://database/schema/table' or 'sql://database/table'.
 
         Returns
         -------
         dict
-            A dictionary containing the components of the path.
+            Dictionary containing parsed components with keys:
+            'database', 'schema', and 'table'.
+
+        Raises
+        ------
+        ValueError
+            If the path format is invalid or doesn't follow the
+            expected SQL URI structure.
         """
         # Parse path
         err_msg = "Invalid SQL path. Must be sql://<database>/<schema>/<table> or sql://<database>/<table>"
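Based on the docstring above, the two accepted URI shapes parse to components like the following (made-up example values).

```python
# Illustrative parse results for the two accepted shapes:
# "sql://mydb/analytics/sales" -> {"database": "mydb", "schema": "analytics", "table": "sales"}
# "sql://mydb/sales"           -> {"database": "mydb", "schema": "public", "table": "sales"}
```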
@@ -380,45 +477,54 @@ class SqlStore(Store):
 
     def _get_schema(self, uri: str) -> str:
         """
-        Get the name of the SQL schema from the URI.
+        Extract the schema name from a SQL URI.
+
+        Parses the SQL URI and returns the schema component.
+        Uses 'public' as the default schema if not specified in the URI.
 
         Parameters
         ----------
         uri : str
-            The URI.
+            The SQL URI to extract the schema from.
 
         Returns
         -------
         str
-            The name of the SQL schema.
+            The schema name extracted from the URI.
         """
         return str(self._parse_path(uri).get("schema"))
 
     def _get_table_name(self, uri: str) -> str:
         """
-        Get the name of the table from the URI.
+        Extract the table name from a SQL URI.
+
+        Parses the SQL URI and returns the table component,
+        which is always the last part of the URI path.
 
         Parameters
         ----------
         uri : str
-            The URI.
+            The SQL URI to extract the table name from.
 
         Returns
         -------
         str
-            The name of the table
+            The table name extracted from the URI.
         """
         return str(self._parse_path(uri).get("table"))
 
     @staticmethod
     def _check_access_to_storage(engine: Engine) -> None:
         """
-        Check if there is access to the storage.
+        Verify database connectivity using the provided engine.
+
+        Tests the database connection by attempting to connect.
+        Properly disposes of the engine if connection fails.
 
         Parameters
         ----------
         engine : Engine
-            An SQLAlchemy engine.
+            The SQLAlchemy engine to test for connectivity.
 
         Returns
         -------
@@ -426,8 +532,8 @@ class SqlStore(Store):
 
         Raises
         ------
-        StoreError
-            If there is no access to the storage.
+        ConfigError
+            If database connection cannot be established.
         """
         try:
             engine.connect()

digitalhub/utils/file_utils.py

@@ -141,23 +141,6 @@ def get_last_modified(data_path: str) -> str:
     return datetime.fromtimestamp(timestamp).astimezone().isoformat()
 
 
-def get_s3_path(src_path: str) -> str:
-    """
-    Get the S3 URI of a file path.
-
-    Parameters
-    ----------
-    src_path : str
-        Path to the file.
-
-    Returns
-    -------
-    str
-        The S3 URI of the file.
-    """
-    return Path(src_path).as_uri()
-
-
 def get_file_info_from_local(path: str, src_path: str) -> None | dict:
     """
     Get file info from a local path.

digitalhub/utils/generic_utils.py

@@ -262,8 +262,7 @@ def carriage_return_warn(string: str) -> None:
     None
     """
    if "\r\n" in string:
-        warn("String contains a carriage return. "
-             "It may not be parsed correctly from remote runtimes.")
+        warn("String contains a carriage return. It may not be parsed correctly from remote runtimes.")
 
 
 def read_source(path: str) -> str:

digitalhub/utils/store_utils.py (new file)

@@ -0,0 +1,44 @@
+# SPDX-FileCopyrightText: © 2025 DSLab - Fondazione Bruno Kessler
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from __future__ import annotations
+
+import typing
+
+from digitalhub.stores.data.api import get_store
+
+if typing.TYPE_CHECKING:
+    from digitalhub.stores.data.s3.store import S3Client, S3Store
+    from digitalhub.stores.data.sql.store import Engine, SqlStore
+
+
+def get_s3_client() -> S3Client:
+    """
+    Returns a boto3 S3 client.
+
+    Returns
+    -------
+    S3Client
+        A boto3 S3 client instance.
+    """
+    store: S3Store = get_store("s3://")
+    return store.get_s3_client()
+
+
+def get_sql_engine(schema: str | None = None) -> Engine:
+    """
+    Returns a SQLAlchemy engine connected to the database.
+
+    Parameters
+    ----------
+    schema : str, optional
+        The schema to connect to, by default None.
+
+    Returns
+    -------
+    Engine
+        A SQLAlchemy engine instance connected to the database.
+    """
+    store: SqlStore = get_store("sql://")
+    return store.get_engine(schema=schema)
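A usage sketch for the two new helpers, assuming `s3://` and `sql://` stores are already configured.

```python
# Hypothetical use of the new wrappers in digitalhub.utils.store_utils.
from digitalhub.utils.store_utils import get_s3_client, get_sql_engine

s3_client = get_s3_client()               # boto3 S3 client
engine = get_sql_engine(schema="public")  # SQLAlchemy engine (schema applied per docstring above)
```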

{digitalhub-0.13.0b3.dist-info → digitalhub-0.14.0b0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: digitalhub
-Version: 0.13.0b3
+Version: 0.14.0b0
 Summary: Python SDK for Digitalhub
 Project-URL: Homepage, https://github.com/scc-digitalhub/digitalhub-sdk
 Author-email: Fondazione Bruno Kessler <digitalhub@fbk.eu>, Matteo Martini <mmartini@fbk.eu>
@@ -252,7 +252,7 @@ Explore the full documentation at the [link](https://scc-digitalhub.github.io/sd
 To install the Digitalhub, you can use pip:
 
 ```bash
-pip install digitalhub
+pip install digitalhub[full]
 ```
 
 To be able to create and execute functions or workflows, you need to install the runtime you want to use. The Digitalhub SDK supports multiple runtimes, each with its own installation instructions:
@@ -261,6 +261,7 @@ To be able to create and execute functions or workflows, you need to install the
 - [Digitalhub SDK Runtime Dbt](https://github.com/scc-digitalhub/digitalhub-sdk-runtime-dbt)
 - [Digitalhub SDK Runtime Container](https://github.com/scc-digitalhub/digitalhub-sdk-runtime-container)
 - [Digitalhub SDK Runtime Kfp](https://github.com/scc-digitalhub/digitalhub-sdk-runtime-kfp)
+- [Digitalhub SDK Runtime Hera](https://github.com/scc-digitalhub/digitalhub-sdk-runtime-hera)
 - [Digitalhub SDK Runtime Modelserve](https://github.com/scc-digitalhub/digitalhub-sdk-runtime-modelserve)
 
 ## Development