truefoundry 0.10.7rc1__py3-none-any.whl → 0.11.1rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.


@@ -47,7 +47,12 @@ RUN apt update && \
     + _POST_USER_TEMPLATE
 )

-ADDITIONAL_PIP_PACKAGES = ["papermill>=2.6.0,<2.7.0", "ipykernel>=6.0.0,<7.0.0"]
+ADDITIONAL_PIP_PACKAGES = [
+    "papermill>=2.6.0,<2.7.0",
+    "ipykernel>=6.0.0,<7.0.0",
+    "nbconvert>=7.16.6,<7.17.0",
+    "boto3>=1.38.43,<1.40.0",
+]


 def generate_pip_install_command(
@@ -4,9 +4,77 @@


 import argparse
+import os
 import sys

+import boto3
+import nbformat
 import papermill as pm
+from botocore.client import Config
+from nbconvert import HTMLExporter
+
+
+def convert_notebook_to_html(notebook_path, output_html_path):
+    """
+    Convert a Jupyter notebook to an HTML file.
+
+    Args:
+        notebook_path: Path to the input notebook (.ipynb)
+        output_html_path: Path for the output HTML file (.html)
+    """
+    print(f"Converting notebook {notebook_path} to HTML...")
+    try:
+        with open(notebook_path, "r", encoding="utf-8") as f:
+            notebook_content = nbformat.read(f, as_version=4)
+
+        html_exporter = HTMLExporter()
+        # Use lab for https://nbconvert.readthedocs.io/en/latest/customizing.html#where-are-nbconvert-templates-installed
+        html_exporter.template_name = "lab"
+        (body, _) = html_exporter.from_notebook_node(notebook_content)
+
+        with open(output_html_path, "w", encoding="utf-8") as f:
+            f.write(body)
+        print(f"Successfully converted notebook to HTML: {output_html_path}")
+    except Exception as e:
+        print(f"Error converting notebook to HTML: {e}")
+        raise
+
+
+def upload_file_to_s3(file_path, bucket_name, s3_key):
+    print(f"Uploading {file_path} to s3://{bucket_name}/{s3_key}...")
+    # Use s3proxy for pushing data to s3
+    # The JWT token is already available in the pod
+    aws_access_key_id = os.environ.get("SPARK_APPLICATION_EVENT_LOG_JWT_TOKEN")
+    aws_secret_access_key = "__token__"
+    s3_endpoint_url = os.environ.get("S3_PROXY_URL")
+
+    if not aws_access_key_id:
+        raise ValueError(
+            "SPARK_APPLICATION_EVENT_LOG_JWT_TOKEN environment variable is not set"
+        )
+    if not s3_endpoint_url:
+        raise ValueError("S3_PROXY_URL environment variable is not set")
+
+    # Needed for the issue https://github.com/gaul/s3proxy/issues/765
+    s3_config = Config(
+        request_checksum_calculation="when_required",
+        response_checksum_validation="when_required",
+    )
+    try:
+        client = boto3.client(
+            "s3",
+            aws_access_key_id=aws_access_key_id,
+            aws_secret_access_key=aws_secret_access_key,
+            endpoint_url=s3_endpoint_url,
+            config=s3_config,
+        )
+
+        with open(file_path, "rb") as data:
+            client.put_object(Bucket=bucket_name, Key=s3_key, Body=data)
+        print(f"Successfully uploaded {file_path} to s3://{bucket_name}/{s3_key}")
+    except Exception as e:
+        print(f"Error uploading file to S3: {e}")
+        raise


 def execute_notebook(notebook_path, output_path="/tmp/output.ipynb", parameters=None):
@@ -26,8 +94,8 @@ def execute_notebook(notebook_path, output_path="/tmp/output.ipynb", parameters=

     print(f"Starting execution of notebook: {notebook_path}")
     pm.execute_notebook(
-        notebook_path,
-        output_path,
+        input_path=notebook_path,
+        output_path=output_path,
         parameters=parameters,
         # TODO(gw): Replace with kernel name for venv
         kernel_name="python3",
@@ -38,6 +106,7 @@ def execute_notebook(notebook_path, output_path="/tmp/output.ipynb", parameters=
         stderr_file=sys.stderr,
     )
     print(f"Successfully executed notebook: {notebook_path}")
+    return output_path


 if __name__ == "__main__":
@@ -45,17 +114,41 @@ if __name__ == "__main__":
         description="Execute a Jupyter notebook using papermill for Spark applications"
     )
     parser.add_argument("notebook_path", help="Path to the notebook file to execute")
-
     args = parser.parse_args()

-    # TODO(gw): Add support for passing parameters to the notebook
+    output_notebook_path = "/tmp/output.ipynb"
+
+    # This would be the same as the default bucket used by servicefoundry-server
+    s3_bucket = os.environ.get("TFY_NOTEBOOK_OUTPUT_S3_BUCKET")
+    # This would be something like sparkjob-events/<tenant-id>
+    s3_key_prefix = os.environ.get("TFY_NOTEBOOK_OUTPUT_S3_KEY_PREFIX")
+
     try:
-        execute_notebook(args.notebook_path)
+        executed_notebook_path = execute_notebook(
+            args.notebook_path, output_path=output_notebook_path
+        )
+
+        # The following may also be modeled as an entrypoint
+        # https://papermill.readthedocs.io/en/latest/extending-entry-points.html
+        # Will take that up with next iteration where we save the executed notebook periodically
+        if s3_bucket and s3_key_prefix:
+            print("Converting notebook to HTML and uploading to S3...")
+            html_output_path = "/tmp/output.html"
+            convert_notebook_to_html(
+                notebook_path=executed_notebook_path, output_html_path=html_output_path
+            )
+
+            # Construct S3 key: use the original notebook name for the HTML file
+            notebook_name = os.path.basename(args.notebook_path)
+            s3_html_key = f"{s3_key_prefix}/output.html"
+            upload_file_to_s3(
+                file_path=html_output_path, bucket_name=s3_bucket, s3_key=s3_html_key
+            )
+            print(f"Successfully uploaded HTML to s3://{s3_bucket}/{s3_html_key}")
+
     except Exception as e:
         print(f"Error executing notebook {args.notebook_path}: {e}")
         print(
             "Exiting with status code 1 to signal failure to parent process/orchestrator"
         )
         sys.exit(1)
-
-    # TODO(gw): Publish the output notebook to blob storage from where it could be rendered
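The new runner above executes the notebook with papermill, converts the executed copy to HTML with nbconvert, and pushes the HTML through s3proxy using a pod-injected JWT as the access key. Below is a minimal sketch of driving it locally; the environment variable names come from the diff, while the script location and all values are placeholders (on the platform these are injected into the Spark pod).

```python
import os
import subprocess

# Placeholder values; on the platform these are injected into the Spark driver pod.
env = {
    **os.environ,
    "SPARK_APPLICATION_EVENT_LOG_JWT_TOKEN": "<jwt-token>",  # used as the S3 access key against s3proxy
    "S3_PROXY_URL": "http://s3proxy.internal:8080",  # hypothetical s3proxy endpoint
    "TFY_NOTEBOOK_OUTPUT_S3_BUCKET": "my-default-bucket",  # same bucket used by servicefoundry-server
    "TFY_NOTEBOOK_OUTPUT_S3_KEY_PREFIX": "sparkjob-events/tenant-123",
}

# Executes the notebook; because the bucket and key prefix are set, the runner also
# converts /tmp/output.ipynb to /tmp/output.html and uploads it as <prefix>/output.html.
subprocess.run(
    ["python", "tfy_execute_notebook.py", "analysis.ipynb"],
    env=env,
    check=True,
)
```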
@@ -30,7 +30,14 @@ from rich.progress import (
     TransferSpeedColumn,
 )
 from tqdm.utils import CallbackIOWrapper
+from truefoundry_sdk import (
+    MultiPartUploadResponse,
+    MultiPartUploadStorageProvider,
+    Operation,
+    SignedUrl,
+)

+from truefoundry import client
 from truefoundry.common.constants import ENV_VARS
 from truefoundry.common.request_utils import (
     augmented_raise_for_status,
@@ -45,23 +52,13 @@ from truefoundry.common.storage_provider_utils import (
 )
 from truefoundry.ml._autogen.client import ( # type: ignore[attr-defined]
     ApiClient,
-    CreateMultiPartUploadForDatasetRequestDto,
-    CreateMultiPartUploadRequestDto,
     FileInfoDto,
-    GetSignedURLForDatasetWriteRequestDto,
-    GetSignedURLsForArtifactVersionReadRequestDto,
-    GetSignedURLsForArtifactVersionWriteRequestDto,
-    GetSignedURLsForDatasetReadRequestDto,
     ListFilesForArtifactVersionRequestDto,
     ListFilesForArtifactVersionsResponseDto,
     ListFilesForDatasetRequestDto,
     ListFilesForDatasetResponseDto,
     MlfoundryArtifactsApi,
-    MultiPartUploadDto,
-    MultiPartUploadResponseDto,
-    MultiPartUploadStorageProvider,
     RunArtifactsApi,
-    SignedURLDto,
 )
 from truefoundry.ml.exceptions import MlFoundryException
 from truefoundry.ml.logger import logger
@@ -127,7 +124,7 @@ def verify_artifact_path(artifact_path):


 def _signed_url_upload_file(
-    signed_url: SignedURLDto,
+    signed_url: SignedUrl,
     local_file: str,
     progress_bar: Progress,
     abort_event: Optional[Event] = None,
@@ -206,21 +203,20 @@ def _any_future_has_failed(futures) -> bool:

 class ArtifactIdentifier(BaseModel):
     artifact_version_id: Optional[uuid.UUID] = None
+    dataset_id: Optional[str] = None
     dataset_fqn: Optional[str] = None

     @root_validator
     def _check_identifier_type(cls, values: Dict[str, Any]):
         if not values.get("artifact_version_id", False) and not values.get(
-            "dataset_fqn", False
+            "dataset_id", False
         ):
             raise MlFoundryException(
-                "One of the version_id or dataset_fqn should be passed"
+                "One of the version_id or dataset_id should be passed"
             )
-        if values.get("artifact_version_id", False) and values.get(
-            "dataset_fqn", False
-        ):
+        if values.get("artifact_version_id", False) and values.get("dataset_id", False):
             raise MlFoundryException(
-                "Exactly one of version_id or dataset_fqn should be passed"
+                "Exactly one of version_id or dataset_id should be passed"
             )
         return values

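`ArtifactIdentifier` now keys datasets by `dataset_id` instead of `dataset_fqn`; the validator requires exactly one of `artifact_version_id` or `dataset_id`, and `dataset_fqn` is only carried along as extra metadata. A rough sketch of that contract follows, assuming the import path inferred from the package RECORD.

```python
import uuid

# Import path inferred from the wheel RECORD; it may differ in practice.
from truefoundry.ml.artifact.truefoundry_artifact_repo import ArtifactIdentifier

# Exactly one of artifact_version_id / dataset_id must be provided.
by_version = ArtifactIdentifier(artifact_version_id=uuid.uuid4())
by_dataset = ArtifactIdentifier(dataset_id="<data-directory-id>", dataset_fqn="<optional-fqn>")

try:
    # dataset_fqn alone no longer satisfies the validator after this change.
    ArtifactIdentifier(dataset_fqn="<some-fqn>")
except Exception as exc:
    print(f"rejected as expected: {exc}")
```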
@@ -243,24 +239,25 @@ class MlFoundryArtifactsRepository:
         self,
         artifact_identifier: ArtifactIdentifier,
         paths,
-    ) -> List[SignedURLDto]:
+    ) -> List[SignedUrl]:
         if artifact_identifier.artifact_version_id:
-            signed_urls_response = self._mlfoundry_artifacts_api.get_signed_urls_for_read_post(
-                get_signed_urls_for_artifact_version_read_request_dto=GetSignedURLsForArtifactVersionReadRequestDto(
-                    id=str(artifact_identifier.artifact_version_id), paths=paths
-                )
+            signed_urls_response = client.artifact_versions.get_signed_urls(
+                id=str(artifact_identifier.artifact_version_id),
+                paths=paths,
+                operation=Operation.READ,
             )
-            signed_urls = signed_urls_response.signed_urls
-        elif artifact_identifier.dataset_fqn:
-            signed_urls_dataset_response = self._mlfoundry_artifacts_api.get_signed_urls_dataset_read_post(
-                get_signed_urls_for_dataset_read_request_dto=GetSignedURLsForDatasetReadRequestDto(
-                    dataset_fqn=artifact_identifier.dataset_fqn, paths=paths
-                )
+            signed_urls = signed_urls_response.data
+        elif artifact_identifier.dataset_id:
+            signed_urls_dataset_response = client.data_directories.get_signed_urls(
+                id=str(artifact_identifier.dataset_id),
+                paths=paths,
+                operation=Operation.READ,
             )
-            signed_urls = signed_urls_dataset_response.signed_urls
+            signed_urls = signed_urls_dataset_response.data
+
         else:
             raise ValueError(
-                "Invalid artifact type - both `artifact_version_id` and `dataset_fqn` both are None"
+                "Invalid artifact type - both `artifact_version_id` and `dataset_id` both are None"
             )
         return signed_urls

@@ -268,24 +265,24 @@ class MlFoundryArtifactsRepository:
         self,
         artifact_identifier: ArtifactIdentifier,
         paths: List[str],
-    ) -> List[SignedURLDto]:
+    ) -> List[SignedUrl]:
         if artifact_identifier.artifact_version_id:
-            signed_urls_response = self._mlfoundry_artifacts_api.get_signed_urls_for_write_post(
-                get_signed_urls_for_artifact_version_write_request_dto=GetSignedURLsForArtifactVersionWriteRequestDto(
-                    id=str(artifact_identifier.artifact_version_id), paths=paths
-                )
+            signed_urls_response = client.artifact_versions.get_signed_urls(
+                id=str(artifact_identifier.artifact_version_id),
+                paths=paths,
+                operation=Operation.WRITE,
             )
-            signed_urls = signed_urls_response.signed_urls
-        elif artifact_identifier.dataset_fqn:
-            signed_urls_dataset_response = self._mlfoundry_artifacts_api.get_signed_urls_for_dataset_write_post(
-                get_signed_url_for_dataset_write_request_dto=GetSignedURLForDatasetWriteRequestDto(
-                    dataset_fqn=artifact_identifier.dataset_fqn, paths=paths
-                )
+            signed_urls = signed_urls_response.data
+        elif artifact_identifier.dataset_id:
+            signed_urls_dataset_response = client.data_directories.get_signed_urls(
+                id=str(artifact_identifier.dataset_id),
+                paths=paths,
+                operation=Operation.WRITE,
             )
-            signed_urls = signed_urls_dataset_response.signed_urls
+            signed_urls = signed_urls_dataset_response.data
         else:
             raise ValueError(
-                "Invalid artifact type - both `artifact_version_id` and `dataset_fqn` both are None"
+                "Invalid artifact type - both `artifact_version_id` and `dataset_id` both are None"
             )
         return signed_urls

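Both the read and write paths now go through the `truefoundry_sdk`-backed global `client` instead of the autogenerated DTO endpoints, with the direction expressed via `Operation`. A minimal sketch of the new call shape, assuming an installed and logged-in `truefoundry` client and placeholder IDs and paths:

```python
from truefoundry import client
from truefoundry_sdk import Operation

# Signed URLs for reading files of an artifact version.
read_urls = client.artifact_versions.get_signed_urls(
    id="<artifact-version-uuid>",
    paths=["model/model.joblib"],
    operation=Operation.READ,
).data

# Signed URLs for writing files into a data directory (previously addressed by dataset_fqn).
write_urls = client.data_directories.get_signed_urls(
    id="<data-directory-id>",
    paths=["raw/part-0.parquet"],
    operation=Operation.WRITE,
).data

print(len(read_urls), len(write_urls))
```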
@@ -293,7 +290,7 @@ class MlFoundryArtifactsRepository:
         self,
         local_file: str,
         artifact_path: str,
-        signed_url: Optional[SignedURLDto],
+        signed_url: Optional[SignedUrl],
         progress_bar: Progress,
         abort_event: Optional[Event] = None,
     ):
@@ -322,28 +319,30 @@ class MlFoundryArtifactsRepository:
         artifact_identifier: ArtifactIdentifier,
         path,
         num_parts,
-    ) -> MultiPartUploadDto:
+    ) -> MultiPartUpload:
         if artifact_identifier.artifact_version_id:
-            create_multipart_response: MultiPartUploadResponseDto = self._mlfoundry_artifacts_api.create_multi_part_upload_post(
-                create_multi_part_upload_request_dto=CreateMultiPartUploadRequestDto(
-                    artifact_version_id=str(artifact_identifier.artifact_version_id),
+            create_multipart_response: MultiPartUploadResponse = (
+                client.artifact_versions.create_multi_part_upload(
+                    id=str(artifact_identifier.artifact_version_id),
                     path=path,
                     num_parts=num_parts,
                 )
             )
-            multipart_upload = create_multipart_response.multipart_upload
-        elif artifact_identifier.dataset_fqn:
-            create_multipart_for_dataset_response = self._mlfoundry_artifacts_api.create_multipart_upload_for_dataset_post(
-                create_multi_part_upload_for_dataset_request_dto=CreateMultiPartUploadForDatasetRequestDto(
-                    dataset_fqn=artifact_identifier.dataset_fqn,
+            multipart_upload = create_multipart_response.data
+
+        elif artifact_identifier.dataset_id:
+            create_multipart_for_dataset_response: MultiPartUploadResponse = (
+                client.data_directories.create_multipart_upload(
+                    id=str(artifact_identifier.dataset_id),
                     path=path,
                     num_parts=num_parts,
                 )
             )
-            multipart_upload = create_multipart_for_dataset_response.multipart_upload
+            multipart_upload = create_multipart_for_dataset_response.data
+
         else:
             raise ValueError(
-                "Invalid artifact type - both `artifact_version_id` and `dataset_fqn` both are None"
+                "Invalid artifact type - both `artifact_version_id` and `dataset_id` both are None"
             )
         return multipart_upload

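Multipart uploads are bootstrapped the same way: the repository asks the SDK client for a `MultiPartUploadResponse` and unwraps `.data`. A short sketch under the same assumptions (placeholder IDs and a configured login):

```python
from truefoundry import client

response = client.artifact_versions.create_multi_part_upload(
    id="<artifact-version-uuid>",
    path="model/weights.bin",
    num_parts=8,
)
multipart_upload = response.data

# For data directories the diff uses client.data_directories.create_multipart_upload(...)
# with the same path/num_parts arguments.
print(multipart_upload.storage_provider)
```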
@@ -370,10 +369,10 @@ class MlFoundryArtifactsRepository:
         )
         if (
             multipart_upload.storage_provider
-            is MultiPartUploadStorageProvider.S3_COMPATIBLE
+            is MultiPartUploadStorageProvider.S3COMPATIBLE
         ):
             s3_compatible_multipart_upload(
-                multipart_upload=MultiPartUpload.parse_obj(multipart_upload.to_dict()),
+                multipart_upload=MultiPartUpload.parse_obj(multipart_upload.dict()),
                 local_file=local_file,
                 executor=executor,
                 multipart_info=multipart_info,
@@ -403,7 +402,7 @@ class MlFoundryArtifactsRepository:
         artifact_path: str,
         multipart_info: _FileMultiPartInfo,
         progress_bar: Progress,
-        signed_url: Optional[SignedURLDto] = None,
+        signed_url: Optional[SignedUrl] = None,
         abort_event: Optional[Event] = None,
         executor_for_multipart_upload: Optional[ThreadPoolExecutor] = None,
     ):
@@ -678,7 +677,7 @@ class MlFoundryArtifactsRepository:
         remote_file_path: str,
         local_path: str,
         progress_bar: Optional[Progress],
-        signed_url: Optional[SignedURLDto],
+        signed_url: Optional[SignedUrl],
         abort_event: Optional[Event] = None,
     ):
         if not remote_file_path:
@@ -719,7 +718,7 @@ class MlFoundryArtifactsRepository:
         self,
         src_artifact_path: str,
         dst_local_dir_path: str,
-        signed_url: Optional[SignedURLDto],
+        signed_url: Optional[SignedUrl],
         progress_bar: Optional[Progress] = None,
         abort_event=None,
     ) -> str:
@@ -8,6 +8,7 @@ import warnings
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict, NamedTuple, Optional, Union

+from truefoundry import client
 from truefoundry.common.warnings import TrueFoundryDeprecationWarning
 from truefoundry.ml._autogen.client import ( # type: ignore[attr-defined]
     ArtifactDto,
@@ -16,7 +17,6 @@ from truefoundry.ml._autogen.client import ( # type: ignore[attr-defined]
     ArtifactVersionDto,
     CreateArtifactVersionRequestDto,
     DeleteArtifactVersionsRequestDto,
-    ExperimentsApi,
     ExternalBlobStorageSource,
     FinalizeArtifactVersionRequestDto,
     MlfoundryArtifactsApi,
@@ -444,19 +444,15 @@ def _log_artifact_version_helper(

     if run:
         mlfoundry_artifacts_api = run._mlfoundry_artifacts_api
-        experiments_api = run._experiments_api
-        ml_repo = experiments_api.get_experiment_get(
-            experiment_id=str(run._experiment_id)
-        ).experiment.name
+        repos = client.ml_repos.get(id=run._experiment_id)
+        ml_repo = repos.data.manifest.name
         ml_repo_id = run._experiment_id
     else:
         assert ml_repo is not None
         api_client = _get_api_client()
         mlfoundry_artifacts_api = MlfoundryArtifactsApi(api_client=api_client)
-        experiments_api = ExperimentsApi(api_client=api_client)
-        ml_repo_id = experiments_api.get_experiment_by_name_get(
-            experiment_name=ml_repo
-        ).experiment.experiment_id
+        result = list(client.ml_repos.list(name=ml_repo, limit=1))[0]
+        ml_repo_id = result.id

     metadata = metadata or {}
     if ARTIFACT_METADATA_TRUEFOUNDRY_KEY not in metadata:
@@ -84,7 +84,9 @@ class DataDirectory:

     def _get_artifacts_repo(self):
         return MlFoundryArtifactsRepository(
-            artifact_identifier=ArtifactIdentifier(dataset_fqn=self._dataset.fqn),
+            artifact_identifier=ArtifactIdentifier(
+                dataset_id=self._dataset.id, dataset_fqn=self._dataset.fqn
+            ),
             api_client=self._api_client,
         )

@@ -9,12 +9,12 @@ import warnings
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict, Optional, Union

+from truefoundry import client
 from truefoundry.common.warnings import TrueFoundryDeprecationWarning
 from truefoundry.ml._autogen.client import ( # type: ignore[attr-defined]
     ArtifactType,
     CreateArtifactVersionRequestDto,
     DeleteArtifactVersionsRequestDto,
-    ExperimentsApi,
     ExternalBlobStorageSource,
     FinalizeArtifactVersionRequestDto,
     Framework,
@@ -531,19 +531,15 @@ def _log_model_version( # noqa: C901

     if run:
         mlfoundry_artifacts_api = run._mlfoundry_artifacts_api
-        experiments_api = run._experiments_api
-        ml_repo = experiments_api.get_experiment_get(
-            experiment_id=str(run._experiment_id)
-        ).experiment.name
+        repos = client.ml_repos.get(id=run._experiment_id)
+        ml_repo = repos.data.manifest.name
         ml_repo_id = run._experiment_id
     else:
         assert ml_repo is not None
         api_client = _get_api_client()
         mlfoundry_artifacts_api = MlfoundryArtifactsApi(api_client=api_client)
-        experiments_api = ExperimentsApi(api_client=api_client)
-        ml_repo_id = experiments_api.get_experiment_by_name_get(
-            experiment_name=ml_repo
-        ).experiment.experiment_id
+        result = list(client.ml_repos.list(name=ml_repo, limit=1))[0]
+        ml_repo_id = result.id

     step = step or 0
     total_size = None
@@ -16,14 +16,16 @@ from typing import (
 )

 import coolname
+from truefoundry_sdk import Collaborator, MlRepoManifest, NotFoundError
+from truefoundry_sdk.core import ApiError

+from truefoundry import client
 from truefoundry.common.utils import ContextualDirectoryManager
 from truefoundry.ml import constants
 from truefoundry.ml._autogen.client import ( # type: ignore[attr-defined]
     ArtifactDto,
     ArtifactType,
     CreateDatasetRequestDto,
-    CreateExperimentRequestDto,
     CreateRunRequestDto,
     DatasetDto,
     ExperimentsApi,
@@ -44,7 +46,6 @@ from truefoundry.ml._autogen.client.exceptions import (
     ApiException,
     NotFoundException,
 )
-from truefoundry.ml.clients.servicefoundry_client import ServiceFoundryServiceClient
 from truefoundry.ml.enums import ModelFramework, ViewType
 from truefoundry.ml.exceptions import MlFoundryException
 from truefoundry.ml.internal_namespace import NAMESPACE
@@ -132,24 +133,22 @@ class MlFoundry:
             str: The id of the ML Repo.
         """
         try:
-            _ml_repo_instance = self._experiments_api.get_experiment_by_name_get(
-                experiment_name=ml_repo
-            )
-            ml_repo_instance = _ml_repo_instance.experiment
-        except NotFoundException:
+            result = list(client.ml_repos.list(name=ml_repo, limit=1))
+            ml_repo_instance = result[0]
+        except (IndexError, NotFoundError):
             err_msg = (
                 f"ML Repo Does Not Exist for name: {ml_repo}. You may either "
-                "create it from the dashboard or using client.create_ml_repo('<ml_repo_name>')"
+                f"create it from the dashboard or using "
+                f"`client.create_ml_repo(ml_repo='{ml_repo}', storage_integration_fqn='<storage_integration_fqn>')`"
             )
             raise MlFoundryException(err_msg) from None
-        except ApiException as e:
+        except ApiError as e:
             err_msg = (
                 f"Error happened in getting ML Repo based on name: "
                 f"{ml_repo}. Error details: {e}"
             )
             raise MlFoundryException(err_msg) from e
-
-        return ml_repo_instance.experiment_id
+        return ml_repo_instance.id

     def list_ml_repos(self) -> List[str]:
         """Returns a list of names of ML Repos accessible by the current user.
@@ -160,26 +159,14 @@ class MlFoundry:
         # TODO (chiragjn): This API should yield ML Repo Entities instead of just names
         # Kinda useless without it
         ml_repos_names = []
-        done, page_token, max_results = False, None, 25
-        while not done:
-            try:
-                _ml_repos = self._experiments_api.list_experiments_get(
-                    view_type=ViewType.ALL.value,
-                    max_results=max_results,
-                    page_token=page_token,
-                )
-            except ApiException as e:
-                err_msg = f"Error happened in fetching ML Repos. Error details: {e}"
-                raise MlFoundryException(err_msg) from e
-            else:
-                ml_repos = _ml_repos.experiments
-                page_token = _ml_repos.next_page_token
-                for ml_repo in ml_repos:
-                    # ML Repo with experiment_id 0 represents default ML Repo which we are removing.
-                    if ml_repo.experiment_id != "0":
-                        ml_repos_names.append(ml_repo.name)
-                if not ml_repos or page_token is None:
-                    done = True
+        max_results = 25
+        try:
+            for ml_repo_instance in client.ml_repos.list(limit=max_results):
+                if ml_repo_instance.id != "0":
+                    ml_repos_names.append(ml_repo_instance.manifest.name)
+        except ApiError as e:
+            err_msg = f"Error happened in fetching ML Repos. Error details: {e}"
+            raise MlFoundryException(err_msg) from e
         return ml_repos_names

     def create_ml_repo(
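ML Repo lookups likewise move from the `ExperimentsApi` to `client.ml_repos`, whose entities expose `id` and `manifest.name`. A small sketch of resolving a repo by name, assuming a configured login and a placeholder repo name:

```python
from truefoundry import client

matches = list(client.ml_repos.list(name="my-classification-project", limit=1))
if matches:
    ml_repo = matches[0]
    print(ml_repo.id, ml_repo.manifest.name)
else:
    print("No ML Repo found with that name")
```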
@@ -204,56 +191,60 @@

         client = get_client()

-        client.create_ml_repo(ml_repo="my-repo")
+        client.create_ml_repo(
+            ml_repo="my-repo",
+            # This controls which bucket is used.
+            # You can get this from Platform > Integrations > Copy FQN of any Blob Storage type integration.
+            storage_integration_fqn="..."
+        )
         ```
         """
         _validate_ml_repo_name(ml_repo_name=name)
         if description:
             _validate_ml_repo_description(description=description)
         try:
-            _ml_repo_instance = self._experiments_api.get_experiment_by_name_get(
-                experiment_name=name
-            )
-            existing_ml_repo = _ml_repo_instance.experiment
-        except NotFoundException:
+            result = list(client.ml_repos.list(name=name, limit=1))
+            existing_ml_repo = result[0]
+        except (IndexError, NotFoundError):
             existing_ml_repo = None
+        except ApiError as e:
+            err_msg = (
+                f"Error happened in getting ML Repo based on name: "
+                f"{name}. Error details: {e}"
+            )
+            raise MlFoundryException(err_msg) from e

         if not existing_ml_repo:
+            session = client._get_session()
+            user_info = session.user_info
+            if not user_info.email:
+                raise MlFoundryException(
+                    "Virtual accounts cannot be used to create new ML Repos"
+                )
             try:
-                self._experiments_api.create_experiment_post(
-                    create_experiment_request_dto=CreateExperimentRequestDto(
+                client.ml_repos.create_or_update(
+                    manifest=MlRepoManifest(
                         name=name,
                         description=description,
                         storage_integration_fqn=storage_integration_fqn,
-                    )
+                        collaborators=[
+                            Collaborator(
+                                subject=f"user:{user_info.email}",
+                                role_id="mlf-project-admin",
+                            )
+                        ],
+                    ),
                 )
-            except ApiException as e:
+            except ApiError as e:
                 err_msg = f"Error happened in creating ML Repo with name: {name}. Error details: {e}"
                 raise MlFoundryException(err_msg) from e
             return

-        servicefoundry_client = ServiceFoundryServiceClient(
-            tfy_host=self._api_client.tfy_host,
-            token=self._api_client.access_token,
-        )
-
-        assert existing_ml_repo.storage_integration_id is not None
-        try:
-            existing_storage_integration = (
-                servicefoundry_client.get_integration_from_id(
-                    existing_ml_repo.storage_integration_id
-                )
-            )
-        except Exception as e:
-            raise MlFoundryException(
-                "Error in getting storage integration for ML Repo"
-            ) from e
-
-        if existing_storage_integration["fqn"] != storage_integration_fqn:
+        if existing_ml_repo.manifest.storage_integration_fqn != storage_integration_fqn:
             raise MlFoundryException(
                 f"ML Repo with same name already exists with storage integration:"
-                f"{existing_storage_integration['fqn']}. Cannot update the storage integration to: "
-                f"{storage_integration_fqn}"
+                f"{existing_ml_repo.manifest.storage_integration_fqn!r}. "
+                f"Cannot update the storage integration to: {storage_integration_fqn!r}"
             )

     def create_run(
@@ -554,16 +545,20 @@ class MlFoundry:
         """
         _validate_ml_repo_name(ml_repo_name=ml_repo)
         try:
-            _ml_repo_obj = self._experiments_api.get_experiment_by_name_get(
-                experiment_name=ml_repo
-            )
-            ml_repo_obj = _ml_repo_obj.experiment
-        except ApiException as e:
+            result = list(client.ml_repos.list(name=ml_repo, limit=1))
+            ml_repo_instance = result[0]
+        except (IndexError, NotFoundError):
+            raise MlFoundryException(
+                f"ML Repo with name {ml_repo} does not exist. "
+                f"You may either create it from the dashboard or using "
+                f"`client.create_ml_repo(ml_repo='{ml_repo}', storage_integration_fqn='<storage_integration_fqn>')`"
+            ) from None
+        except ApiError as e:
             raise MlFoundryException(
                 f"ML Repo with name {ml_repo} does not exist or your user does not have permission to access it: {e}"
             ) from e

-        ml_repo_id = ml_repo_obj.experiment_id
+        ml_repo_id = ml_repo_instance.id

         page_token = None
         done = False
@@ -1196,7 +1191,12 @@ class MlFoundry:
         client = get_client()
         ml_repo = "sample-repo"

-        client.create_ml_repo(ml_repo=ml_repo)
+        client.create_ml_repo(
+            ml_repo=ml_repo,
+            # This controls which bucket is used.
+            # You can get this from Platform > Integrations > Copy FQN of any Blob Storage type integration.
+            storage_integration_fqn="..."
+        )
         client.log_artifact(
             ml_repo=ml_repo,
             name="hello-world-file",
@@ -1295,8 +1295,8 @@ class MlFoundry:
         client.create_ml_repo( # This is only required once
             ml_repo="my-classification-project",
             # This controls which bucket is used.
-            # You can get this from Integrations > Blob Storage. `None` picks the default
-            storage_integration_fqn=None
+            # You can get this from Platform > Integrations > Copy FQN of any Blob Storage type integration.
+            storage_integration_fqn="..."
         )
         model_version = client.log_model(
             ml_repo="my-classification-project",
@@ -16,12 +16,11 @@ from typing import (
 )
 from urllib.parse import urljoin, urlsplit

-from truefoundry import version
+from truefoundry import client, version
 from truefoundry.ml import constants
 from truefoundry.ml._autogen.client import ( # type: ignore[attr-defined]
     ArtifactType,
     DeleteRunRequest,
-    ExperimentsApi,
     ListArtifactVersionsRequestDto,
     ListModelVersionsRequestDto,
     LogBatchRequestDto,
@@ -110,7 +109,6 @@ class MlFoundryRun:
         ACTIVE_RUNS.add_run(self)

         self._api_client = _get_api_client()
-        self._experiments_api = ExperimentsApi(api_client=self._api_client)
         self._runs_api = RunsApi(api_client=self._api_client)
         self._metrics_api = MetricsApi(api_client=self._api_client)
         self._mlfoundry_artifacts_api = MlfoundryArtifactsApi(
@@ -165,10 +163,8 @@ class MlFoundryRun:
     @_ensure_not_deleted
     def ml_repo(self) -> str:
         """Get ml_repo name of which the current `run` is part of"""
-        _experiment = self._experiments_api.get_experiment_get(
-            experiment_id=self._experiment_id
-        )
-        return _experiment.experiment.name
+        _experiment = client.ml_repos.get(id=self._experiment_id)
+        return _experiment.data.manifest.name

     @property
     @_ensure_not_deleted
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: truefoundry
-Version: 0.10.7rc1
+Version: 0.11.1rc1
 Summary: TrueFoundry CLI
 Author-email: TrueFoundry Team <abhishek@truefoundry.com>
 Requires-Python: <3.14,>=3.8.1
@@ -30,7 +30,7 @@ Requires-Dist: requirements-parser<0.12.0,>=0.11.0
 Requires-Dist: rich-click<2.0.0,>=1.2.1
 Requires-Dist: rich<14.0.0,>=13.7.1
 Requires-Dist: tqdm<5.0.0,>=4.0.0
-Requires-Dist: truefoundry-sdk<0.2.0,>=0.1.3
+Requires-Dist: truefoundry-sdk<0.2.0,>=0.1.7
 Requires-Dist: typing-extensions>=4.0
 Requires-Dist: urllib3<3,>=1.26.18
 Requires-Dist: yq<4.0.0,>=3.1.0
@@ -66,8 +66,8 @@ truefoundry/deploy/builder/builders/tfy_notebook_buildpack/dockerfile_template.p
 truefoundry/deploy/builder/builders/tfy_python_buildpack/__init__.py,sha256=_fjqHKn80qKi68SAMMALge7_A6e1sTsQWichw8uoGIw,2025
 truefoundry/deploy/builder/builders/tfy_python_buildpack/dockerfile_template.py,sha256=f4l3fH21E2b8W3-JotMKc0AdPcCxV7LRPxxYJa7z_UQ,9134
 truefoundry/deploy/builder/builders/tfy_spark_buildpack/__init__.py,sha256=NEPlM6_vTVxp4ITa18B8DBbgYCn1q5d8be21lbgu5oY,2888
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=dAmWgAHcNdBLSedpz18r5r7n9hdXKV-PB7yFPMdaoww,5990
-truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py,sha256=PKJWNlW76dRw_XtFkRnqpzwBGuXU4w6_kIJkgoBmZ5Q,1975
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/dockerfile_template.py,sha256=2zohUaW8Yw_QREHlpRW7Pooomt19HJh44fHjlsiDmwM,6064
+truefoundry/deploy/builder/builders/tfy_spark_buildpack/tfy_execute_notebook.py,sha256=tTx-GZDVf5iB1Pyz2z5c2LH1yrb7lErFbJcr-giAIuI,5734
 truefoundry/deploy/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/deploy/cli/commands/__init__.py,sha256=qv818jxqSAygJ3h-6Ul8t-5VOgR_UrSgsVtNCl3e5G0,1408
 truefoundry/deploy/cli/commands/apply_command.py,sha256=DmXmKVokkauyKIiJDtErTwbJ5_LvQeJbTQsG5BjyKpo,2427
@@ -127,8 +127,8 @@ truefoundry/ml/exceptions.py,sha256=QpDJSWmF7dIsByS0qOQbQZ_jytdNTzkHDDO3BxhTSo0,
 truefoundry/ml/git_info.py,sha256=jvAVm9ilqivnGq8qJdUvYdd8Siv0PLtqurB-PXsS5ho,2023
 truefoundry/ml/internal_namespace.py,sha256=QcqMHp6-C2im2H_02hlhi01EIcr1HhNaZprszs13EMU,1790
 truefoundry/ml/logger.py,sha256=VT-BF3BnBYTWVq87O58F0c8uXMu94gYzsiFlGY3_7Ao,458
-truefoundry/ml/mlfoundry_api.py,sha256=WiIVpJRylBn8NrcAmXClQnavffqI9fhY5h-8Vwx4RKo,59872
-truefoundry/ml/mlfoundry_run.py,sha256=34yyQqgpG6EfrAJd37vkbCjrFoHkhvbOAxxSQcSWPtY,44320
+truefoundry/ml/mlfoundry_api.py,sha256=EZvFs-HiUDV2H0tDrgRdeotplWTZm3T5Uw-0Zq3WuHo,60174
+truefoundry/ml/mlfoundry_run.py,sha256=b0wm08OUAiO1UpzdDNBeJh-Gsm_O5kEdbY86Ugv-26Y,44181
 truefoundry/ml/model_framework.py,sha256=nVbKTtKDRBdLzt7Wrg5_vJKZ-awHbISGvL73s-V9egU,18975
 truefoundry/ml/prompt_utils.py,sha256=8FueyElVTXLnLtC3O6hKsW_snocArr_B8KG3Qv6eFIQ,2651
 truefoundry/ml/run_utils.py,sha256=0W208wSLUrbdfk2pjNcZlkUi9bNxG2JORqoe-5rVqHI,2423
@@ -347,7 +347,7 @@ truefoundry/ml/_autogen/models/schema.py,sha256=a_bp42MMPUbwO3407m0UW2W8EOhnxZXf
 truefoundry/ml/_autogen/models/signature.py,sha256=rBjpxUIsEeWM0sIyYG5uCJB18DKHR4k5yZw8TzuoP48,4987
 truefoundry/ml/_autogen/models/utils.py,sha256=c7RtSLXhOLcP8rjuUtfnMdaKVTZvvbsmw98gPAkAFrs,24371
 truefoundry/ml/artifact/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/ml/artifact/truefoundry_artifact_repo.py,sha256=0ng8RqSWufpPwz-uG8iyWkiKcrZ5lUAmS4_UOgmzkx0,37738
+truefoundry/ml/artifact/truefoundry_artifact_repo.py,sha256=ocX5EIcLQa8Uc_C3NxxgNorpxc-z1Yp4TLvmzSORPpw,36862
 truefoundry/ml/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 truefoundry/ml/cli/cli.py,sha256=MwpY7z_NEeJE_XIP7XbZELjNeu2vpMmohttHCKDRk54,335
 truefoundry/ml/cli/utils.py,sha256=j6_mZ4Spn114mz3P4QQ8jx0tmorXIuyQnHXVUSDvZi4,1035
@@ -355,16 +355,15 @@ truefoundry/ml/cli/commands/__init__.py,sha256=diDUiRUX4l6TtNLI4iF-ZblczkELM7FRV
 truefoundry/ml/cli/commands/download.py,sha256=N9MhsEQ3U24v_OmnMZT8Q4SoAi38Sm7a21unrACOSDw,2573
 truefoundry/ml/cli/commands/model_init.py,sha256=INyUAU6hiFClI8cZqX5hgnrtNbeKxlZxrjFrjzStU18,2664
 truefoundry/ml/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-truefoundry/ml/clients/servicefoundry_client.py,sha256=GiB_SmETI2uo8E0Q43cEsOG6qYb80nB6gM7l3rL7zEs,2422
 truefoundry/ml/log_types/__init__.py,sha256=g4u4D4Jaj0aBK5GtrLV88-qThKZR9pSZ17vFEkN-LmM,125
 truefoundry/ml/log_types/plot.py,sha256=LDh4uy6z2P_a2oPM2lc85c0lt8utVvunohzeMawFjZw,7572
 truefoundry/ml/log_types/pydantic_base.py,sha256=eBlw_AEyAz4iJKDP4zgJOCFWcldwQqpf7FADW1jzIQY,272
 truefoundry/ml/log_types/utils.py,sha256=xjJ21jdPScvFmw3TbVh5NCzbzJwaqiXJyiiT4xxX1EI,335
-truefoundry/ml/log_types/artifacts/artifact.py,sha256=N1M1a7Oq9fY_7s1PT7uv64s_Ek62Lj-JsPBrWdRKpdY,20050
+truefoundry/ml/log_types/artifacts/artifact.py,sha256=dZj9UkRvvU5EN-HUiuLCPsmosSS0yyHM5JNWzaaIxMA,19887
 truefoundry/ml/log_types/artifacts/constants.py,sha256=uB2JPEqwTbqevkQv2QcEMROsm_4cVAl6s0QU1MLa8SQ,1088
-truefoundry/ml/log_types/artifacts/dataset.py,sha256=UpLXoqhfONqp6YG4N8lDrDe-XhTK6ZZ9Lwg8mI0UZn4,13113
+truefoundry/ml/log_types/artifacts/dataset.py,sha256=OgWIoT59AhMw8P01FfvUKbJ3EL6HQf_Xw8X4E3Ff5Sg,13172
 truefoundry/ml/log_types/artifacts/general_artifact.py,sha256=yr-SQ2fhUR_sE1MB5zoHHYpGC8tizH_-t3lhsxCAULU,2747
-truefoundry/ml/log_types/artifacts/model.py,sha256=0_2EGwj8VYRJ3g8Ti2k75s-OiC1Tu-7Ay9U1_QYG_iw,25027
+truefoundry/ml/log_types/artifacts/model.py,sha256=kAiq7TDJ8RHG1Z4IN9mQD1dzKgmLCP1p0v_Yta9GHlM,24864
 truefoundry/ml/log_types/artifacts/utils.py,sha256=q_atcGzn3wfxItt3RABxjdris8b3njEFNuC8ihWqUSI,8088
 truefoundry/ml/log_types/image/__init__.py,sha256=fcOq8yQnNj1rkLcPeIjLXBpdA1WIeiPsXOlAAvMxx7M,76
 truefoundry/ml/log_types/image/constants.py,sha256=wLtGEOA4T5fZHSlOXPuNDLX3lpbCtwlvGKPFk_1fah0,255
@@ -381,7 +380,7 @@ truefoundry/workflow/remote_filesystem/__init__.py,sha256=LQ95ViEjJ7Ts4JcCGOxMPs
 truefoundry/workflow/remote_filesystem/logger.py,sha256=em2l7D6sw7xTLDP0kQSLpgfRRCLpN14Qw85TN7ujQcE,1022
 truefoundry/workflow/remote_filesystem/tfy_signed_url_client.py,sha256=xcT0wQmQlgzcj0nP3tJopyFSVWT1uv3nhiTIuwfXYeg,12342
 truefoundry/workflow/remote_filesystem/tfy_signed_url_fs.py,sha256=nSGPZu0Gyd_jz0KsEE-7w_BmnTD8CVF1S8cUJoxaCbc,13305
-truefoundry-0.10.7rc1.dist-info/METADATA,sha256=T0oUo-QR8p-afUbJyLte32yJXkdc5w4-iMytXtBv3iI,2508
-truefoundry-0.10.7rc1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-truefoundry-0.10.7rc1.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
-truefoundry-0.10.7rc1.dist-info/RECORD,,
+truefoundry-0.11.1rc1.dist-info/METADATA,sha256=jIzJR7W9qc5D4a-WuEne8Y8BUotdhjnc_CEcz-Bdp78,2508
+truefoundry-0.11.1rc1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+truefoundry-0.11.1rc1.dist-info/entry_points.txt,sha256=xVjn7RMN-MW2-9f7YU-bBdlZSvvrwzhpX1zmmRmsNPU,98
+truefoundry-0.11.1rc1.dist-info/RECORD,,
@@ -1,65 +0,0 @@
-import functools
-
-from truefoundry.common.constants import (
-    SERVICEFOUNDRY_CLIENT_MAX_RETRIES,
-    VERSION_PREFIX,
-)
-from truefoundry.common.exceptions import HttpRequestException
-from truefoundry.common.request_utils import (
-    http_request,
-    request_handling,
-    requests_retry_session,
-)
-from truefoundry.common.servicefoundry_client import (
-    ServiceFoundryServiceClient as BaseServiceFoundryServiceClient,
-)
-from truefoundry.common.utils import get_user_agent
-from truefoundry.ml.exceptions import MlFoundryException
-
-
-class ServiceFoundryServiceClient(BaseServiceFoundryServiceClient):
-    def __init__(self, tfy_host: str, token: str):
-        super().__init__(tfy_host=tfy_host)
-        self._token = token
-
-    @functools.cached_property
-    def _min_cli_version_required(self) -> str:
-        # TODO (chiragjn): read the mlfoundry min cli version from the config?
-        return self.python_sdk_config.truefoundry_cli_min_version
-
-    def get_integration_from_id(self, integration_id: str):
-        integration_id = integration_id or ""
-        session = requests_retry_session(retries=SERVICEFOUNDRY_CLIENT_MAX_RETRIES)
-        response = http_request(
-            method="get",
-            url=f"{self._api_server_url}/{VERSION_PREFIX}/provider-accounts/provider-integrations",
-            token=self._token,
-            timeout=3,
-            params={"id": integration_id, "type": "blob-storage"},
-            session=session,
-            headers={
-                "User-Agent": get_user_agent(),
-            },
-        )
-
-        try:
-            result = request_handling(response)
-            assert isinstance(result, dict)
-        except HttpRequestException as he:
-            raise MlFoundryException(
-                f"Failed to get storage integration from id: {integration_id}. Error: {he.message}",
-                status_code=he.status_code,
-            ) from None
-        except Exception as e:
-            raise MlFoundryException(
-                f"Failed to get storage integration from id: {integration_id}. Error: {str(e)}"
-            ) from None
-
-        data = result.get("data", result.get("providerAccounts"))
-        # TODO (chiragjn): Parse this using Pydantic
-        if data and len(data) > 0 and data[0]:
-            return data[0]
-        else:
-            raise MlFoundryException(
-                f"Invalid storage integration id: {integration_id}"
-            )