datacrunch 1.15.0__py3-none-any.whl → 1.17.1__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (72)
  1. datacrunch/__init__.py +53 -1
  2. datacrunch/datacrunch.py +44 -81
  3. datacrunch-1.17.1.dist-info/METADATA +30 -0
  4. datacrunch-1.17.1.dist-info/RECORD +5 -0
  5. datacrunch-1.17.1.dist-info/WHEEL +4 -0
  6. datacrunch/InferenceClient/__init__.py +0 -3
  7. datacrunch/InferenceClient/inference_client.py +0 -379
  8. datacrunch/__version__.py +0 -1
  9. datacrunch/authentication/__init__.py +0 -0
  10. datacrunch/authentication/authentication.py +0 -112
  11. datacrunch/balance/__init__.py +0 -0
  12. datacrunch/balance/balance.py +0 -52
  13. datacrunch/constants.py +0 -107
  14. datacrunch/containers/__init__.py +0 -33
  15. datacrunch/containers/containers.py +0 -1081
  16. datacrunch/exceptions.py +0 -29
  17. datacrunch/helpers.py +0 -13
  18. datacrunch/http_client/__init__.py +0 -0
  19. datacrunch/http_client/http_client.py +0 -241
  20. datacrunch/images/__init__.py +0 -0
  21. datacrunch/images/images.py +0 -87
  22. datacrunch/instance_types/__init__.py +0 -0
  23. datacrunch/instance_types/instance_types.py +0 -188
  24. datacrunch/instances/__init__.py +0 -0
  25. datacrunch/instances/instances.py +0 -247
  26. datacrunch/locations/__init__.py +0 -0
  27. datacrunch/locations/locations.py +0 -16
  28. datacrunch/ssh_keys/__init__.py +0 -0
  29. datacrunch/ssh_keys/ssh_keys.py +0 -112
  30. datacrunch/startup_scripts/__init__.py +0 -0
  31. datacrunch/startup_scripts/startup_scripts.py +0 -113
  32. datacrunch/volume_types/__init__.py +0 -0
  33. datacrunch/volume_types/volume_types.py +0 -66
  34. datacrunch/volumes/__init__.py +0 -0
  35. datacrunch/volumes/volumes.py +0 -398
  36. datacrunch-1.15.0.dist-info/METADATA +0 -208
  37. datacrunch-1.15.0.dist-info/RECORD +0 -69
  38. datacrunch-1.15.0.dist-info/WHEEL +0 -5
  39. datacrunch-1.15.0.dist-info/licenses/LICENSE +0 -21
  40. datacrunch-1.15.0.dist-info/top_level.txt +0 -2
  41. tests/__init__.py +0 -0
  42. tests/integration_tests/__init__.py +0 -0
  43. tests/integration_tests/conftest.py +0 -20
  44. tests/integration_tests/test_instances.py +0 -36
  45. tests/integration_tests/test_locations.py +0 -65
  46. tests/integration_tests/test_volumes.py +0 -94
  47. tests/unit_tests/__init__.py +0 -0
  48. tests/unit_tests/authentication/__init__.py +0 -0
  49. tests/unit_tests/authentication/test_authentication.py +0 -202
  50. tests/unit_tests/balance/__init__.py +0 -0
  51. tests/unit_tests/balance/test_balance.py +0 -25
  52. tests/unit_tests/conftest.py +0 -21
  53. tests/unit_tests/containers/__init__.py +0 -1
  54. tests/unit_tests/containers/test_containers.py +0 -959
  55. tests/unit_tests/http_client/__init__.py +0 -0
  56. tests/unit_tests/http_client/test_http_client.py +0 -193
  57. tests/unit_tests/images/__init__.py +0 -0
  58. tests/unit_tests/images/test_images.py +0 -41
  59. tests/unit_tests/instance_types/__init__.py +0 -0
  60. tests/unit_tests/instance_types/test_instance_types.py +0 -87
  61. tests/unit_tests/instances/__init__.py +0 -0
  62. tests/unit_tests/instances/test_instances.py +0 -483
  63. tests/unit_tests/ssh_keys/__init__.py +0 -0
  64. tests/unit_tests/ssh_keys/test_ssh_keys.py +0 -198
  65. tests/unit_tests/startup_scripts/__init__.py +0 -0
  66. tests/unit_tests/startup_scripts/test_startup_scripts.py +0 -196
  67. tests/unit_tests/test_datacrunch.py +0 -65
  68. tests/unit_tests/test_exceptions.py +0 -33
  69. tests/unit_tests/volume_types/__init__.py +0 -0
  70. tests/unit_tests/volume_types/test_volume_types.py +0 -50
  71. tests/unit_tests/volumes/__init__.py +0 -0
  72. tests/unit_tests/volumes/test_volumes.py +0 -641
@@ -1,1081 +0,0 @@
1
- """Container deployment and management service for DataCrunch.
2
-
3
- This module provides functionality for managing container deployments, including
4
- creation, updates, deletion, and monitoring of containerized applications.
5
- """
6
-
7
- import base64
8
- import os
9
- from dataclasses import dataclass, field
10
- from dataclasses_json import dataclass_json, Undefined # type: ignore
11
- from typing import List, Optional, Dict, Any, Union
12
- from enum import Enum
13
-
14
- from datacrunch.http_client.http_client import HTTPClient
15
- from datacrunch.InferenceClient import InferenceClient, InferenceResponse
16
-
17
-
18
- # API endpoints
19
- CONTAINER_DEPLOYMENTS_ENDPOINT = '/container-deployments'
20
- SERVERLESS_COMPUTE_RESOURCES_ENDPOINT = '/serverless-compute-resources'
21
- CONTAINER_REGISTRY_CREDENTIALS_ENDPOINT = '/container-registry-credentials'
22
- SECRETS_ENDPOINT = '/secrets'
23
- FILESET_SECRETS_ENDPOINT = '/file-secrets'
24
-
25
-
26
- class EnvVarType(str, Enum):
27
- """Types of environment variables that can be set in containers."""
28
-
29
- PLAIN = "plain"
30
- SECRET = "secret"
31
-
32
-
33
- class SecretType(str, Enum):
34
- """Types of secrets that can be set in containers."""
35
-
36
- GENERIC = "generic" # Regular secret, can be used in env vars
37
- FILESET = "file-secret" # A file secret that can be mounted into the container
38
-
39
-
40
- class VolumeMountType(str, Enum):
41
- """Types of volume mounts that can be configured for containers."""
42
-
43
- SCRATCH = "scratch"
44
- SECRET = "secret"
45
- MEMORY = "memory"
46
- SHARED = "shared"
47
-
48
-
49
- class ContainerRegistryType(str, Enum):
50
- """Supported container registry types."""
51
-
52
- GCR = "gcr"
53
- DOCKERHUB = "dockerhub"
54
- GITHUB = "ghcr"
55
- AWS_ECR = "aws-ecr"
56
- CUSTOM = "custom"
57
-
58
-
59
- class ContainerDeploymentStatus(str, Enum):
60
- """Possible states of a container deployment."""
61
-
62
- INITIALIZING = "initializing"
63
- HEALTHY = "healthy"
64
- DEGRADED = "degraded"
65
- UNHEALTHY = "unhealthy"
66
- PAUSED = "paused"
67
- QUOTA_REACHED = "quota_reached"
68
- IMAGE_PULLING = "image_pulling"
69
- VERSION_UPDATING = "version_updating"
70
-
71
-
72
- @dataclass_json
73
- @dataclass
74
- class HealthcheckSettings:
75
- """Configuration for container health checking.
76
-
77
- Attributes:
78
- enabled: Whether health checking is enabled.
79
- port: Port number to perform health check on.
80
- path: HTTP path to perform health check on.
81
- """
82
-
83
- enabled: bool = True
84
- port: Optional[int] = None
85
- path: Optional[str] = None
86
-
87
-
88
- @dataclass_json
89
- @dataclass
90
- class EntrypointOverridesSettings:
91
- """Configuration for overriding container entrypoint and command.
92
-
93
- Attributes:
94
- enabled: Whether entrypoint overrides are enabled.
95
- entrypoint: List of strings forming the entrypoint command.
96
- cmd: List of strings forming the command arguments.
97
- """
98
-
99
- enabled: bool = True
100
- entrypoint: Optional[List[str]] = None
101
- cmd: Optional[List[str]] = None
102
-
103
-
104
- @dataclass_json
105
- @dataclass
106
- class EnvVar:
107
- """Environment variable configuration for containers.
108
-
109
- Attributes:
110
- name: Name of the environment variable.
111
- value_or_reference_to_secret: Direct value or reference to a secret.
112
- type: Type of the environment variable.
113
- """
114
-
115
- name: str
116
- value_or_reference_to_secret: str
117
- type: EnvVarType
118
-
119
-
120
- @dataclass_json(undefined=Undefined.EXCLUDE)
121
- @dataclass
122
- class VolumeMount:
123
- """Base class for volume mount configurations.
124
-
125
- Attributes:
126
- type: Type of volume mount.
127
- mount_path: Path where the volume should be mounted in the container.
128
- size_in_mb: Size of the volume in megabytes. Deprecated: use MemoryMount for memory volumes instead.
129
- """
130
-
131
- type: VolumeMountType
132
- mount_path: str
133
- # Deprecated: use MemoryMount for memory volumes instead.
134
- size_in_mb: Optional[int] = field(default=None, kw_only=True)
135
-
136
-
137
- @dataclass_json(undefined=Undefined.EXCLUDE)
138
- @dataclass
139
- class GeneralStorageMount(VolumeMount):
140
- """General storage volume mount configuration.
141
- """
142
-
143
- def __init__(self, mount_path: str):
144
- """Initialize a general scratch volume mount.
145
-
146
- Args:
147
- mount_path: Path where the volume should be mounted in the container.
148
- """
149
- super().__init__(type=VolumeMountType.SCRATCH, mount_path=mount_path)
150
-
151
-
152
- @dataclass_json(undefined=Undefined.EXCLUDE)
153
- @dataclass
154
- class SecretMount(VolumeMount):
155
- """Secret volume mount configuration.
156
-
157
- A secret volume mount allows mounting secret files into the container.
158
-
159
- Attributes:
160
- secret_name: The name of the fileset secret to mount. This secret must be created in advance, for example using `create_fileset_secret_from_file_paths`
161
- file_names: List of file names that are part of the fileset secret.
162
- """
163
-
164
- secret_name: str
165
- file_names: Optional[List[str]] = None
166
-
167
- def __init__(self, mount_path: str, secret_name: str, file_names: Optional[List[str]] = None):
168
- self.secret_name = secret_name
169
- self.file_names = file_names
170
- super().__init__(type=VolumeMountType.SECRET, mount_path=mount_path)
171
-
172
-
173
- @dataclass_json(undefined=Undefined.EXCLUDE)
174
- @dataclass
175
- class MemoryMount(VolumeMount):
176
- """Memory volume mount configuration.
177
-
178
- A memory volume mount provides high-speed, ephemeral in-memory storage inside your container.
179
- The mount path is currently hardcoded to /dev/shm and cannot be changed.
180
-
181
- Attributes:
182
- size_in_mb: Size of the memory volume in megabytes.
183
- """
184
-
185
- size_in_mb: int
186
-
187
- def __init__(self, size_in_mb: int):
188
- super().__init__(type=VolumeMountType.MEMORY, mount_path='/dev/shm')
189
- self.size_in_mb = size_in_mb
190
-
191
-
192
- @dataclass_json(undefined=Undefined.EXCLUDE)
193
- @dataclass
194
- class SharedFileSystemMount(VolumeMount):
195
- """Shared filesystem volume mount configuration.
196
-
197
- A shared filesystem volume mount allows mounting a shared filesystem into the container.
198
- """
199
-
200
- volume_id: str # The ID of the shared filesystem volume to mount, needs to be created first
201
-
202
- def __init__(self, mount_path: str, volume_id: str):
203
- super().__init__(type=VolumeMountType.SHARED, mount_path=mount_path)
204
- self.volume_id = volume_id
205
-
206
-
207
- @dataclass_json
208
- @dataclass
209
- class Container:
210
- """Container configuration for deployment creation and updates.
211
-
212
- Attributes:
213
- image: Container image to use.
214
- exposed_port: Port to expose from the container.
215
- name: Name of the container (system-managed, read-only).
216
- healthcheck: Optional health check configuration.
217
- entrypoint_overrides: Optional entrypoint override settings.
218
- env: Optional list of environment variables.
219
- volume_mounts: Optional list of volume mounts.
220
- """
221
-
222
- image: Union[str, dict]
223
- exposed_port: int
224
- name: Optional[str] = None
225
- healthcheck: Optional[HealthcheckSettings] = None
226
- entrypoint_overrides: Optional[EntrypointOverridesSettings] = None
227
- env: Optional[List[EnvVar]] = None
228
- volume_mounts: Optional[List[VolumeMount]] = None
229
-
230
-
231
- @dataclass_json
232
- @dataclass
233
- class ContainerRegistryCredentials:
234
- """Credentials for accessing a container registry.
235
-
236
- Attributes:
237
- name: Name of the credentials.
238
- """
239
-
240
- name: str
241
-
242
-
243
- @dataclass_json
244
- @dataclass
245
- class ContainerRegistrySettings:
246
- """Settings for container registry access.
247
-
248
- Attributes:
249
- is_private: Whether the registry is private.
250
- credentials: Optional credentials for accessing private registry.
251
- """
252
-
253
- is_private: bool
254
- credentials: Optional[ContainerRegistryCredentials] = None
255
-
256
-
257
- @dataclass_json
258
- @dataclass
259
- class ComputeResource:
260
- """Compute resource configuration.
261
-
262
- Attributes:
263
- name: Name of the compute resource.
264
- size: Size of the compute resource.
265
- is_available: Whether the compute resource is currently available.
266
- """
267
-
268
- name: str
269
- size: int
270
- # Made optional since it's only used in API responses
271
- is_available: Optional[bool] = None
272
-
273
-
274
- @dataclass_json
275
- @dataclass
276
- class ScalingPolicy:
277
- """Policy for controlling scaling behavior.
278
-
279
- Attributes:
280
- delay_seconds: Number of seconds to wait before applying scaling action.
281
- """
282
-
283
- delay_seconds: int
284
-
285
-
286
- @dataclass_json
287
- @dataclass
288
- class QueueLoadScalingTrigger:
289
- """Trigger for scaling based on queue load.
290
-
291
- Attributes:
292
- threshold: Queue load threshold that triggers scaling.
293
- """
294
-
295
- threshold: float
296
-
297
-
298
- @dataclass_json
299
- @dataclass
300
- class UtilizationScalingTrigger:
301
- """Trigger for scaling based on resource utilization.
302
-
303
- Attributes:
304
- enabled: Whether this trigger is enabled.
305
- threshold: Utilization threshold that triggers scaling.
306
- """
307
-
308
- enabled: bool
309
- threshold: Optional[float] = None
310
-
311
-
312
- @dataclass_json
313
- @dataclass
314
- class ScalingTriggers:
315
- """Collection of triggers that can cause scaling actions.
316
-
317
- Attributes:
318
- queue_load: Optional trigger based on queue load.
319
- cpu_utilization: Optional trigger based on CPU utilization.
320
- gpu_utilization: Optional trigger based on GPU utilization.
321
- """
322
-
323
- queue_load: Optional[QueueLoadScalingTrigger] = None
324
- cpu_utilization: Optional[UtilizationScalingTrigger] = None
325
- gpu_utilization: Optional[UtilizationScalingTrigger] = None
326
-
327
-
328
- @dataclass_json
329
- @dataclass
330
- class ScalingOptions:
331
- """Configuration for automatic scaling behavior.
332
-
333
- Attributes:
334
- min_replica_count: Minimum number of replicas to maintain.
335
- max_replica_count: Maximum number of replicas allowed.
336
- scale_down_policy: Policy for scaling down replicas.
337
- scale_up_policy: Policy for scaling up replicas.
338
- queue_message_ttl_seconds: Time-to-live for queue messages in seconds.
339
- concurrent_requests_per_replica: Number of concurrent requests each replica can handle.
340
- scaling_triggers: Configuration for various scaling triggers.
341
- """
342
-
343
- min_replica_count: int
344
- max_replica_count: int
345
- scale_down_policy: ScalingPolicy
346
- scale_up_policy: ScalingPolicy
347
- queue_message_ttl_seconds: int
348
- concurrent_requests_per_replica: int
349
- scaling_triggers: ScalingTriggers
350
-
351
-
352
- @dataclass_json(undefined=Undefined.EXCLUDE)
353
- @dataclass
354
- class Deployment:
355
- """Configuration for creating or updating a container deployment.
356
-
357
- Attributes:
358
- name: Name of the deployment.
359
- container_registry_settings: Settings for accessing container registry.
360
- containers: List of container specifications in the deployment.
361
- compute: Compute resource configuration.
362
- is_spot: Whether is spot deployment.
363
- endpoint_base_url: Optional base URL for the deployment endpoint.
364
- scaling: Optional scaling configuration.
365
- created_at: Optional timestamp when the deployment was created.
366
- """
367
-
368
- name: str
369
- containers: List[Container]
370
- compute: ComputeResource
371
- container_registry_settings: ContainerRegistrySettings = field(
372
- default_factory=lambda: ContainerRegistrySettings(is_private=False))
373
- is_spot: bool = False
374
- endpoint_base_url: Optional[str] = None
375
- scaling: Optional[ScalingOptions] = None
376
- created_at: Optional[str] = None
377
-
378
- _inference_client: Optional[InferenceClient] = None
379
-
380
- def __str__(self):
381
- """Returns a string representation of the deployment, excluding sensitive information.
382
-
383
- Returns:
384
- str: A formatted string representation of the deployment.
385
- """
386
- # Get all attributes except _inference_client
387
- attrs = {k: v for k, v in self.__dict__.items() if k !=
388
- '_inference_client'}
389
- # Format each attribute
390
- attr_strs = [f"{k}={repr(v)}" for k, v in attrs.items()]
391
- return f"Deployment({', '.join(attr_strs)})"
392
-
393
- def __repr__(self):
394
- """Returns a repr representation of the deployment, excluding sensitive information.
395
-
396
- Returns:
397
- str: A formatted string representation of the deployment.
398
- """
399
- return self.__str__()
400
-
401
- @classmethod
402
- def from_dict_with_inference_key(cls, data: Dict[str, Any], inference_key: str = None) -> 'Deployment':
403
- """Creates a Deployment instance from a dictionary with an inference key.
404
-
405
- Args:
406
- data: Dictionary containing deployment data.
407
- inference_key: Inference key to set on the deployment.
408
-
409
- Returns:
410
- Deployment: A new Deployment instance with the inference client initialized.
411
- """
412
- deployment = Deployment.from_dict(data, infer_missing=True)
413
- if inference_key and deployment.endpoint_base_url:
414
- deployment._inference_client = InferenceClient(
415
- inference_key=inference_key,
416
- endpoint_base_url=deployment.endpoint_base_url
417
- )
418
- return deployment
419
-
420
- def set_inference_client(self, inference_key: str) -> None:
421
- """Sets the inference client for this deployment.
422
-
423
- Args:
424
- inference_key: The inference key to use for authentication.
425
-
426
- Raises:
427
- ValueError: If endpoint_base_url is not set.
428
- """
429
- if self.endpoint_base_url is None:
430
- raise ValueError(
431
- "Endpoint base URL must be set to use inference client")
432
- self._inference_client = InferenceClient(
433
- inference_key=inference_key,
434
- endpoint_base_url=self.endpoint_base_url
435
- )
436
-
437
- def _validate_inference_client(self) -> None:
438
- """Validates that the inference client is initialized.
439
-
440
- Raises:
441
- ValueError: If inference client is not initialized.
442
- """
443
- if self._inference_client is None:
444
- raise ValueError(
445
- "Inference client not initialized. Use from_dict_with_inference_key or set_inference_client to initialize inference capabilities.")
446
-
447
- def run_sync(self, data: Dict[str, Any], path: str = "", timeout_seconds: int = 60 * 5, headers: Optional[Dict[str, str]] = None, http_method: str = "POST", stream: bool = False) -> InferenceResponse:
448
- """Runs a synchronous inference request.
449
-
450
- Args:
451
- data: The data to send in the request.
452
- path: The endpoint path to send the request to.
453
- timeout_seconds: Maximum time to wait for the response.
454
- headers: Optional headers to include in the request.
455
- http_method: The HTTP method to use for the request.
456
- stream: Whether to stream the response.
457
-
458
- Returns:
459
- InferenceResponse: The response from the inference request.
460
-
461
- Raises:
462
- ValueError: If the inference client is not initialized.
463
- """
464
- self._validate_inference_client()
465
- return self._inference_client.run_sync(data, path, timeout_seconds, headers, http_method, stream)
466
-
467
- def run(self, data: Dict[str, Any], path: str = "", timeout_seconds: int = 60 * 5, headers: Optional[Dict[str, str]] = None, http_method: str = "POST", stream: bool = False):
468
- """Runs an asynchronous inference request.
469
-
470
- Args:
471
- data: The data to send in the request.
472
- path: The endpoint path to send the request to.
473
- timeout_seconds: Maximum time to wait for the response.
474
- headers: Optional headers to include in the request.
475
- http_method: The HTTP method to use for the request.
476
- stream: Whether to stream the response.
477
-
478
- Returns:
479
- The response from the inference request.
480
-
481
- Raises:
482
- ValueError: If the inference client is not initialized.
483
- """
484
- self._validate_inference_client()
485
- return self._inference_client.run(data, path, timeout_seconds, headers, http_method, stream)
486
-
487
- def health(self):
488
- """Checks the health of the deployed application.
489
-
490
- Returns:
491
- The health check response.
492
-
493
- Raises:
494
- ValueError: If the inference client is not initialized.
495
- """
496
- self._validate_inference_client()
497
- # build healthcheck path
498
- healthcheck_path = "/health"
499
- if self.containers and self.containers[0].healthcheck and self.containers[0].healthcheck.path:
500
- healthcheck_path = self.containers[0].healthcheck.path
501
-
502
- return self._inference_client.health(healthcheck_path)
503
- # Function alias
504
- healthcheck = health
505
-
506
-
507
- @dataclass_json
508
- @dataclass
509
- class ReplicaInfo:
510
- """Information about a deployment replica.
511
-
512
- Attributes:
513
- id: Unique identifier of the replica.
514
- status: Current status of the replica.
515
- started_at: Timestamp when the replica was started.
516
- """
517
-
518
- id: str
519
- status: str
520
- started_at: str
521
-
522
-
523
- @dataclass_json
524
- @dataclass
525
- class Secret:
526
- """A secret model class.
527
-
528
- Attributes:
529
- name: Name of the secret.
530
- created_at: Timestamp when the secret was created.
531
- secret_type: Type of the secret.
532
- """
533
-
534
- name: str
535
- created_at: str
536
- secret_type: SecretType
537
-
538
-
539
- @dataclass_json
540
- @dataclass
541
- class RegistryCredential:
542
- """A container registry credential model class.
543
-
544
- Attributes:
545
- name: Name of the registry credential.
546
- created_at: Timestamp when the credential was created.
547
- """
548
-
549
- name: str
550
- created_at: str
551
-
552
-
553
- @dataclass_json
554
- @dataclass
555
- class BaseRegistryCredentials:
556
- """Base class for registry credentials.
557
-
558
- Attributes:
559
- name: Name of the registry credential.
560
- type: Type of the container registry.
561
- """
562
-
563
- name: str
564
- type: ContainerRegistryType
565
-
566
-
567
- @dataclass_json
568
- @dataclass
569
- class DockerHubCredentials(BaseRegistryCredentials):
570
- """Credentials for DockerHub registry.
571
-
572
- Attributes:
573
- username: DockerHub username.
574
- access_token: DockerHub access token.
575
- """
576
-
577
- username: str
578
- access_token: str
579
-
580
- def __init__(self, name: str, username: str, access_token: str):
581
- """Initializes DockerHub credentials.
582
-
583
- Args:
584
- name: Name of the credentials.
585
- username: DockerHub username.
586
- access_token: DockerHub access token.
587
- """
588
- super().__init__(name=name, type=ContainerRegistryType.DOCKERHUB)
589
- self.username = username
590
- self.access_token = access_token
591
-
592
-
593
- @dataclass_json
594
- @dataclass
595
- class GithubCredentials(BaseRegistryCredentials):
596
- """Credentials for GitHub Container Registry.
597
-
598
- Attributes:
599
- username: GitHub username.
600
- access_token: GitHub access token.
601
- """
602
-
603
- username: str
604
- access_token: str
605
-
606
- def __init__(self, name: str, username: str, access_token: str):
607
- """Initializes GitHub credentials.
608
-
609
- Args:
610
- name: Name of the credentials.
611
- username: GitHub username.
612
- access_token: GitHub access token.
613
- """
614
- super().__init__(name=name, type=ContainerRegistryType.GITHUB)
615
- self.username = username
616
- self.access_token = access_token
617
-
618
-
619
- @dataclass_json
620
- @dataclass
621
- class GCRCredentials(BaseRegistryCredentials):
622
- """Credentials for Google Container Registry.
623
-
624
- Attributes:
625
- service_account_key: GCP service account key JSON.
626
- """
627
-
628
- service_account_key: str
629
-
630
- def __init__(self, name: str, service_account_key: str):
631
- """Initializes GCR credentials.
632
-
633
- Args:
634
- name: Name of the credentials.
635
- service_account_key: GCP service account key JSON.
636
- """
637
- super().__init__(name=name, type=ContainerRegistryType.GCR)
638
- self.service_account_key = service_account_key
639
-
640
-
641
- @dataclass_json
642
- @dataclass
643
- class AWSECRCredentials(BaseRegistryCredentials):
644
- """Credentials for AWS Elastic Container Registry.
645
-
646
- Attributes:
647
- access_key_id: AWS access key ID.
648
- secret_access_key: AWS secret access key.
649
- region: AWS region.
650
- ecr_repo: ECR repository name.
651
- """
652
-
653
- access_key_id: str
654
- secret_access_key: str
655
- region: str
656
- ecr_repo: str
657
-
658
- def __init__(self, name: str, access_key_id: str, secret_access_key: str, region: str, ecr_repo: str):
659
- """Initializes AWS ECR credentials.
660
-
661
- Args:
662
- name: Name of the credentials.
663
- access_key_id: AWS access key ID.
664
- secret_access_key: AWS secret access key.
665
- region: AWS region.
666
- ecr_repo: ECR repository name.
667
- """
668
- super().__init__(name=name, type=ContainerRegistryType.AWS_ECR)
669
- self.access_key_id = access_key_id
670
- self.secret_access_key = secret_access_key
671
- self.region = region
672
- self.ecr_repo = ecr_repo
673
-
674
-
675
- @dataclass_json
676
- @dataclass
677
- class CustomRegistryCredentials(BaseRegistryCredentials):
678
- """Credentials for custom container registries.
679
-
680
- Attributes:
681
- docker_config_json: Docker config JSON containing registry credentials.
682
- """
683
-
684
- docker_config_json: str
685
-
686
- def __init__(self, name: str, docker_config_json: str):
687
- """Initializes custom registry credentials.
688
-
689
- Args:
690
- name: Name of the credentials.
691
- docker_config_json: Docker config JSON containing registry credentials.
692
- """
693
- super().__init__(name=name, type=ContainerRegistryType.CUSTOM)
694
- self.docker_config_json = docker_config_json
695
-
696
-
697
- class ContainersService:
698
- """Service for managing container deployments.
699
-
700
- This class provides methods for interacting with the DataCrunch container
701
- deployment API, including CRUD operations for deployments and related resources.
702
- """
703
-
704
- def __init__(self, http_client: HTTPClient, inference_key: str = None) -> None:
705
- """Initializes the containers service.
706
-
707
- Args:
708
- http_client: HTTP client for making API requests.
709
- inference_key: Optional inference key for authenticating inference requests.
710
- """
711
- self.client = http_client
712
- self._inference_key = inference_key
713
-
714
- def get_deployments(self) -> List[Deployment]:
715
- """Retrieves all container deployments.
716
-
717
- Returns:
718
- List[Deployment]: List of all deployments.
719
- """
720
- response = self.client.get(CONTAINER_DEPLOYMENTS_ENDPOINT)
721
- return [Deployment.from_dict_with_inference_key(deployment, self._inference_key) for deployment in response.json()]
722
-
723
- def get_deployment_by_name(self, deployment_name: str) -> Deployment:
724
- """Retrieves a specific deployment by name.
725
-
726
- Args:
727
- deployment_name: Name of the deployment to retrieve.
728
-
729
- Returns:
730
- Deployment: The requested deployment.
731
- """
732
- response = self.client.get(
733
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}")
734
- return Deployment.from_dict_with_inference_key(response.json(), self._inference_key)
735
-
736
- # Function alias
737
- get_deployment = get_deployment_by_name
738
-
739
- def create_deployment(
740
- self,
741
- deployment: Deployment
742
- ) -> Deployment:
743
- """Creates a new container deployment.
744
-
745
- Args:
746
- deployment: Deployment configuration to create.
747
-
748
- Returns:
749
- Deployment: The created deployment.
750
- """
751
- response = self.client.post(
752
- CONTAINER_DEPLOYMENTS_ENDPOINT,
753
- deployment.to_dict()
754
- )
755
- return Deployment.from_dict_with_inference_key(response.json(), self._inference_key)
756
-
757
- def update_deployment(self, deployment_name: str, deployment: Deployment) -> Deployment:
758
- """Updates an existing deployment.
759
-
760
- Args:
761
- deployment_name: Name of the deployment to update.
762
- deployment: Updated deployment configuration.
763
-
764
- Returns:
765
- Deployment: The updated deployment.
766
- """
767
- response = self.client.patch(
768
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}",
769
- deployment.to_dict()
770
- )
771
- return Deployment.from_dict_with_inference_key(response.json(), self._inference_key)
772
-
773
- def delete_deployment(self, deployment_name: str) -> None:
774
- """Deletes a deployment.
775
-
776
- Args:
777
- deployment_name: Name of the deployment to delete.
778
- """
779
- self.client.delete(
780
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}")
781
-
782
- def get_deployment_status(self, deployment_name: str) -> ContainerDeploymentStatus:
783
- """Retrieves the current status of a deployment.
784
-
785
- Args:
786
- deployment_name: Name of the deployment.
787
-
788
- Returns:
789
- ContainerDeploymentStatus: Current status of the deployment.
790
- """
791
- response = self.client.get(
792
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/status")
793
- return ContainerDeploymentStatus(response.json()["status"])
794
-
795
- def restart_deployment(self, deployment_name: str) -> None:
796
- """Restarts a deployment.
797
-
798
- Args:
799
- deployment_name: Name of the deployment to restart.
800
- """
801
- self.client.post(
802
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/restart")
803
-
804
- def get_deployment_scaling_options(self, deployment_name: str) -> ScalingOptions:
805
- """Retrieves the scaling options for a deployment.
806
-
807
- Args:
808
- deployment_name: Name of the deployment.
809
-
810
- Returns:
811
- ScalingOptions: Current scaling options for the deployment.
812
- """
813
- response = self.client.get(
814
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/scaling")
815
- return ScalingOptions.from_dict(response.json())
816
-
817
- def update_deployment_scaling_options(self, deployment_name: str, scaling_options: ScalingOptions) -> ScalingOptions:
818
- """Updates the scaling options for a deployment.
819
-
820
- Args:
821
- deployment_name: Name of the deployment.
822
- scaling_options: New scaling options to apply.
823
-
824
- Returns:
825
- ScalingOptions: Updated scaling options for the deployment.
826
- """
827
- response = self.client.patch(
828
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/scaling",
829
- scaling_options.to_dict()
830
- )
831
- return ScalingOptions.from_dict(response.json())
832
-
833
- def get_deployment_replicas(self, deployment_name: str) -> List[ReplicaInfo]:
834
- """Retrieves information about deployment replicas.
835
-
836
- Args:
837
- deployment_name: Name of the deployment.
838
-
839
- Returns:
840
- List[ReplicaInfo]: List of replica information.
841
- """
842
- response = self.client.get(
843
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/replicas")
844
- return [ReplicaInfo.from_dict(replica) for replica in response.json()["list"]]
845
-
846
- def purge_deployment_queue(self, deployment_name: str) -> None:
847
- """Purges the deployment queue.
848
-
849
- Args:
850
- deployment_name: Name of the deployment.
851
- """
852
- self.client.post(
853
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/purge-queue")
854
-
855
- def pause_deployment(self, deployment_name: str) -> None:
856
- """Pauses a deployment.
857
-
858
- Args:
859
- deployment_name: Name of the deployment to pause.
860
- """
861
- self.client.post(
862
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/pause")
863
-
864
- def resume_deployment(self, deployment_name: str) -> None:
865
- """Resumes a paused deployment.
866
-
867
- Args:
868
- deployment_name: Name of the deployment to resume.
869
- """
870
- self.client.post(
871
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/resume")
872
-
873
- def get_deployment_environment_variables(self, deployment_name: str) -> Dict[str, List[EnvVar]]:
874
- """Retrieves environment variables for a deployment.
875
-
876
- Args:
877
- deployment_name: Name of the deployment.
878
-
879
- Returns:
880
- Dict[str, List[EnvVar]]: Dictionary mapping container names to their environment variables.
881
- """
882
- response = self.client.get(
883
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/environment-variables")
884
- result = {}
885
- for item in response.json():
886
- container_name = item["container_name"]
887
- env_vars = item["env"]
888
- result[container_name] = [EnvVar.from_dict(
889
- env_var) for env_var in env_vars]
890
- return result
891
-
892
- def add_deployment_environment_variables(self, deployment_name: str, container_name: str, env_vars: List[EnvVar]) -> Dict[str, List[EnvVar]]:
893
- """Adds environment variables to a container in a deployment.
894
-
895
- Args:
896
- deployment_name: Name of the deployment.
897
- container_name: Name of the container.
898
- env_vars: List of environment variables to add.
899
-
900
- Returns:
901
- Dict[str, List[EnvVar]]: Updated environment variables for all containers.
902
- """
903
- response = self.client.post(
904
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/environment-variables",
905
- {"container_name": container_name, "env": [
906
- env_var.to_dict() for env_var in env_vars]}
907
- )
908
- result = {}
909
- for item in response.json():
910
- container_name = item["container_name"]
911
- env_vars = item["env"]
912
- result[container_name] = [EnvVar.from_dict(
913
- env_var) for env_var in env_vars]
914
- return result
915
-
916
- def update_deployment_environment_variables(self, deployment_name: str, container_name: str, env_vars: List[EnvVar]) -> Dict[str, List[EnvVar]]:
917
- """Updates environment variables for a container in a deployment.
918
-
919
- Args:
920
- deployment_name: Name of the deployment.
921
- container_name: Name of the container.
922
- env_vars: List of updated environment variables.
923
-
924
- Returns:
925
- Dict[str, List[EnvVar]]: Updated environment variables for all containers.
926
- """
927
- response = self.client.patch(
928
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/environment-variables",
929
- {"container_name": container_name, "env": [
930
- env_var.to_dict() for env_var in env_vars]}
931
- )
932
- result = {}
933
- item = response.json()
934
- container_name = item["container_name"]
935
- env_vars = item["env"]
936
- result[container_name] = [EnvVar.from_dict(
937
- env_var) for env_var in env_vars]
938
- return result
939
-
940
- def delete_deployment_environment_variables(self, deployment_name: str, container_name: str, env_var_names: List[str]) -> Dict[str, List[EnvVar]]:
941
- """Deletes environment variables from a container in a deployment.
942
-
943
- Args:
944
- deployment_name: Name of the deployment.
945
- container_name: Name of the container.
946
- env_var_names: List of environment variable names to delete.
947
-
948
- Returns:
949
- Dict[str, List[EnvVar]]: Updated environment variables for all containers.
950
- """
951
- response = self.client.delete(
952
- f"{CONTAINER_DEPLOYMENTS_ENDPOINT}/{deployment_name}/environment-variables",
953
- {"container_name": container_name, "env": env_var_names}
954
- )
955
- result = {}
956
- for item in response.json():
957
- container_name = item["container_name"]
958
- env_vars = item["env"]
959
- result[container_name] = [EnvVar.from_dict(
960
- env_var) for env_var in env_vars]
961
- return result
962
-
963
- def get_compute_resources(self, size: int = None, is_available: bool = None) -> List[ComputeResource]:
964
- """Retrieves compute resources, optionally filtered by size and availability.
965
-
966
- Args:
967
- size: Optional size to filter resources by (e.g. 8 for 8x GPUs)
968
- is_available: Optional boolean to filter by availability status
969
-
970
- Returns:
971
- List[ComputeResource]: List of compute resources matching the filters.
972
- If no filters provided, returns all resources.
973
- """
974
- response = self.client.get(SERVERLESS_COMPUTE_RESOURCES_ENDPOINT)
975
- resources = []
976
- for resource_group in response.json():
977
- for resource in resource_group:
978
- resources.append(ComputeResource.from_dict(resource))
979
- if size:
980
- resources = [r for r in resources if r.size == size]
981
- if is_available:
982
- resources = [
983
- r for r in resources if r.is_available == is_available]
984
- return resources
985
-
986
- # Function alias
987
- get_gpus = get_compute_resources
988
-
989
- def get_secrets(self) -> List[Secret]:
990
- """Retrieves all secrets.
991
-
992
- Returns:
993
- List[Secret]: List of all secrets.
994
- """
995
- response = self.client.get(SECRETS_ENDPOINT)
996
- return [Secret.from_dict(secret) for secret in response.json()]
997
-
998
- def create_secret(self, name: str, value: str) -> None:
999
- """Creates a new secret.
1000
-
1001
- Args:
1002
- name: Name of the secret.
1003
- value: Value of the secret.
1004
- """
1005
- self.client.post(SECRETS_ENDPOINT, {"name": name, "value": value})
1006
-
1007
- def delete_secret(self, secret_name: str, force: bool = False) -> None:
1008
- """Deletes a secret.
1009
-
1010
- Args:
1011
- secret_name: Name of the secret to delete.
1012
- force: Whether to force delete even if secret is in use.
1013
- """
1014
- self.client.delete(
1015
- f"{SECRETS_ENDPOINT}/{secret_name}", params={"force": str(force).lower()})
1016
-
1017
- def get_registry_credentials(self) -> List[RegistryCredential]:
1018
- """Retrieves all registry credentials.
1019
-
1020
- Returns:
1021
- List[RegistryCredential]: List of all registry credentials.
1022
- """
1023
- response = self.client.get(CONTAINER_REGISTRY_CREDENTIALS_ENDPOINT)
1024
- return [RegistryCredential.from_dict(credential) for credential in response.json()]
1025
-
1026
- def add_registry_credentials(self, credentials: BaseRegistryCredentials) -> None:
1027
- """Adds new registry credentials.
1028
-
1029
- Args:
1030
- credentials: Registry credentials to add.
1031
- """
1032
- data = credentials.to_dict()
1033
- self.client.post(CONTAINER_REGISTRY_CREDENTIALS_ENDPOINT, data)
1034
-
1035
- def delete_registry_credentials(self, credentials_name: str) -> None:
1036
- """Deletes registry credentials.
1037
-
1038
- Args:
1039
- credentials_name: Name of the credentials to delete.
1040
- """
1041
- self.client.delete(
1042
- f"{CONTAINER_REGISTRY_CREDENTIALS_ENDPOINT}/{credentials_name}")
1043
-
1044
- def get_fileset_secrets(self) -> List[Secret]:
1045
- """Retrieves all fileset secrets.
1046
-
1047
- Returns:
1048
- List of all fileset secrets.
1049
- """
1050
- response = self.client.get(FILESET_SECRETS_ENDPOINT)
1051
- return [Secret.from_dict(secret) for secret in response.json()]
1052
-
1053
- def delete_fileset_secret(self, secret_name: str) -> None:
1054
- """Deletes a fileset secret.
1055
-
1056
- Args:
1057
- secret_name: Name of the secret to delete.
1058
- """
1059
- self.client.delete(f"{FILESET_SECRETS_ENDPOINT}/{secret_name}")
1060
-
1061
- def create_fileset_secret_from_file_paths(self, secret_name: str, file_paths: List[str]) -> None:
1062
- """Creates a new fileset secret.
1063
- A fileset secret is a secret that contains several files,
1064
- and can be used to mount a directory with the files in a container.
1065
-
1066
- Args:
1067
- secret_name: Name of the secret.
1068
- file_paths: List of file paths to include in the secret.
1069
- """
1070
- processed_files = []
1071
- for file_path in file_paths:
1072
- with open(file_path, "rb") as f:
1073
- base64_content = base64.b64encode(f.read()).decode("utf-8")
1074
- processed_files.append({
1075
- "file_name": os.path.basename(file_path),
1076
- "base64_content": base64_content
1077
- })
1078
- self.client.post(FILESET_SECRETS_ENDPOINT, {
1079
- "name": secret_name,
1080
- "files": processed_files
1081
- })
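
The removed containers.py above held the entire serverless-container API of datacrunch 1.15.0. For reference, the sketches below reconstruct typical usage of that API from the signatures shown in this diff; they are illustrations written against the 1.15.0 package, not code shipped in either version, and deployment names, images, file paths, resource names and tokens are placeholders. First, a minimal deployment-creation sketch (the HTTPClient must already be authenticated; its construction is defined outside this file):

from datacrunch.containers.containers import (
    ComputeResource,
    Container,
    ContainersService,
    Deployment,
    HealthcheckSettings,
)
from datacrunch.http_client.http_client import HTTPClient


def create_example_deployment(http_client: HTTPClient) -> Deployment:
    containers_service = ContainersService(http_client)

    nginx = Container(
        image="nginx:latest",   # public image, so the default non-private registry settings apply
        exposed_port=80,
        healthcheck=HealthcheckSettings(enabled=True, port=80, path="/health"),
    )
    deployment = Deployment(
        name="example-deployment",                                 # hypothetical name
        containers=[nginx],
        compute=ComputeResource(name="General Compute", size=1),   # illustrative; query get_compute_resources() for real options
    )
    return containers_service.create_deployment(deployment)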
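
A sketch of the removed registry-credential and secret/environment-variable helpers, assuming a deployment named example-deployment already exists. The credential names, tokens and container name are placeholders (container names are system-managed), and the returned settings would be passed as a Deployment's container_registry_settings:

from datacrunch.containers.containers import (
    ContainerRegistryCredentials,
    ContainerRegistrySettings,
    ContainersService,
    DockerHubCredentials,
    EnvVar,
    EnvVarType,
)


def configure_private_registry_and_env(containers_service: ContainersService) -> ContainerRegistrySettings:
    # Store Docker Hub credentials once, then reference them by name in deployments.
    containers_service.add_registry_credentials(
        DockerHubCredentials(name="dockerhub-creds", username="my-user", access_token="<ACCESS_TOKEN>")
    )
    registry_settings = ContainerRegistrySettings(
        is_private=True,
        credentials=ContainerRegistryCredentials(name="dockerhub-creds"),
    )

    # Generic secrets can be referenced from container environment variables.
    containers_service.create_secret(name="api-token", value="<SECRET_VALUE>")
    containers_service.add_deployment_environment_variables(
        "example-deployment",
        "example-container",   # placeholder; actual container names are assigned by the system
        [EnvVar(name="API_TOKEN", value_or_reference_to_secret="api-token", type=EnvVarType.SECRET)],
    )
    return registry_settings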
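
A sketch of the removed fileset-secret and volume-mount helpers. The resulting list would be passed as a Container's volume_mounts; the file paths, mount paths and shared-filesystem volume ID are placeholders, and the shared volume must already exist:

from typing import List

from datacrunch.containers.containers import (
    ContainersService,
    GeneralStorageMount,
    MemoryMount,
    SecretMount,
    SharedFileSystemMount,
    VolumeMount,
)


def build_volume_mounts(containers_service: ContainersService) -> List[VolumeMount]:
    # A fileset secret bundles several files so they can be mounted as a directory.
    containers_service.create_fileset_secret_from_file_paths(
        secret_name="model-config",
        file_paths=["./config.yaml", "./tokenizer.json"],   # hypothetical local files
    )
    return [
        SecretMount(mount_path="/etc/model", secret_name="model-config"),
        GeneralStorageMount(mount_path="/scratch"),
        MemoryMount(size_in_mb=512),                                              # always mounted at /dev/shm
        SharedFileSystemMount(mount_path="/data", volume_id="<SFS_VOLUME_ID>"),   # pre-existing shared volume
    ]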
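
A sketch of the removed scaling API. The numeric values are illustrative only, not recommendations:

from datacrunch.containers.containers import (
    ContainersService,
    QueueLoadScalingTrigger,
    ScalingOptions,
    ScalingPolicy,
    ScalingTriggers,
    UtilizationScalingTrigger,
)


def update_example_scaling(containers_service: ContainersService, deployment_name: str) -> ScalingOptions:
    scaling = ScalingOptions(
        min_replica_count=1,
        max_replica_count=3,
        scale_down_policy=ScalingPolicy(delay_seconds=300),
        scale_up_policy=ScalingPolicy(delay_seconds=30),
        queue_message_ttl_seconds=600,
        concurrent_requests_per_replica=4,
        scaling_triggers=ScalingTriggers(
            queue_load=QueueLoadScalingTrigger(threshold=1.0),
            gpu_utilization=UtilizationScalingTrigger(enabled=True, threshold=80.0),
        ),
    )
    return containers_service.update_deployment_scaling_options(deployment_name, scaling)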
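
A sketch of the removed inference helpers on Deployment. If the ContainersService was constructed with an inference key, the deployments it returns are wired for inference automatically; set_inference_client is the manual path shown here. The request path and payload are placeholders, since they depend entirely on the deployed container:

from datacrunch.containers.containers import ContainersService


def run_example_inference(containers_service: ContainersService, inference_key: str):
    deployment = containers_service.get_deployment_by_name("example-deployment")
    # Requires endpoint_base_url to be set on the deployment; otherwise this raises ValueError.
    deployment.set_inference_client(inference_key)
    response = deployment.run_sync(
        {"prompt": "hello"},   # hypothetical payload
        path="/predict",       # hypothetical route
        timeout_seconds=120,
    )
    print(deployment.health())
    return response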