lightning-sdk 0.2.22__py3-none-any.whl → 0.2.24rc0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lightning_sdk/__init__.py +1 -1
- lightning_sdk/api/base_studio_api.py +9 -2
- lightning_sdk/api/deployment_api.py +9 -9
- lightning_sdk/api/license_api.py +2 -2
- lightning_sdk/api/llm_api.py +7 -11
- lightning_sdk/api/pipeline_api.py +31 -10
- lightning_sdk/api/studio_api.py +6 -0
- lightning_sdk/base_studio.py +22 -6
- lightning_sdk/cli/entrypoint.py +15 -13
- lightning_sdk/cli/start.py +5 -2
- lightning_sdk/deployment/deployment.py +17 -7
- lightning_sdk/lightning_cloud/openapi/__init__.py +20 -0
- lightning_sdk/lightning_cloud/openapi/api/__init__.py +1 -0
- lightning_sdk/lightning_cloud/openapi/api/assistants_service_api.py +114 -1
- lightning_sdk/lightning_cloud/openapi/api/cloud_space_service_api.py +206 -0
- lightning_sdk/lightning_cloud/openapi/api/cloudy_service_api.py +129 -0
- lightning_sdk/lightning_cloud/openapi/api/cluster_service_api.py +97 -0
- lightning_sdk/lightning_cloud/openapi/api/organizations_service_api.py +105 -0
- lightning_sdk/lightning_cloud/openapi/api/pipelines_service_api.py +118 -1
- lightning_sdk/lightning_cloud/openapi/api/user_service_api.py +105 -0
- lightning_sdk/lightning_cloud/openapi/configuration.py +1 -1
- lightning_sdk/lightning_cloud/openapi/models/__init__.py +19 -0
- lightning_sdk/lightning_cloud/openapi/models/agents_id_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/assistant_id_conversations_body.py +81 -3
- lightning_sdk/lightning_cloud/openapi/models/cloudspace_id_visibility_body.py +123 -0
- lightning_sdk/lightning_cloud/openapi/models/create_deployment_request_defines_a_spec_for_the_job_that_allows_for_autoscaling_jobs.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/credits_autoreplenish_body.py +175 -0
- lightning_sdk/lightning_cloud/openapi/models/credits_autoreplenish_body1.py +175 -0
- lightning_sdk/lightning_cloud/openapi/models/externalv1_user_status.py +79 -1
- lightning_sdk/lightning_cloud/openapi/models/metricsstream_create_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/orgs_id_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/pipelines_id_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/pipelines_id_body1.py +123 -0
- lightning_sdk/lightning_cloud/openapi/models/project_id_agents_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/project_id_cloudspaces_body.py +53 -1
- lightning_sdk/lightning_cloud/openapi/models/update.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_assistant.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_check_cluster_name_availability_request.py +123 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_check_cluster_name_availability_response.py +123 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_provider.py +2 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_space.py +79 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_space_environment_config.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_space_environment_template_config.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_space_environment_type.py +1 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_space_session.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_cloud_space_specialized_view.py +104 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_cloudy_expert.py +279 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_cluster_accelerator.py +79 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_cluster_capacity_reservation.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_cluster_security_options.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_cluster_spec.py +105 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_cluster_status.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_conversation_response_chunk.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_create_cloud_space_environment_template_request.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_create_deployment_request.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_create_organization_request.py +79 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_deployment_status.py +47 -21
- lightning_sdk/lightning_cloud/openapi/models/v1_external_cluster.py +253 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_external_cluster_spec.py +853 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_get_job_stats_response.py +53 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_get_user_response.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_instance_overprovisioning_spec.py +29 -27
- lightning_sdk/lightning_cloud/openapi/models/v1_kubernetes_direct_v1.py +123 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_kubernetes_direct_v1_status.py +149 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_lightning_run.py +53 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_list_cloudy_experts_response.py +123 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_list_clusters_response.py +6 -6
- lightning_sdk/lightning_cloud/openapi/models/v1_list_project_clusters_response.py +6 -6
- lightning_sdk/lightning_cloud/openapi/models/v1_lite_published_cloud_space_response.py +513 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_login_request.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_magic_link_login_request.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_magic_link_login_response.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_metrics_stream.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_organization.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_pipeline.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_shared_filesystem.py +131 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_token_usage.py +175 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_update_cloud_space_visibility_response.py +97 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_update_organization_credits_auto_replenish_response.py +97 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_update_user_credits_auto_replenish_response.py +97 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_update_user_request.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_user_features.py +234 -104
- lightning_sdk/lightning_cloud/openapi/models/v1_volume.py +78 -104
- lightning_sdk/lightning_cloud/openapi/models/v1_volume_state.py +104 -0
- lightning_sdk/llm/llm.py +113 -115
- lightning_sdk/llm/public_assistants.json +8 -0
- lightning_sdk/pipeline/__init__.py +11 -2
- lightning_sdk/pipeline/pipeline.py +54 -14
- lightning_sdk/pipeline/printer.py +36 -16
- lightning_sdk/pipeline/schedule.py +2 -1
- lightning_sdk/pipeline/{types.py → steps.py} +77 -56
- lightning_sdk/pipeline/utils.py +65 -3
- lightning_sdk/sandbox.py +157 -0
- lightning_sdk/services/license.py +12 -6
- lightning_sdk/studio.py +10 -1
- {lightning_sdk-0.2.22.dist-info → lightning_sdk-0.2.24rc0.dist-info}/METADATA +1 -1
- {lightning_sdk-0.2.22.dist-info → lightning_sdk-0.2.24rc0.dist-info}/RECORD +101 -79
- {lightning_sdk-0.2.22.dist-info → lightning_sdk-0.2.24rc0.dist-info}/LICENSE +0 -0
- {lightning_sdk-0.2.22.dist-info → lightning_sdk-0.2.24rc0.dist-info}/WHEEL +0 -0
- {lightning_sdk-0.2.22.dist-info → lightning_sdk-0.2.24rc0.dist-info}/entry_points.txt +0 -0
- {lightning_sdk-0.2.22.dist-info → lightning_sdk-0.2.24rc0.dist-info}/top_level.txt +0 -0
lightning_sdk/llm/llm.py
CHANGED
@@ -1,19 +1,37 @@
+import json
 import os
-import
-from typing import AsyncGenerator, Dict, Generator, List, Optional, Set, Tuple, Union
+from typing import AsyncGenerator, ClassVar, Dict, Generator, List, Optional, Tuple, Union
 
 from lightning_sdk.api.llm_api import LLMApi
-from lightning_sdk.cli.teamspace_menu import _TeamspacesMenu
-from lightning_sdk.lightning_cloud.openapi import V1Assistant
 from lightning_sdk.lightning_cloud.openapi.models.v1_conversation_response_chunk import V1ConversationResponseChunk
-
-
-
-
-
+
+PUBLIC_MODEL_PROVIDERS: Dict[str, str] = {
+    "openai": "OpenAI",
+    "anthropic": "Anthropic",
+    "google": "Google",
+}
+
+
+def _load_public_assistants() -> Dict[str, str]:
+    """Load public assistants from a JSON file."""
+    try:
+        json_path = os.path.join(os.path.dirname(__file__), "public_assistants.json")
+        with open(json_path) as f:
+            return json.load(f)
+    except Exception as e:
+        print(f"[warning] Failed to load public_assistants.json: {e}")
+        return {}
 
 
 class LLM:
+    _auth_info_cached: ClassVar[bool] = False
+    _cached_auth_info: ClassVar[Dict[str, Optional[str]]] = {}
+    _llm_api_cache: ClassVar[Dict[Optional[str], LLMApi]] = {}
+    _public_assistants: ClassVar[Optional[Dict[str, str]]] = None
+
+    def __new__(cls, name: str, teamspace: Optional[str] = None, enable_async: Optional[bool] = False) -> "LLM":
+        return super().__new__(cls)
+
     def __init__(
         self,
         name: str,
@@ -36,67 +54,18 @@ class LLM:
         Raises:
             ValueError: If teamspace information cannot be resolved.
         """
-
-
-        possible_teamspaces = menu._get_possible_teamspaces(user)
-        if teamspace is None:
-            # get current teamspace
-            self._teamspace = _resolve_teamspace(teamspace=None, org=None, user=None)
-        else:
-            self._teamspace = Teamspace(**menu._get_teamspace_from_name(teamspace, possible_teamspaces))
-
-        if self._teamspace is None:
-            # select the first available teamspace
-            first_teamspace = next(iter(possible_teamspaces.values()), None)
-
-            if first_teamspace:
-                self._teamspace = Teamspace(
-                    name=first_teamspace["name"],
-                    org=first_teamspace["org"],
-                    user=first_teamspace["user"],
-                )
-                warnings.warn(
-                    f"No teamspace given. Using teamspace: {self._teamspace.name}.",
-                    UserWarning,
-                    stacklevel=2,
-                )
-
-        if self._teamspace is None:
-            raise ValueError("Teamspace is required for billing but could not be resolved. ")
-
-        self._user = user
+        # TODO support user input teamspace
+        self._get_auth_info()
 
         self._model_provider, self._model_name = self._parse_model_name(name)
-
-        self._llm_api = LLMApi()
         self._enable_async = enable_async
 
-
-
-
+        # Reuse LLMApi per teamspace (as billing is based on teamspace)
+        if teamspace not in LLM._llm_api_cache:
+            LLM._llm_api_cache[teamspace] = LLMApi()
+        self._llm_api = LLM._llm_api_cache[teamspace]
 
-
-            # check if user has access to the org
-            self._org_models = self._build_model_lookup(self._get_org_models())
-        except ApiException:
-            warnings.warn(
-                f"User is not authenticated to access the model in organization: '{self._model_provider}'.\n"
-                " Proceeding with appropriate org models, user models, or public models.",
-                UserWarning,
-                stacklevel=2,
-            )
-            self._model_provider = None
-            raise
-        except ApiException:
-            if isinstance(self._teamspace.owner, Organization):
-                self._org = self._teamspace.owner
-            else:
-                self._org = None
-            self._org_models = self._build_model_lookup(self._get_org_models())
-
-        self._public_models = self._build_model_lookup(self._get_public_models())
-        self._user_models = self._build_model_lookup(self._get_user_models())
-        self._model = self._get_model()
+        self._model_id = self._get_model_id()
         self._conversations = {}
 
     @property
@@ -107,9 +76,33 @@ class LLM:
     def provider(self) -> str:
         return self._model_provider
 
-
-
-
+    def _get_auth_info(self) -> None:
+        if not LLM._auth_info_cached:
+            teamspace_name = os.environ.get("LIGHTNING_TEAMSPACE", None)
+            if teamspace_name is None:
+                raise ValueError(
+                    "Teamspace name must be provided either through "
+                    "the environment variable LIGHTNING_TEAMSPACE or as an argument."
+                )
+            LLM._cached_auth_info = {
+                "teamspace_name": teamspace_name,
+                "teamspace_id": os.environ.get("LIGHTNING_CLOUD_PROJECT_ID", None),
+                "user_name": os.environ.get("LIGHTNING_USERNAME", ""),
+                "user_id": os.environ.get("LIGHTNING_USER_ID", None),
+                "org_name": os.environ.get("LIGHTNING_ORG", ""),
+                "cloud_url": os.environ.get("LIGHTNING_CLOUD_URL", None),
+            }
+            LLM._auth_info_cached = True
+        if LLM._public_assistants is None:
+            LLM._public_assistants = _load_public_assistants()
+        # Always assign to the current instance
+        self._teamspace_name = LLM._cached_auth_info["teamspace_name"]
+        self._teamspace_id = LLM._cached_auth_info["teamspace_id"]
+        self._user_name = LLM._cached_auth_info["user_name"]
+        self._user_id = LLM._cached_auth_info["user_id"]
+        self._org_name = LLM._cached_auth_info["org_name"]
+        self._cloud_url = LLM._cached_auth_info["cloud_url"]
+        self._org = None
 
     def _parse_model_name(self, name: str) -> Tuple[str, str]:
         parts = name.split("/")
@@ -117,50 +110,60 @@ class LLM:
             # a user model or a org model
             return None, parts[0]
         if len(parts) == 2:
-            return parts[0], parts[1]
+            return parts[0].lower(), parts[1]
         raise ValueError(
             f"Model name must be in the format `organization/model_name` or `model_name`, but got '{name}'."
         )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            return self._user_models.get(self._model_name)[0]
-
-        available_models = []
-        if self._public_models:
-            available_models.append(f"Public Models: {', '.join(self._public_models.keys())}")
-
-        if self._org and self._org_models:
-            available_models.append(f"Org ({self._org.name}) Models: {', '.join(self._org_models.keys())}")
+    # returns the assistant ID
+    def _get_model_id(self) -> str:
+        if self._model_provider in PUBLIC_MODEL_PROVIDERS:
+            # if prod
+            if (
+                self._cloud_url == "https://lightning.ai"
+                and LLM._public_assistants
+                and f"{self._model_provider}/{self._model_name}" in LLM._public_assistants
+            ):
+                return LLM._public_assistants[f"{self._model_provider}/{self._model_name}"]
+            try:
+                return self._llm_api.get_assistant(
+                    model_provider=PUBLIC_MODEL_PROVIDERS[self._model_provider],
+                    model_name=self._model_name,
+                    user_name="",
+                    org_name="",
+                )
+            except Exception as e:
+                raise ValueError(
+                    f"Public model '{self._model_provider}/{self._model_name}' not found. "
+                    "Please check the model name or provider."
+                ) from e
 
-
-
+        # Try organization model
+        try:
+            return self._llm_api.get_assistant(
+                model_provider="",
+                model_name=self._model_name,
+                user_name="",
+                org_name=self._model_provider,
+            )
+        except Exception:
+            pass
 
-
-
+        # Try user model
+        try:
+            return self._llm_api.get_assistant(
+                model_provider="",
+                model_name=self._model_name,
+                user_name=self._model_provider,
+                org_name="",
+            )
+        except Exception as user_error:
+            raise ValueError(
+                f"Model '{self._model_provider}/{self._model_name}' not found as either an org or user model.\n"
+            ) from user_error
 
     def _get_conversations(self) -> None:
-        conversations = self._llm_api.list_conversations(assistant_id=self.
+        conversations = self._llm_api.list_conversations(assistant_id=self._model_id)
         for conversation in conversations:
             if conversation.name and conversation.name not in self._conversations:
                 self._conversations[conversation.name] = conversation.id
@@ -191,7 +194,6 @@ class LLM:
         conversation: Optional[str] = None,
         metadata: Optional[Dict[str, str]] = None,
         stream: bool = False,
-        upload_local_images: bool = False,
     ) -> Union[str, AsyncGenerator[str, None]]:
         conversation_id = self._conversations.get(conversation) if conversation else None
         output = await self._llm_api.async_start_conversation(
@@ -199,9 +201,9 @@ class LLM:
             system_prompt=system_prompt,
             max_completion_tokens=max_completion_tokens,
             images=images,
-            assistant_id=self.
+            assistant_id=self._model_id,
             conversation_id=conversation_id,
-            billing_project_id=self.
+            billing_project_id=self._teamspace_id,
             metadata=metadata,
             name=conversation,
             stream=stream,
@@ -221,7 +223,6 @@ class LLM:
         conversation: Optional[str] = None,
         metadata: Optional[Dict[str, str]] = None,
         stream: bool = False,
-        upload_local_images: bool = False,
     ) -> Union[str, Generator[str, None, None]]:
         if conversation and conversation not in self._conversations:
             self._get_conversations()
@@ -232,8 +233,6 @@ class LLM:
         for image in images:
             if not isinstance(image, str):
                 raise NotImplementedError(f"Image type {type(image)} are not supported yet.")
-                if not image.startswith("http") and upload_local_images:
-                    self._teamspace.upload_file(file_path=image, remote_path=f"images/{os.path.basename(image)}")
 
         conversation_id = self._conversations.get(conversation) if conversation else None
 
@@ -246,7 +245,6 @@ class LLM:
                 conversation,
                 metadata,
                 stream,
-                upload_local_images,
             )
 
         output = self._llm_api.start_conversation(
@@ -254,9 +252,9 @@ class LLM:
             system_prompt=system_prompt,
             max_completion_tokens=max_completion_tokens,
             images=images,
-            assistant_id=self.
+            assistant_id=self._model_id,
            conversation_id=conversation_id,
-            billing_project_id=self.
+            billing_project_id=self._teamspace_id,
             metadata=metadata,
             name=conversation,
             stream=stream,
@@ -272,7 +270,7 @@ class LLM:
         return list(self._conversations.keys())
 
     def _get_conversation_messages(self, conversation_id: str) -> Optional[str]:
-        return self._llm_api.get_conversation(assistant_id=self.
+        return self._llm_api.get_conversation(assistant_id=self._model_id, conversation_id=conversation_id)
 
     def get_history(self, conversation: str) -> Optional[List[Dict]]:
         if conversation not in self._conversations:
@@ -297,7 +295,7 @@ class LLM:
         self._get_conversations()
         if conversation in self._conversations:
            self._llm_api.reset_conversation(
-                assistant_id=self.
+                assistant_id=self._model_id,
                conversation_id=self._conversations[conversation],
            )
        del self._conversations[conversation]
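Taken together, the constructor no longer resolves a Teamspace object: _get_auth_info() reads everything from environment variables and caches it on the class, and _get_model_id() replaces the old model-lookup tables. A minimal sketch of how the new code is driven, with placeholder values and assuming Lightning credentials are already configured (the import path simply mirrors where this module lives):

import os

# LIGHTNING_TEAMSPACE is mandatory: _get_auth_info() raises ValueError without it.
os.environ["LIGHTNING_TEAMSPACE"] = "my-teamspace"          # placeholder
os.environ["LIGHTNING_CLOUD_PROJECT_ID"] = "project-id"     # used as billing_project_id
os.environ["LIGHTNING_CLOUD_URL"] = "https://lightning.ai"  # enables the public_assistants.json fast path

from lightning_sdk.llm.llm import LLM

# "openai/gpt-4o" splits into provider/model; providers listed in
# PUBLIC_MODEL_PROVIDERS resolve to a public assistant ID.
llm = LLM("openai/gpt-4o")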
lightning_sdk/llm/public_assistants.json
ADDED
@@ -0,0 +1,8 @@
+{
+  "openai/gpt-4o": "ast_01jdjds71fs8gt47jexzed4czs",
+  "openai/gpt-4": "ast_01jd38ze6tjbrcd4942nhz41zn",
+  "openai/o3-mini": "ast_01jz3t13fhnjhh11t1k8b5gyp1",
+  "anthropic/claude-3-5-sonnet-20240620": "ast_01jd3923a6p98rqwh3dpj686pq",
+  "google/gemini-2.5-pro": "ast_01jz3tdb1fhey798k95pv61v57",
+  "google/gemini-2.5-flash": "ast_01jz3thxskg4fcdk4xhkjkym5a"
+}
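For reference, a standalone sketch of the fast-path lookup _get_model_id() performs against this file on production (LIGHTNING_CLOUD_URL == "https://lightning.ai"); the file path below is illustrative:

import json

# Illustrative path; at runtime _load_public_assistants() resolves the file next to llm.py.
with open("lightning_sdk/llm/public_assistants.json") as f:
    public_assistants = json.load(f)

print(public_assistants.get("openai/gpt-4o"))  # ast_01jdjds71fs8gt47jexzed4czs
# Missing keys (or non-production deployments) fall back to LLMApi.get_assistant(),
# tried as a public provider, then as an org model, then as a user model.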
lightning_sdk/pipeline/__init__.py
CHANGED
@@ -1,5 +1,14 @@
+from lightning_sdk import Studio
 from lightning_sdk.pipeline.pipeline import Pipeline
 from lightning_sdk.pipeline.schedule import Schedule
-from lightning_sdk.pipeline.
+from lightning_sdk.pipeline.steps import DeploymentReleaseStep, DeploymentStep, JobStep, MMTStep
 
-__all__ = [
+__all__ = [
+    "Pipeline",
+    "JobStep",
+    "MMTStep",
+    "DeploymentStep",
+    "Schedule",
+    "Studio",
+    "DeploymentReleaseStep",
+]
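With the expanded __all__, the step classes plus Schedule and Studio can be imported directly from the subpackage:

# All of these are now re-exported by the subpackage:
from lightning_sdk.pipeline import (
    DeploymentReleaseStep,
    DeploymentStep,
    JobStep,
    MMTStep,
    Pipeline,
    Schedule,
    Studio,
)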
lightning_sdk/pipeline/pipeline.py
CHANGED
@@ -1,3 +1,4 @@
+import os
 from typing import TYPE_CHECKING, List, Optional, Union
 
 from lightning_sdk.api import UserApi
@@ -5,9 +6,10 @@ from lightning_sdk.api.pipeline_api import PipelineApi
 from lightning_sdk.lightning_cloud.login import Auth
 from lightning_sdk.organization import Organization
 from lightning_sdk.pipeline.printer import PipelinePrinter
-from lightning_sdk.pipeline.
+from lightning_sdk.pipeline.steps import DeploymentStep, JobStep, MMTStep, _get_studio
 from lightning_sdk.pipeline.utils import prepare_steps
 from lightning_sdk.services.utilities import _get_cluster
+from lightning_sdk.studio import Studio
 from lightning_sdk.teamspace import Teamspace
 from lightning_sdk.user import User
 from lightning_sdk.utils.resolve import _resolve_org, _resolve_teamspace, _resolve_user
@@ -25,6 +27,7 @@ class Pipeline:
         user: Union[str, "User", None] = None,
         cloud_account: Optional[str] = None,
         shared_filesystem: Optional[bool] = None,
+        studio: Optional[Union[Studio, str]] = None,
     ) -> None:
         """The Lightning Pipeline can be used to create complex DAG.
 
@@ -58,10 +61,12 @@
         )
 
         self._pipeline_api = PipelineApi()
-        self._cloud_account =
+        self._cloud_account = cloud_account
+        self._default_cluster = _get_cluster(
            client=self._pipeline_api._client, project_id=self._teamspace.id, cluster_id=cloud_account
        )
-        self._shared_filesystem = shared_filesystem
+        self._shared_filesystem = shared_filesystem if shared_filesystem is not None else True
+        self._studio = _get_studio(studio)
        self._is_created = False
 
        pipeline = None
@@ -75,35 +80,66 @@
        else:
            self._pipeline = None
 
-    def run(
+    def run(
+        self, steps: List[Union[JobStep, DeploymentStep, MMTStep]], schedules: Optional[List["Schedule"]] = None
+    ) -> None:
        if len(steps) == 0:
            raise ValueError("The provided steps is empty")
 
-
-
-
+        provided_cloud_account = None
+        if self._cloud_account:
+            provided_cloud_account = self._cloud_account
+        elif self._default_cluster:
+            provided_cloud_account = self._default_cluster.cluster_id
+
+        for step_idx, pipeline_step in enumerate(steps):
+            if pipeline_step.name in [None, ""]:
+                pipeline_step.name = f"step-{step_idx}"
+
+            if (
+                self._studio is not None
+                and (pipeline_step.image == "" or pipeline_step.image is None)
+                and pipeline_step.studio is None
+            ):
+                pipeline_step.cloud_account = self._studio.cloud_account
+                pipeline_step.studio = self._studio
+
+            if not pipeline_step.cloud_account and isinstance(provided_cloud_account, str):
+                pipeline_step.cloud_account = provided_cloud_account
+
+        cluster_ids = set(step.cloud_account for step in steps if step.cloud_account not in ["", None])  # noqa: C401
+
+        cloud_account = (
+            list(cluster_ids)[0] if len(cluster_ids) == 1 and self._cloud_account is None else ""  # noqa: RUF015
+        )
 
-        steps = [
-            step.to_proto(self._teamspace, self._cloud_account.cluster_id or "", self._shared_filesystem)
-            for step in steps
-        ]
+        steps = [step.to_proto(self._teamspace, cloud_account, self._shared_filesystem) for step in steps]
 
        proto_steps = prepare_steps(steps)
        schedules = schedules or []
 
+        for schedule_idx, schedule in enumerate(schedules):
+            if schedule.name is None:
+                schedule.name = f"schedule-{schedule_idx}"
+
        parent_pipeline_id = None if self._pipeline is None else self._pipeline.id
 
        self._pipeline = self._pipeline_api.create_pipeline(
            self._name,
-            self._teamspace
+            self._teamspace,
            proto_steps,
-            self._shared_filesystem
+            self._shared_filesystem,
            schedules,
            parent_pipeline_id,
        )
 
        printer = PipelinePrinter(
-            self._name,
+            self._name,
+            parent_pipeline_id is None,
+            self._pipeline,
+            self._teamspace,
+            proto_steps,
+            schedules,
        )
        printer.print_summary()
 
@@ -124,3 +160,7 @@
        if self._pipeline:
            return self._pipeline.name
        return None
+
+    @classmethod
+    def from_env(cls) -> "Pipeline":
+        return Pipeline(name=os.getenv("LIGHTNING_PIPELINE_ID", ""))
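A hedged usage sketch of the two additions above, the pipeline-level studio argument and Pipeline.from_env(); the names are placeholders, valid Lightning credentials are assumed, and step construction is elided because the step constructors live in steps.py rather than in this diff:

import os

from lightning_sdk.pipeline import Pipeline

# Sketch only: "nightly-train" and "my-studio" are placeholder names. Steps that set
# neither an image nor their own studio inherit this pipeline-level Studio (and its
# cloud account) inside run().
pipeline = Pipeline(name="nightly-train", studio="my-studio")

steps = []  # populate with JobStep / MMTStep / DeploymentStep instances
# pipeline.run(steps=steps, schedules=None)  # run() raises ValueError while the list is empty

# Inside a running pipeline, LIGHTNING_PIPELINE_ID names the pipeline to re-attach to:
os.environ.setdefault("LIGHTNING_PIPELINE_ID", "pipeline-id")  # placeholder for illustration
existing = Pipeline.from_env()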
lightning_sdk/pipeline/printer.py
CHANGED
@@ -1,7 +1,8 @@
 import os
 from typing import Any, ClassVar, Dict, List
 
-from lightning_sdk.lightning_cloud.openapi.models import
+from lightning_sdk.lightning_cloud.openapi.models import V1Pipeline, V1PipelineStepType
+from lightning_sdk.pipeline.utils import _get_spec
 
 
 class PipelinePrinter:
@@ -14,14 +15,26 @@ class PipelinePrinter:
     }
 
     def __init__(
-        self,
+        self,
+        name: str,
+        initial: bool,
+        pipeline: V1Pipeline,
+        teamspace: Any,
+        proto_steps: List[Any],
+        schedules: List[Any],
     ) -> None:
         self._name = name
         self._initial = initial
         self._pipeline = pipeline
         self._teamspace = teamspace
         self._proto_steps = proto_steps
+        self._shared_filesystem = pipeline.shared_filesystem
         self._schedules = schedules
+        cluster_ids: set[str] = set()
+        for step in self._proto_steps:
+            job_spec = _get_spec(step)
+            cluster_ids.add(job_spec.cluster_id)
+        self._cluster_ids = cluster_ids
 
     def print_summary(self) -> None:
         """Prints the full, formatted summary of the created pipeline."""
@@ -32,6 +45,7 @@ class PipelinePrinter:
         self._print_steps()
         self._print_schedules()
         self._print_cloud_account()
+        self._print_shared_filesystem()
         self._print_footer()
 
     def _print(self, value: str) -> None:
@@ -75,7 +89,7 @@ class PipelinePrinter:
         team: str = self._teamspace.name
         pipeline_name: str = self._name
 
-        pipeline_url = f"{cloud_url}/{owner}/{team}/pipelines/{pipeline_name}?app_id=pipeline
+        pipeline_url = f"{cloud_url}/{owner}/{team}/pipelines/{pipeline_name}?app_id=pipeline"
 
         self._print("\n" + "─" * 60)
         self._print(f"🔗 View your pipeline in the browser:\n {pipeline_url}")
@@ -85,18 +99,24 @@ class PipelinePrinter:
         if not self._proto_steps:
             return
 
-
-        for
-            job_spec = self._get_spec(step)
-            cluster_ids.add(job_spec.cluster_id)
-
-        self._print(f"\nCloud account{'s' if len(cluster_ids) > 1 else ''}:")
-        for cluster_id in sorted(cluster_ids):
+        self._print(f"\nCloud account{'s' if len(self._cluster_ids) > 1 else ''}:")
+        for cluster_id in sorted(self._cluster_ids):
            self._print(f"  - {cluster_id}")
 
-    def
-
-
-        if
-
-
+    def _print_shared_filesystem(self) -> None:
+        self._print(f"\nShared filesystem: {self._shared_filesystem.enabled}")
+
+        if self._shared_filesystem.enabled and len(self._cluster_ids) == 1:
+            shared_path = ""
+            cluster_id = list(self._cluster_ids)[0]  # noqa: RUF015
+            if self._pipeline.shared_filesystem.s3_folder:
+                shared_path = f"/teamspace/s3_folders/pipelines-{cluster_id}"
+            if self._pipeline.shared_filesystem.gcs_folder:
+                shared_path = f"/teamspace/gcs_folders/pipelines-{cluster_id}"
+            if self._pipeline.shared_filesystem.efs:
+                shared_path = f"/teamspace/efs_connections/pipelines-{cluster_id}"
+            if self._pipeline.shared_filesystem.filestore:
+                shared_path = f"/teamspace/gcs_connections/pipelines-{cluster_id}"
+
+            if shared_path:
+                self._print(f"  - {shared_path}")