thestage 0.5.36__py3-none-any.whl → 0.5.38__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package contents as they appear in the public registry.
thestage/.env CHANGED
@@ -1,5 +1,6 @@
1
1
  THESTAGE_CONFIG_DIR=.thestage
2
2
  THESTAGE_CONFIG_FILE=config.json
3
- THESTAGE_API_URL=https://backend-staging3.thestage.ai
4
- THESTAGE_API_URL=http://localhost:8100
5
- LOG_FILE=thestage.log
3
+ THESTAGE_CLI_ENV=DEV
4
+ THESTAGE_API_URL=https://backend-staging.thestage.ai
5
+ THESTAGE_API_URL=https://backend.thestage.ai
6
+ LOG_FILE=thestage.log
thestage/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
1
  from . import *
2
2
  __app_name__ = "thestage"
3
- __version__ = "0.5.36"
3
+ __version__ = "0.5.38"
@@ -1,3 +1,4 @@
1
+ import time
1
2
  from typing import Optional, List
2
3
 
3
4
  import re
@@ -723,4 +724,138 @@ def inference_simulator_logs(
723
724
  logging_service.print_last_inference_simulator_logs(config=config, inference_simulator_id=inference_simulator_id, logs_number=logs_number)
724
725
 
725
726
  app_logger.info(f'Inference simulator logs - end')
726
- raise typer.Exit(0)
727
+ raise typer.Exit(0)
728
+
729
+
730
+ @inference_simulator_model_app.command("deploy-instance", no_args_is_help=True, help=__("Deploy an inference simulator model to an instance"))
731
+ def deploy_inference_simulator_model_to_instance(
732
+ unique_id: Optional[str] = typer.Argument(help=__("Inference simulator model unique ID"), ),
733
+ rented_instance_unique_id: Optional[str] = typer.Option(
734
+ None,
735
+ '--rented-instance-unique-id',
736
+ '-ruid',
737
+ help=__("The rented instance unique ID on which the inference simulator model will be deployed"),
738
+ is_eager=False,
739
+ ),
740
+ self_hosted_instance_unique_id: Optional[str] = typer.Option(
741
+ None,
742
+ '--self-hosted-instance-unique-id',
743
+ '-suid',
744
+ help=__("The self-hosted instance unique ID on which the inference simulator model will be deployed"),
745
+ is_eager=False,
746
+ ),
747
+ working_directory: Optional[str] = typer.Option(
748
+ None,
749
+ "--working-directory",
750
+ "-wd",
751
+ help=__("Full path to working directory. By default, the current directory is used"),
752
+ show_default=False,
753
+ is_eager=False,
754
+ ),
755
+ enable_log_stream: Optional[bool] = typer.Option(
756
+ True,
757
+ " /--no-logs",
758
+ " /-nl",
759
+ help=__("Disable real-time log streaming"),
760
+ is_eager=False,
761
+ ),
762
+ ):
763
+ """
764
+ Deploy an inference simulator model to an instance
765
+ """
766
+
767
+ if unique_id and not re.match(r"^[a-zA-Z0-9-]+$", unique_id):
768
+ raise typer.BadParameter(__("Invalid UID format. The UID can only contain letters, numbers, and hyphens."))
769
+
770
+ unique_id_with_timestamp = f"{unique_id}-{int(time.time())}"
771
+
772
+ app_logger.info(f'Deploying an inference simulator model')
773
+
774
+ service_factory = validate_config_and_get_service_factory(working_directory=working_directory)
775
+ config = service_factory.get_config_provider().get_full_config()
776
+
777
+ project_service = service_factory.get_project_service()
778
+
779
+ project_service.project_deploy_inference_simulator_model_to_instance(
780
+ config=config,
781
+ unique_id=unique_id,
782
+ unique_id_with_timestamp=unique_id_with_timestamp,
783
+ rented_instance_unique_id=rented_instance_unique_id,
784
+ self_hosted_instance_unique_id=self_hosted_instance_unique_id,
785
+ )
786
+
787
+ if enable_log_stream:
788
+ logging_service: LoggingService = service_factory.get_logging_service()
789
+
790
+ logging_service.stream_inference_simulator_logs_with_controls(
791
+ config=config,
792
+ slug=unique_id_with_timestamp
793
+ )
794
+ raise typer.Exit(0)
795
+
796
+
797
+ @inference_simulator_model_app.command("deploy-sagemaker", no_args_is_help=True, help=__("Deploy an inference simulator model to SageMaker"))
798
+ def deploy_inference_simulator_model_to_sagemaker(
799
+ unique_id: Optional[str] = typer.Argument(help=__("Inference simulator model unique ID"), ),
800
+ arn: Optional[str] = typer.Option(
801
+ None,
802
+ '--amazon-resource-name',
803
+ '-arn',
804
+ help=__("The Amazon Resource Name of the IAM Role to use, e.g., arn:aws:iam::{aws_account_id}:role/{role}"),
805
+ is_eager=False,
806
+ ),
807
+ working_directory: Optional[str] = typer.Option(
808
+ None,
809
+ "--working-directory",
810
+ "-wd",
811
+ help=__("Full path to working directory. By default, the current directory is used"),
812
+ show_default=False,
813
+ is_eager=False,
814
+ ),
815
+ instance_type: Optional[str] = typer.Option(
816
+ None,
817
+ '--instance-type',
818
+ '-it',
819
+ help=__("Instance type on which the inference simulator model will be deployed"),
820
+ is_eager=False,
821
+ ),
822
+ initial_variant_weight: Optional[float] = typer.Option(
823
+ None,
824
+ "--initial-variant-weight",
825
+ "-ivw",
826
+ help=__("Initial Variant Weight. By default 1.0"),
827
+ show_default=False,
828
+ is_eager=False,
829
+ ),
830
+ initial_instance_count: Optional[int] = typer.Option(
831
+ None,
832
+ "--initial-instance-count",
833
+ "-iic",
834
+ help=__("Initial Instance Count"),
835
+ show_default=False,
836
+ is_eager=False,
837
+ ),
838
+
839
+ ):
840
+ """
841
+ Deploy an inference simulator model to SageMaker
842
+ """
843
+
844
+ if unique_id and not re.match(r"^[a-zA-Z0-9-]+$", unique_id):
845
+ raise typer.BadParameter(__("Invalid UID format. The UID can only contain letters, numbers, and hyphens."))
846
+
847
+ app_logger.info(f'Deploying an inference simulator model')
848
+
849
+ service_factory = validate_config_and_get_service_factory(working_directory=working_directory)
850
+ config = service_factory.get_config_provider().get_full_config()
851
+
852
+ project_service = service_factory.get_project_service()
853
+
854
+ project_service.project_deploy_inference_simulator_model_to_sagemaker(
855
+ config=config,
856
+ unique_id=unique_id,
857
+ arn=arn,
858
+ instance_type=instance_type,
859
+ initial_variant_weight=initial_variant_weight,
860
+ initial_instance_count=initial_instance_count,
861
+ )
@@ -11,6 +11,7 @@ class ProjectInferenceSimulatorModelEntity(BaseModel):
11
11
  )
12
12
 
13
13
  id: Optional[int] = Field(None, alias='ID')
14
+ slug: Optional[str] = Field(None, alias='UNIQUE_ID')
14
15
  status: Optional[str] = Field(None, alias='STATUS')
15
16
  commit_hash: Optional[str] = Field(None, alias='COMMIT_HASH')
16
17
  environment_metadata: Optional[str] = Field(None, alias='ENVIRONMENT_METADATA')
@@ -10,6 +10,14 @@ from thestage.services.clients.thestage_api.dtos.docker_container_controller.doc
10
10
  from thestage.services.clients.thestage_api.dtos.docker_container_controller.docker_container_list_response import \
11
11
  DockerContainerListResponse
12
12
  from thestage.services.clients.thestage_api.dtos.entity_filter_request import EntityFilterRequest
13
+ from thestage.services.clients.thestage_api.dtos.inference_controller.deploy_inference_model_to_instance_request import \
14
+ DeployInferenceModelToInstanceRequest
15
+ from thestage.services.clients.thestage_api.dtos.inference_controller.deploy_inference_model_to_instance_response import \
16
+ DeployInferenceModelToInstanceResponse
17
+ from thestage.services.clients.thestage_api.dtos.inference_controller.deploy_inference_model_to_sagemaker_request import \
18
+ DeployInferenceModelToSagemakerRequest
19
+ from thestage.services.clients.thestage_api.dtos.inference_controller.deploy_inference_model_to_sagemaker_response import \
20
+ DeployInferenceModelToSagemakerResponse
13
21
  from thestage.services.clients.thestage_api.dtos.inference_controller.get_inference_simulator_request import \
14
22
  GetInferenceSimulatorRequest
15
23
  from thestage.services.clients.thestage_api.dtos.inference_controller.get_inference_simulator_response import \
@@ -736,6 +744,53 @@ class TheStageApiClient(TheStageApiClientCore):
736
744
  return GetInferenceSimulatorResponse.model_validate(response) if response else None
737
745
 
738
746
 
747
+ @error_handler()
748
+ def deploy_inference_model_to_instance(
749
+ self,
750
+ token: str,
751
+ unique_id: str,
752
+ unique_id_with_timestamp: str,
753
+ rented_instance_unique_id: Optional[str] = None,
754
+ self_hosted_instance_unique_id: Optional[str] = None,
755
+
756
+ ) -> Optional[DeployInferenceModelToInstanceResponse]:
757
+ request = DeployInferenceModelToInstanceRequest(
758
+ inferenceSimulatorSlug=unique_id_with_timestamp,
759
+ modelSlug=unique_id,
760
+ instanceRentedSlug = rented_instance_unique_id,
761
+ selfhostedInstanceSlug = self_hosted_instance_unique_id
762
+ )
763
+
764
+ response = self._request(
765
+ method='POST',
766
+ url='/user-api/v1/inference-simulator-model/deploy/instance',
767
+ data=request.model_dump(),
768
+ token=token,
769
+ )
770
+ return DeployInferenceModelToInstanceResponse.model_validate(response) if response else None
771
+
772
+
773
+ @error_handler()
774
+ def deploy_inference_model_to_sagemaker(
775
+ self,
776
+ token: str,
777
+ unique_id: str,
778
+ arn: Optional[str] = None,
779
+ ) -> Optional[DeployInferenceModelToSagemakerResponse]:
780
+ request = DeployInferenceModelToSagemakerRequest(
781
+ modelSlug=unique_id,
782
+ arn=arn,
783
+ )
784
+
785
+ response = self._request(
786
+ method='POST',
787
+ url='/user-api/v1/inference-simulator-model/grant-user-arn-access',
788
+ data=request.model_dump(),
789
+ token=token,
790
+ )
791
+ return DeployInferenceModelToSagemakerResponse.model_validate(response) if response else None
792
+
793
+
739
794
  def query_user_logs(self, token: str, limit: int, task_id: Optional[int] = None, inference_simulator_id: Optional[int] = None) -> UserLogsQueryResponse:
740
795
  request = UserLogsQueryRequest(
741
796
  inferenceSimulatorId=inference_simulator_id,
@@ -0,0 +1,14 @@
1
+ from typing import Optional, List
2
+
3
+ from pydantic import Field, ConfigDict, BaseModel
4
+
5
+ from thestage.services.clients.thestage_api.dtos.entity_filter_request import EntityFilterRequest
6
+
7
+
8
+ class DeployInferenceModelToInstanceRequest(BaseModel):
9
+ model_config = ConfigDict(use_enum_values=True)
10
+
11
+ modelSlug: Optional[str] = Field(None, alias='modelSlug')
12
+ instanceRentedSlug: Optional[str] = Field(None, alias='instanceRentedSlug')
13
+ selfhostedInstanceSlug: Optional[str] = Field(None, alias='selfhostedInstanceSlug')
14
+ inferenceSimulatorSlug: Optional[str] = Field(None, alias='inferenceSimulatorSlug')
@@ -0,0 +1,13 @@
1
+ from typing import Optional
2
+
3
+ from pydantic import Field, ConfigDict
4
+
5
+ from thestage.services.clients.thestage_api.dtos.base_response import TheStageBaseResponse
6
+ from thestage.services.project.dto.inference_simulator_model_dto import InferenceSimulatorModelDto
7
+
8
+
9
+ class DeployInferenceModelToInstanceResponse(TheStageBaseResponse):
10
+ model_config = ConfigDict(use_enum_values=True)
11
+ inferenceSimulatorModel: Optional[InferenceSimulatorModelDto] = Field(None, alias='inferenceSimulatorModel')
12
+
13
+
@@ -0,0 +1,12 @@
1
+ from typing import Optional, List
2
+
3
+ from pydantic import Field, ConfigDict, BaseModel
4
+
5
+ from thestage.services.clients.thestage_api.dtos.entity_filter_request import EntityFilterRequest
6
+
7
+
8
+ class DeployInferenceModelToSagemakerRequest(BaseModel):
9
+ model_config = ConfigDict(use_enum_values=True)
10
+
11
+ modelSlug: Optional[str] = Field(None, alias='modelSlug')
12
+ arn: Optional[str] = Field(None, alias='arn')
@@ -0,0 +1,12 @@
1
+ from pydantic import Field, ConfigDict
2
+
3
+ from thestage.services.clients.thestage_api.dtos.base_response import TheStageBaseResponse
4
+
5
+
6
+ class DeployInferenceModelToSagemakerResponse(TheStageBaseResponse):
7
+ model_config = ConfigDict(use_enum_values=True)
8
+ modelId: str = Field(None, alias='modelId')
9
+ ecrImageUrl: str = Field(None, alias='ecrImageUrl')
10
+ s3ArtifactsUrl: str = Field(None, alias='s3ArtifactsUrl')
11
+
12
+
@@ -196,7 +196,7 @@ class ContainerService(AbstractService):
196
196
  typer.echo(__("Unable to connect to container: container system_name is missing"))
197
197
  raise typer.Exit(1)
198
198
 
199
- starting_directory: str = '/app'
199
+ starting_directory: str = '/'
200
200
  workspace_mappings = {v for v in container.mappings.directory_mappings.values() if v.startswith('/workspace/') or v == '/workspace'}
201
201
  if len(workspace_mappings) > 0:
202
202
  starting_directory = '/workspace'
@@ -5,6 +5,7 @@ class InferenceSimulatorModelDto(BaseModel):
5
5
  model_config = ConfigDict(use_enum_values=True)
6
6
 
7
7
  id: Optional[int] = Field(None, alias='id')
8
+ slug: Optional[str] = Field(None, alias='slug')
8
9
  client_id: Optional[int] = Field(None, alias='clientId')
9
10
  instance_rented_id: Optional[int] = Field(None, alias='instanceRentedId')
10
11
  selfhosted_instance_id: Optional[int] = Field(None, alias='selfhostedInstanceId')
@@ -12,6 +12,7 @@ class ProjectInferenceSimulatorModelMapper(AbstractMapper):
12
12
 
13
13
  return ProjectInferenceSimulatorModelEntity(
14
14
  id=item.id,
15
+ slug=item.slug,
15
16
  status=item.status or '',
16
17
  commit_hash=item.commit_hash or '',
17
18
  environment_metadata=item.environment_metadata or '',
@@ -1,8 +1,12 @@
1
1
  import os
2
+ import time
3
+ from datetime import datetime
2
4
  from pathlib import Path
3
5
  from typing import Optional, List
4
6
 
5
7
  import json
8
+
9
+ import boto3
6
10
  import click
7
11
  import typer
8
12
  from git import Commit
@@ -18,6 +22,10 @@ from thestage.i18n.translation import __
18
22
  from thestage.services.clients.git.git_client import GitLocalClient
19
23
  from thestage.services.clients.thestage_api.dtos.container_response import DockerContainerDto
20
24
  from thestage.services.clients.thestage_api.dtos.enums.container_status import DockerContainerStatus
25
+ from thestage.services.clients.thestage_api.dtos.inference_controller.deploy_inference_model_to_instance_response import \
26
+ DeployInferenceModelToInstanceResponse
27
+ from thestage.services.clients.thestage_api.dtos.inference_controller.deploy_inference_model_to_sagemaker_response import \
28
+ DeployInferenceModelToSagemakerResponse
21
29
  from thestage.services.clients.thestage_api.dtos.inference_controller.get_inference_simulator_response import \
22
30
  GetInferenceSimulatorResponse
23
31
  from thestage.services.clients.thestage_api.dtos.paginated_entity_list import PaginatedEntityList
@@ -902,3 +910,170 @@ class ProjectService(AbstractService):
902
910
 
903
911
  return project_config
904
912
 
913
+ @error_handler()
914
+ def project_deploy_inference_simulator_model_to_instance(
915
+ self,
916
+ config: ConfigEntity,
917
+ unique_id: Optional[str] = None,
918
+ unique_id_with_timestamp: Optional[str] = None,
919
+ rented_instance_unique_id: Optional[str] = None,
920
+ self_hosted_instance_unique_id: Optional[str] = None,
921
+ ) -> None:
922
+ project_config: ProjectConfig = self.__get_fixed_project_config(config=config)
923
+ if not project_config:
924
+ typer.echo(
925
+ __("No project found at the path: %path%. Please initialize or clone a project first. Or provide path to project using --working-directory option.",
926
+ {"path": config.runtime.working_directory}))
927
+ raise typer.Exit(1)
928
+
929
+ if rented_instance_unique_id and self_hosted_instance_unique_id:
930
+ typer.echo(__("Error: Cannot provide both rented and self-hosted instance unique IDs."))
931
+ raise typer.Exit(1)
932
+
933
+ if not rented_instance_unique_id and not self_hosted_instance_unique_id:
934
+ typer.echo(__("Error: Either a rented instance ID or a self-hosted instance unique ID must be provided."))
935
+ raise typer.Exit(1)
936
+
937
+ project_config: ProjectConfig = self.__config_provider.read_project_config()
938
+ if not project_config:
939
+ typer.echo(__("No project found at the path: %path%. Please initialize or clone a project first.",
940
+ {"path": config.runtime.working_directory}))
941
+ raise typer.Exit(1)
942
+
943
+ typer.echo(__("Creating inference simulator with unique ID: %unique_id_with_timestamp%", {"unique_id_with_timestamp": unique_id_with_timestamp}))
944
+ deploy_model_to_instance_response: DeployInferenceModelToInstanceResponse = self.__thestage_api_client.deploy_inference_model_to_instance(
945
+ token=config.main.thestage_auth_token,
946
+ unique_id=unique_id,
947
+ unique_id_with_timestamp=unique_id_with_timestamp,
948
+ rented_instance_unique_id=rented_instance_unique_id,
949
+ self_hosted_instance_unique_id=self_hosted_instance_unique_id
950
+ )
951
+ if deploy_model_to_instance_response:
952
+ if deploy_model_to_instance_response.message:
953
+ typer.echo(deploy_model_to_instance_response.message)
954
+ if deploy_model_to_instance_response.is_success:
955
+ typer.echo("Inference simulator has been scheduled to run successfully.")
956
+ else:
957
+ typer.echo(__(
958
+ 'Inference simulator failed to run with an error: %server_massage%',
959
+ {'server_massage': deploy_model_to_instance_response.message or ""}
960
+ ))
961
+ raise typer.Exit(1)
962
+ else:
963
+ typer.echo(__("Inference simulator failed to run with an error"))
964
+ raise typer.Exit(1)
965
+
966
+
967
+ @error_handler()
968
+ def project_deploy_inference_simulator_model_to_sagemaker(
969
+ self,
970
+ config: ConfigEntity,
971
+ unique_id: Optional[str] = None,
972
+ arn: Optional[str] = None,
973
+ instance_type: Optional[str] = None,
974
+ initial_variant_weight: Optional[float] = 1.0,
975
+ initial_instance_count: Optional[int] = None,
976
+ ) -> None:
977
+ project_config: ProjectConfig = self.__get_fixed_project_config(config=config)
978
+ if not project_config:
979
+ typer.echo(
980
+ __("No project found at the path: %path%. Please initialize or clone a project first. Or provide path to project using --working-directory option.",
981
+ {"path": config.runtime.working_directory}))
982
+ raise typer.Exit(1)
983
+
984
+ if not instance_type:
985
+ typer.echo(__("Error: Instance type is required."))
986
+ raise typer.Exit(1)
987
+
988
+ if not initial_instance_count:
989
+ typer.echo(__("Error: Initial instance count is required."))
990
+ raise typer.Exit(1)
991
+
992
+ if not arn:
993
+ typer.echo(__("Error: ARN is required."))
994
+ raise typer.Exit(1)
995
+
996
+ project_config: ProjectConfig = self.__config_provider.read_project_config()
997
+ if not project_config:
998
+ typer.echo(__("No project found at the path: %path%. Please initialize or clone a project first.",
999
+ {"path": config.runtime.working_directory}))
1000
+ raise typer.Exit(1)
1001
+
1002
+ deploy_model_to_sagemaker_response: DeployInferenceModelToSagemakerResponse = self.__thestage_api_client.deploy_inference_model_to_sagemaker(
1003
+ token=config.main.thestage_auth_token,
1004
+ unique_id=unique_id,
1005
+ arn=arn,
1006
+ )
1007
+
1008
+ if not deploy_model_to_sagemaker_response.is_success:
1009
+ typer.echo(__(
1010
+ 'Failed to prepare model for deployment with an error: %server_massage%',
1011
+ {'server_massage': deploy_model_to_sagemaker_response.message or ""}
1012
+ ))
1013
+ raise typer.Exit(1)
1014
+
1015
+ model_id = deploy_model_to_sagemaker_response.modelId
1016
+ image_uri = deploy_model_to_sagemaker_response.ecrImageUrl
1017
+ model_uri = deploy_model_to_sagemaker_response.s3ArtifactsUrl
1018
+ region = "us-east-1"
1019
+ sm_client = boto3.client('sagemaker', region_name=region)
1020
+
1021
+ try:
1022
+ container = {
1023
+ "Image": image_uri,
1024
+ "ModelDataUrl": model_uri,
1025
+ "Environment": {
1026
+ "SAGEMAKER_TRITON_DEFAULT_MODEL_NAME": model_id,
1027
+ "THESTAGE_API_URL": config.main.thestage_api_url,
1028
+ "THESTAGE_AUTH_TOKEN": config.main.thestage_auth_token
1029
+ },
1030
+ }
1031
+
1032
+ sm_model_name = f"{unique_id}-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
1033
+ create_model_response = sm_client.create_model(
1034
+ ModelName=sm_model_name,
1035
+ ExecutionRoleArn=arn,
1036
+ PrimaryContainer=container,
1037
+ )
1038
+ typer.echo(f"Model created successfully. Model ARN: {create_model_response['ModelArn']}")
1039
+
1040
+ endpoint_config_name = f"{unique_id}-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
1041
+ create_endpoint_config_response = sm_client.create_endpoint_config(
1042
+ EndpointConfigName=endpoint_config_name,
1043
+ ProductionVariants=[
1044
+ {
1045
+ "InstanceType": instance_type,
1046
+ "InitialVariantWeight": initial_variant_weight,
1047
+ "InitialInstanceCount": initial_instance_count,
1048
+ "ModelName": sm_model_name,
1049
+ "VariantName": "AllTraffic",
1050
+ }
1051
+ ],
1052
+ )
1053
+ typer.echo(
1054
+ f"Endpoint configuration created successfully. Endpoint Config ARN: {create_endpoint_config_response['EndpointConfigArn']}")
1055
+
1056
+ endpoint_name = f"{unique_id}-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
1057
+ create_endpoint_response = sm_client.create_endpoint(
1058
+ EndpointName=endpoint_name,
1059
+ EndpointConfigName=endpoint_config_name,
1060
+ )
1061
+ typer.echo(f"Endpoint created successfully. Endpoint ARN: {create_endpoint_response['EndpointArn']}")
1062
+
1063
+ typer.echo("Waiting for the endpoint to become active...")
1064
+ while True:
1065
+ resp = sm_client.describe_endpoint(EndpointName=endpoint_name)
1066
+ status = resp["EndpointStatus"]
1067
+ typer.echo(f"Status: {status}")
1068
+ if status == "InService":
1069
+ break
1070
+ elif status == "Failed":
1071
+ typer.echo(f"Endpoint creation failed. Reason: {resp.get('FailureReason', 'Unknown')}")
1072
+ raise typer.Exit(1)
1073
+ time.sleep(60)
1074
+
1075
+ typer.echo(f"Endpoint is ready. ARN: {resp['EndpointArn']} Status: {status}")
1076
+
1077
+ except Exception as e:
1078
+ typer.echo(__("Failed to deploy the inference simulator model to SageMaker: %error%", {"error": str(e)}))
1079
+ raise typer.Exit(1)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: thestage
3
- Version: 0.5.36
3
+ Version: 0.5.38
4
4
  Summary:
5
5
  Author: TheStage AI team
6
6
  Author-email: hello@thestage.ai
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3.10
11
11
  Classifier: Programming Language :: Python :: 3.11
12
12
  Classifier: Programming Language :: Python :: 3.12
13
13
  Requires-Dist: aioconsole (>=0.8.0,<0.9.0)
14
+ Requires-Dist: boto3 (>=1.35.80,<2.0.0)
14
15
  Requires-Dist: gitpython (>=3.1.40,<4.0.0)
15
16
  Requires-Dist: httpx (>=0.27.2,<0.28.0)
16
17
  Requires-Dist: paramiko (>=3.4.0,<4.0.0)
@@ -1,5 +1,5 @@
1
- thestage/.env,sha256=xWuR3yPUo1cjCJ3gDt16DvCfaeHHwzN2flrH-TUpmiQ,177
2
- thestage/__init__.py,sha256=9DOcnuLrbhUXkNgjYX3B54sWbTZupOLthYt7Fk7zsxc,65
1
+ thestage/.env,sha256=e0VVlgIiJ9t4WkAN89-LEhNoNvuUpV8_dtOrK2itceo,204
2
+ thestage/__init__.py,sha256=4lvDPyXdqcfjRSHtVEx3UV0ZcUnvMXHqjYpF8GZl50g,65
3
3
  thestage/__main__.py,sha256=4ObdWrDRaIASaR06IxtFSsoMu58eyL0MnD64habvPj8,101
4
4
  thestage/color_scheme/color_scheme.py,sha256=jzdRCX0hi_XStXi4kvPHVItKlTm7dsD3fHIdeRQLeKw,87
5
5
  thestage/config/__init__.py,sha256=RNobilYVK1WAM1utcQ8ZuATKc9Zh9M9BAjCLZTnR_TA,428
@@ -9,7 +9,7 @@ thestage/controllers/base_controller.py,sha256=lX0XsBc7ZEPD_I56cN8IBAVuWGIkOkr7J
9
9
  thestage/controllers/config_controller.py,sha256=J08JI56Th1_vuJ71MjgjKnkgJTLoH3mlGtFrDY1oePU,5333
10
10
  thestage/controllers/container_controller.py,sha256=QNJbrZMWoOnhufnQNg5gg2fDfo1oVtAWFJTjPo3t-Y0,15445
11
11
  thestage/controllers/instance_controller.py,sha256=pFhkO7U2Ta0_1dzskEj8hbE7Izw_7I4SDbq5O5-bfIY,9757
12
- thestage/controllers/project_controller.py,sha256=BVfX8ZAOHy2fKaxoadqxnxHiMoI06ieg0n3PeIAJhpQ,27123
12
+ thestage/controllers/project_controller.py,sha256=ELEQtWcesiQsJd5h3LjxEvLMKJ0VaX-0tRScSFy4-94,32295
13
13
  thestage/controllers/utils_controller.py,sha256=y_4QnjUtLkWG_M_WCpQSzX15kDM92JYFku-4FCmYBXU,1032
14
14
  thestage/debug_main.dist.py,sha256=UPIJ58yf-6FtXZj-FLAwxi7HononseuCYm9xb5KlxTs,783
15
15
  thestage/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -20,7 +20,7 @@ thestage/entities/enums/shell_type.py,sha256=-XcPiI3psjeIzd0VXWHdca5Ttgbu9M5oQPe
20
20
  thestage/entities/enums/tail_output_type.py,sha256=6S55VRRosrI3yZW8XeAGm4u12H4haeiBRvoaExUCWcU,84
21
21
  thestage/entities/enums/yes_no_response.py,sha256=PXXkB7KMB-XCHvKOBRuX6NRKyq8UW0G228OICRr2ZTk,86
22
22
  thestage/entities/project_inference_simulator.py,sha256=J7Kq3Stb9EElaa8LYLo5kzLLAzgG0ibg4u1iNo0ZUns,597
23
- thestage/entities/project_inference_simulator_model.py,sha256=3MnWG6Gv3jT8kHa6S2Dzd9X5ZTidbfuiIvmv3LAZZJ4,489
23
+ thestage/entities/project_inference_simulator_model.py,sha256=_xfjaaCSUfVKFQLlsyMEvD4oboKAzIOl6YoMyS59XyQ,546
24
24
  thestage/entities/project_task.py,sha256=7uewPIC_HF32OviQHU7uyJ8KokCuK5OjB47r-ltdpyI,672
25
25
  thestage/entities/rented_instance.py,sha256=NiOZzOUi6Wt39S5uQBlDodEnMEB1_EA72nT0ZW16eCk,944
26
26
  thestage/entities/self_hosted_instance.py,sha256=YyQsuG4PJ8aMMaZyF0wo5Q1tXoeP0BwXA2MvrqEm_rg,761
@@ -46,12 +46,11 @@ thestage/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
46
46
  thestage/services/abstract_mapper.py,sha256=_q7YLkPNRsNW5wOCqvZIu1KfpLkc7uVaAQKrMZtsGuY,218
47
47
  thestage/services/abstract_service.py,sha256=KRsn3xiVhsgX8kESBr4fzDJWbiBWk3LlScWe1uFQ8D8,3958
48
48
  thestage/services/app_config_service.py,sha256=1bJM8XzQo9L-NXSoBqSSdgu6yg9v3S0loOc-69055o4,1657
49
- thestage/services/clients/.DS_Store,sha256=nzq5skCC7S6pAZDF-RZCYmSwpDp-fIu7x4M5OFfbBUA,6148
50
49
  thestage/services/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
50
  thestage/services/clients/git/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
52
51
  thestage/services/clients/git/git_client.py,sha256=tkB7AeesiwOkA2SL2yrYj7Mu0Fgmpx9q56MaXx_we-s,11619
53
52
  thestage/services/clients/thestage_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
- thestage/services/clients/thestage_api/api_client.py,sha256=jHASaicVoqIkcBUPD21JMdlKpKTmazqc5fj8ra09emI,29158
53
+ thestage/services/clients/thestage_api/api_client.py,sha256=6KBCd1hEDblqkHAVMSXUPgvSFuJA2jdttTdCg9_1cPk,31466
55
54
  thestage/services/clients/thestage_api/dtos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
55
  thestage/services/clients/thestage_api/dtos/base_response.py,sha256=R6jjhvv0U0XD8j5N5T36q-6liiEAUy7xINFA1H7CMB8,410
57
56
  thestage/services/clients/thestage_api/dtos/cloud_provider_region.py,sha256=EJNhQywINW0dtjXKHEDb-etkWQ7keXNUSDhH-h9cabs,1011
@@ -82,6 +81,10 @@ thestage/services/clients/thestage_api/dtos/enums/selfhosted_status.py,sha256=EA
82
81
  thestage/services/clients/thestage_api/dtos/enums/task_execution_status.py,sha256=MU14K1PvkLEFnUizeVt8x_u1IP2OVkyf5pLFdrUUtYg,303
83
82
  thestage/services/clients/thestage_api/dtos/enums/task_status.py,sha256=Zg4QkuHilWTC6D_lKsby7u4y1rSRs69_kzfeFWCWoFg,282
84
83
  thestage/services/clients/thestage_api/dtos/frontend_status.py,sha256=MKrqCmFWVe-8InJqavkqz1w1N35OcT3ndBrXOu7mKdA,312
84
+ thestage/services/clients/thestage_api/dtos/inference_controller/deploy_inference_model_to_instance_request.py,sha256=5R8snv8kMqpyFtxiLKLNotGc-2a4cZYrf6_a8vY5wgg,612
85
+ thestage/services/clients/thestage_api/dtos/inference_controller/deploy_inference_model_to_instance_response.py,sha256=KMq_ySkrQC-sGUB4Znbj2qImjSPHOLKosnsOqJFsMBg,496
86
+ thestage/services/clients/thestage_api/dtos/inference_controller/deploy_inference_model_to_sagemaker_request.py,sha256=5NcSMRSlTon_zg9hI2SxRGd17Y5lRTGP3TXgOercjlw,408
87
+ thestage/services/clients/thestage_api/dtos/inference_controller/deploy_inference_model_to_sagemaker_response.py,sha256=lis4effVtF39eW8gCkWzJKxMVhIMAg3_8JHub83JR6s,422
85
88
  thestage/services/clients/thestage_api/dtos/inference_controller/get_inference_simulator_request.py,sha256=syCIJgOYxzm4E53qRVLh4u1ZbSP23_O09WhELMH6QIg,337
86
89
  thestage/services/clients/thestage_api/dtos/inference_controller/get_inference_simulator_response.py,sha256=zTt5ewR3oh4qyL0pzt0QP2jANrUpA-5V8As1YqNy3hA,474
87
90
  thestage/services/clients/thestage_api/dtos/inference_controller/inference_simulator_list_for_project_request.py,sha256=zsDl-23C2hGZIYwk4aR8wCXP6KDdOObanLsPYkQOplw,508
@@ -129,7 +132,7 @@ thestage/services/config_provider/config_provider.py,sha256=OR61zGI_4bchMolCo0_g
129
132
  thestage/services/connect/connect_service.py,sha256=EWX7mBgkb42WAtWcbjHXG4She5iglNCPIY_yx3xwf_k,9488
130
133
  thestage/services/connect/dto/remote_server_config.py,sha256=yuO0tTAgUxCiQ-h1nVvWUMlCUtR-WB_eOH6KYspV7zQ,272
131
134
  thestage/services/container/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
132
- thestage/services/container/container_service.py,sha256=AwmboxsEB0c6sszXbm9IxRMFodWkKQAmCycSypd5zrA,15076
135
+ thestage/services/container/container_service.py,sha256=OPkhFTm2NiBixCv6JSGVvBb3-6HwOuu3B3hX_fZcxZk,15073
133
136
  thestage/services/container/mapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
134
137
  thestage/services/container/mapper/container_mapper.py,sha256=ymIjBLGZnpodh0W2KwNUqRxT0VgM1hsCViNza1nhWHk,1088
135
138
  thestage/services/instance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -144,19 +147,19 @@ thestage/services/logging/logging_constants.py,sha256=4Gk2tglHW_-jnjB8uVIh-ds4fA
144
147
  thestage/services/logging/logging_service.py,sha256=B9xxhimmkRGnf4gtViJxTv0XWOWlwQfqj2ct6fviQZI,20457
145
148
  thestage/services/project/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
146
149
  thestage/services/project/dto/inference_simulator_dto.py,sha256=5U4uGp7VC1Yr-T0fqZiSNqZUIybs4J9sV25vjBbAUxI,1312
147
- thestage/services/project/dto/inference_simulator_model_dto.py,sha256=rvt2UDSLYNHWAb0_vvA3w4C2EeveM58l9fYLE1Jix9Q,1120
150
+ thestage/services/project/dto/inference_simulator_model_dto.py,sha256=j4dT-7cduzLd59QnmnfQt_aFsiUucpyJFGb-9rNx5K8,1172
148
151
  thestage/services/project/dto/project_config.py,sha256=xLy1P5fwToRNZ7ktEBzfEmmvSqXlEGeLANqWlC8VV_4,557
149
152
  thestage/services/project/mapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
150
153
  thestage/services/project/mapper/project_inference_simulator_mapper.py,sha256=UdOu9IIF5rlNPoWSaaeSKU3ODe8E5uSMgm2V5ywMWKE,812
151
- thestage/services/project/mapper/project_inference_simulator_model_mapper.py,sha256=wJsiIm6r0IvfiZci5QSytb_NH6dkhda7byTJKblSW1k,841
154
+ thestage/services/project/mapper/project_inference_simulator_model_mapper.py,sha256=PWY0iWbXhvD-G0X0_aQZAFY2bqc0lvRJcQAyC8Y-88Q,869
152
155
  thestage/services/project/mapper/project_task_mapper.py,sha256=SHIEXjYwt4vm2B1X2QiI4sCPbBarum0bTOnmTWPOlto,813
153
- thestage/services/project/project_service.py,sha256=Wg-MIGKIF3eL6O8XGFXJoqk29n7S4akPCGHIXVkiB1E,42092
156
+ thestage/services/project/project_service.py,sha256=ifrP1RrYcHE8U66eumMDvAFZhH4yz4DIbGaWO2_MCxY,50567
154
157
  thestage/services/remote_server_service.py,sha256=gOkEEgCcu4lFo__d9-zFQFBYA9IFbrHIZFhfHfIm7ww,23268
155
158
  thestage/services/service_factory.py,sha256=dnYc_Ih4TOIYjQInjzf6bpeALXRmWq1TYMIVHvEeONs,5104
156
159
  thestage/services/task/dto/task_dto.py,sha256=PJwrUsLLAoO2uA9xvzb27b9iYAoNiBcsHSxKERh2VFo,2335
157
160
  thestage/services/validation_service.py,sha256=7rjJlMCduCzHUnOkLL22t3rr4mr-XudCtyWzZKnCo7A,1621
158
- thestage-0.5.36.dist-info/LICENSE.txt,sha256=U9QrxfdD7Ie7r8z1FleuvOGQvgCF1m0Mjd78cFvWaHE,572
159
- thestage-0.5.36.dist-info/METADATA,sha256=hP5I0XeWi72viBzqKOX4Wkk13QCujsK_hAlTLwKcdLY,5506
160
- thestage-0.5.36.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
161
- thestage-0.5.36.dist-info/entry_points.txt,sha256=57pMhs8zaCM-jgeTffC0WVqCsh35Uq_dUDmzXR80CI4,47
162
- thestage-0.5.36.dist-info/RECORD,,
161
+ thestage-0.5.38.dist-info/LICENSE.txt,sha256=U9QrxfdD7Ie7r8z1FleuvOGQvgCF1m0Mjd78cFvWaHE,572
162
+ thestage-0.5.38.dist-info/METADATA,sha256=RUISssQQGMvnUff6aA0-FTx5K5nYMguGKE9Us2_lsBc,5546
163
+ thestage-0.5.38.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
164
+ thestage-0.5.38.dist-info/entry_points.txt,sha256=57pMhs8zaCM-jgeTffC0WVqCsh35Uq_dUDmzXR80CI4,47
165
+ thestage-0.5.38.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.9.0
2
+ Generator: poetry-core 1.8.1
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
Binary file