kubetorch 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kubetorch/__init__.py +59 -0
- kubetorch/cli.py +1939 -0
- kubetorch/cli_utils.py +967 -0
- kubetorch/config.py +453 -0
- kubetorch/constants.py +18 -0
- kubetorch/docs/Makefile +18 -0
- kubetorch/docs/__init__.py +0 -0
- kubetorch/docs/_ext/json_globaltoc.py +42 -0
- kubetorch/docs/api/cli.rst +10 -0
- kubetorch/docs/api/python/app.rst +21 -0
- kubetorch/docs/api/python/cls.rst +19 -0
- kubetorch/docs/api/python/compute.rst +25 -0
- kubetorch/docs/api/python/config.rst +11 -0
- kubetorch/docs/api/python/fn.rst +19 -0
- kubetorch/docs/api/python/image.rst +14 -0
- kubetorch/docs/api/python/secret.rst +18 -0
- kubetorch/docs/api/python/volumes.rst +13 -0
- kubetorch/docs/api/python.rst +101 -0
- kubetorch/docs/conf.py +69 -0
- kubetorch/docs/index.rst +20 -0
- kubetorch/docs/requirements.txt +5 -0
- kubetorch/globals.py +269 -0
- kubetorch/logger.py +59 -0
- kubetorch/resources/__init__.py +0 -0
- kubetorch/resources/callables/__init__.py +0 -0
- kubetorch/resources/callables/cls/__init__.py +0 -0
- kubetorch/resources/callables/cls/cls.py +159 -0
- kubetorch/resources/callables/fn/__init__.py +0 -0
- kubetorch/resources/callables/fn/fn.py +140 -0
- kubetorch/resources/callables/module.py +1315 -0
- kubetorch/resources/callables/utils.py +203 -0
- kubetorch/resources/compute/__init__.py +0 -0
- kubetorch/resources/compute/app.py +253 -0
- kubetorch/resources/compute/compute.py +2414 -0
- kubetorch/resources/compute/decorators.py +137 -0
- kubetorch/resources/compute/utils.py +1026 -0
- kubetorch/resources/compute/websocket.py +135 -0
- kubetorch/resources/images/__init__.py +1 -0
- kubetorch/resources/images/image.py +412 -0
- kubetorch/resources/images/images.py +64 -0
- kubetorch/resources/secrets/__init__.py +2 -0
- kubetorch/resources/secrets/kubernetes_secrets_client.py +377 -0
- kubetorch/resources/secrets/provider_secrets/__init__.py +0 -0
- kubetorch/resources/secrets/provider_secrets/anthropic_secret.py +12 -0
- kubetorch/resources/secrets/provider_secrets/aws_secret.py +16 -0
- kubetorch/resources/secrets/provider_secrets/azure_secret.py +14 -0
- kubetorch/resources/secrets/provider_secrets/cohere_secret.py +12 -0
- kubetorch/resources/secrets/provider_secrets/gcp_secret.py +16 -0
- kubetorch/resources/secrets/provider_secrets/github_secret.py +13 -0
- kubetorch/resources/secrets/provider_secrets/huggingface_secret.py +20 -0
- kubetorch/resources/secrets/provider_secrets/kubeconfig_secret.py +12 -0
- kubetorch/resources/secrets/provider_secrets/lambda_secret.py +13 -0
- kubetorch/resources/secrets/provider_secrets/langchain_secret.py +12 -0
- kubetorch/resources/secrets/provider_secrets/openai_secret.py +11 -0
- kubetorch/resources/secrets/provider_secrets/pinecone_secret.py +12 -0
- kubetorch/resources/secrets/provider_secrets/providers.py +92 -0
- kubetorch/resources/secrets/provider_secrets/ssh_secret.py +12 -0
- kubetorch/resources/secrets/provider_secrets/wandb_secret.py +11 -0
- kubetorch/resources/secrets/secret.py +224 -0
- kubetorch/resources/secrets/secret_factory.py +64 -0
- kubetorch/resources/secrets/utils.py +222 -0
- kubetorch/resources/volumes/__init__.py +0 -0
- kubetorch/resources/volumes/volume.py +340 -0
- kubetorch/servers/__init__.py +0 -0
- kubetorch/servers/http/__init__.py +0 -0
- kubetorch/servers/http/distributed_utils.py +2968 -0
- kubetorch/servers/http/http_client.py +802 -0
- kubetorch/servers/http/http_server.py +1622 -0
- kubetorch/servers/http/server_metrics.py +255 -0
- kubetorch/servers/http/utils.py +722 -0
- kubetorch/serving/__init__.py +0 -0
- kubetorch/serving/autoscaling.py +153 -0
- kubetorch/serving/base_service_manager.py +344 -0
- kubetorch/serving/constants.py +77 -0
- kubetorch/serving/deployment_service_manager.py +431 -0
- kubetorch/serving/knative_service_manager.py +487 -0
- kubetorch/serving/raycluster_service_manager.py +526 -0
- kubetorch/serving/service_manager.py +18 -0
- kubetorch/serving/templates/deployment_template.yaml +17 -0
- kubetorch/serving/templates/knative_service_template.yaml +19 -0
- kubetorch/serving/templates/kt_setup_template.sh.j2 +91 -0
- kubetorch/serving/templates/pod_template.yaml +198 -0
- kubetorch/serving/templates/raycluster_service_template.yaml +42 -0
- kubetorch/serving/templates/raycluster_template.yaml +35 -0
- kubetorch/serving/templates/service_template.yaml +21 -0
- kubetorch/serving/templates/workerset_template.yaml +36 -0
- kubetorch/serving/utils.py +344 -0
- kubetorch/utils.py +263 -0
- kubetorch-0.2.5.dist-info/METADATA +75 -0
- kubetorch-0.2.5.dist-info/RECORD +92 -0
- kubetorch-0.2.5.dist-info/WHEEL +4 -0
- kubetorch-0.2.5.dist-info/entry_points.txt +5 -0
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from functools import update_wrapper
|
|
3
|
+
from typing import List, Union
|
|
4
|
+
|
|
5
|
+
from kubetorch.resources.callables.cls.cls import cls
|
|
6
|
+
from kubetorch.resources.callables.fn.fn import fn
|
|
7
|
+
from kubetorch.resources.compute.compute import Compute
|
|
8
|
+
|
|
9
|
+
# Helper class which allows us to chain decorators in a way that allows us to reverse the order of the decorators
# The `compute` decorator ultimately unwinds the calls to properly construct the Module.
class PartialModule:
    """Accumulator for settings collected by chained decorators.

    Holds the wrapped function/class together with any distribute, autoscale,
    or async settings gathered by sibling decorators; the ``compute`` decorator
    later unwinds this object to construct the final module.
    """

    def __init__(
        self,
        fn_or_cls=None,
        distribute_args=None,
        autoscale_args=None,
        async_=False,
    ):
        # Store everything verbatim; interpretation happens in `compute`.
        self.fn_or_cls = fn_or_cls
        self.async_ = async_
        self.autoscale_args = autoscale_args
        self.distribute_args = distribute_args
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# @kubetorch.compute decorator that the user can use to wrap a function they want to deploy to a cluster,
# and then deploy it with `kt deploy my_app.py` (we collect all the decorated functions imported in the file
# to deploy them).
def compute(get_if_exists: bool = False, reload_prefixes: Union[str, List[str], None] = None, **kwargs):
    """Decorator factory wrapping a function or class as a kubetorch module.

    Args:
        get_if_exists: Passed through to ``cls``/``fn``.
        reload_prefixes: Module path prefix(es) passed through to ``cls``/``fn``.
            ``None`` (the default) is normalized to a fresh empty list, avoiding
            the shared-mutable-default pitfall of the previous ``= []`` default.
        **kwargs: ``name`` is popped and used as the module name; in
            ``kt deploy`` mode (``KT_CLI_DEPLOY_MODE == "1"``) the remaining
            kwargs are used to construct the module's ``Compute``.

    Returns:
        The decorator. Applying it returns the wrapped module, or the original
        object unchanged when decorators are globally disabled or when running
        on the server for this same function/class (matched via the
        ``KT_CLS_OR_FN_NAME`` / ``KT_MODULE_NAME`` env vars).
    """
    # Normalize the default per call so callers never share one list object.
    if reload_prefixes is None:
        reload_prefixes = []

    def decorator(func_or_cls):
        from kubetorch.globals import disable_decorators

        if disable_decorators:
            return func_or_cls

        # Unwind a PartialModule produced by @distribute/@autoscale/@async_ so
        # decorator stacking order doesn't matter; chained args applied below.
        if isinstance(func_or_cls, PartialModule):
            distribute_args = func_or_cls.distribute_args
            autoscale_args = func_or_cls.autoscale_args
            async_ = func_or_cls.async_
            func_or_cls = func_or_cls.fn_or_cls
        else:
            distribute_args = None
            autoscale_args = None
            async_ = False

        # If we're on the server attempting to load this function or class, just return it as is
        if (
            os.environ.get("KT_CLS_OR_FN_NAME") == func_or_cls.__name__
            and os.environ.get("KT_MODULE_NAME") == func_or_cls.__module__
        ):
            return func_or_cls

        module_name = kwargs.pop("name", None)
        kt_deploy_mode = os.environ.get("KT_CLI_DEPLOY_MODE") == "1"

        # Classes become `cls` modules; plain callables become `fn` modules.
        if isinstance(func_or_cls, type):
            new_module = cls(
                func_or_cls,
                name=module_name,
                get_if_exists=get_if_exists,
                reload_prefixes=reload_prefixes,
            )
        else:
            new_module = fn(
                func_or_cls,
                name=module_name,
                get_if_exists=get_if_exists,
                reload_prefixes=reload_prefixes,
            )

        if async_:
            new_module.async_ = async_

        if kt_deploy_mode:
            # Create new Compute and pass in remaining kwargs only in kt deploy mode, not when importing
            # Imported kt module will be reloaded from name when called
            new_module.compute = Compute(**kwargs)
            new_module.compute.service_name = new_module.service_name
            if distribute_args:
                distribute_args, distribute_kwargs = distribute_args
                new_module.compute.distribute(*distribute_args, **distribute_kwargs)
            if autoscale_args:
                autoscale_args, autoscale_kwargs = autoscale_args
                new_module.compute.autoscale(*autoscale_args, **autoscale_kwargs)

        return new_module

    return decorator
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def async_(func_or_cls):
    """Decorator marking a function or class for async execution.

    Returns the input unchanged when decorators are globally disabled;
    otherwise sets the async flag on an existing PartialModule (when chained
    after other kubetorch decorators) or wraps the callable in a new one.
    """
    from kubetorch.globals import disable_decorators

    if disable_decorators:
        return func_or_cls

    if not isinstance(func_or_cls, PartialModule):
        # First decorator in the chain: wrap the callable in a PartialModule.
        wrapped = PartialModule(fn_or_cls=func_or_cls, async_=True)
        update_wrapper(wrapped, func_or_cls)
        return wrapped

    # Already wrapped by another decorator: just flip the flag in place.
    func_or_cls.async_ = True
    return func_or_cls
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def distribute(*args, **kwargs):
    """Decorator factory recording distribution settings for a kubetorch module.

    The args/kwargs are stashed on a PartialModule so decorator chaining order
    can be reversed for best aesthetics; the deploy path later calls
    ``.distribute(*args, **kwargs)`` on the module's compute after deployment.
    """

    def decorator(func_or_cls):
        from kubetorch.globals import disable_decorators

        if disable_decorators:
            return func_or_cls

        # If it's already a PartialModule (from other decorators such as
        # @autoscale or @async_), update it in place — wrapping it in another
        # PartialModule would hide the sibling decorators' settings from
        # `compute`, which only unwinds one level.
        if isinstance(func_or_cls, PartialModule):
            func_or_cls.distribute_args = (args, kwargs)
            return func_or_cls

        partial_module = PartialModule(fn_or_cls=func_or_cls, distribute_args=(args, kwargs))
        update_wrapper(partial_module, func_or_cls)
        return partial_module

    return decorator
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def autoscale(*args, **kwargs):
    """Decorator factory recording autoscaling settings for a kubetorch module.

    The args/kwargs are stashed on a PartialModule so decorator chaining order
    can be reversed for best aesthetics; the deploy path later calls
    ``.autoscale(*args, **kwargs)`` on the module's compute after deployment.
    """

    def decorator(func_or_cls):
        from kubetorch.globals import disable_decorators

        if disable_decorators:
            return func_or_cls

        # If it's already a PartialModule (from other decorators such as
        # @distribute or @async_), update it in place — wrapping it in another
        # PartialModule would hide the sibling decorators' settings from
        # `compute`, which only unwinds one level.
        if isinstance(func_or_cls, PartialModule):
            func_or_cls.autoscale_args = (args, kwargs)
            return func_or_cls

        partial_module = PartialModule(fn_or_cls=func_or_cls, autoscale_args=(args, kwargs))
        update_wrapper(partial_module, func_or_cls)
        return partial_module

    return decorator
|