kalavai-client 0.5.9__tar.gz → 0.5.12__tar.gz

Files changed (20)
  1. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/PKG-INFO +26 -10
  2. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/README.md +22 -6
  3. kalavai_client-0.5.12/kalavai_client/__init__.py +2 -0
  4. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/apps.yaml +1 -1
  5. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/docker-compose-template.yaml +2 -1
  6. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/cli.py +73 -11
  7. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/utils.py +6 -3
  8. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/pyproject.toml +2 -2
  9. kalavai_client-0.5.9/kalavai_client/__init__.py +0 -2
  10. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/LICENSE +0 -0
  11. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/__main__.py +0 -0
  12. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/__init__.py +0 -0
  13. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/apps_values.yaml +0 -0
  14. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/nginx.conf +0 -0
  15. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/pool_config_template.yaml +0 -0
  16. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/pool_config_values.yaml +0 -0
  17. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/user_workspace.yaml +0 -0
  18. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/user_workspace_values.yaml +0 -0
  19. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/auth.py +0 -0
  20. {kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/cluster.py +0 -0

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: kalavai-client
- Version: 0.5.9
+ Version: 0.5.12
  Summary: Client app for kalavai platform
  License: Apache-2.0
  Keywords: LLM,platform
@@ -8,10 +8,8 @@ Author: Carlos Fernandez Musoles
  Author-email: carlos@kalavai.net
  Maintainer: Carlos Fernandez Musoles
  Maintainer-email: carlos@kalavai.net
- Requires-Python: <3.12
+ Requires-Python: >=3.4
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 2
- Classifier: Programming Language :: Python :: 2.7
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.4
  Classifier: Programming Language :: Python :: 3.5
@@ -21,6 +19,8 @@ Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Provides-Extra: dev
  Requires-Dist: Pillow (==10.3.0)
  Requires-Dist: anvil-uplink (==0.5.1)
@@ -89,6 +89,7 @@ https://github.com/user-attachments/assets/0d2316f3-79ea-46ac-b41e-8ef720f52672

  ### News updates

+ - 31 January 2025: `kalavai-client` is now a [PyPI package](https://pypi.org/project/kalavai-client/), easier to install than ever!
  - 27 January 2025: Support for accessing pools from remote computers
  - 9 January 2025: Added support for [Aphrodite Engine](https://github.com/aphrodite-engine/aphrodite-engine) models
  - 8 January 2025: Release of [a free, public, shared pool](/docs/docs/public_llm_pool.md) for community LLM deployment
@@ -129,20 +130,24 @@ Not what you were looking for? [Tell us](https://github.com/kalavai-net/kalavai-

  ## Getting started

- The `kalavai` client is the main tool to interact with the Kalavai platform, to create and manage both local and public pools and also to interact with them (e.g. deploy models). Let's go over its installation.
+ The `kalavai-client` is the main tool to interact with the Kalavai platform, to create and manage both local and public pools and also to interact with them (e.g. deploy models). Let's go over its installation.

- From release **v0.5.0, you can now install `kalavai` client in non-worker computers**. You can run a pool on a set of machines and have the client on a remote computer from which you access the LLM pool. Because the client only requires having python installed, this means more computers are now supported to run it.
+ From release **v0.5.0, you can now install `kalavai-client` in non-worker computers**. You can run a pool on a set of machines and have the client on a remote computer from which you access the LLM pool. Because the client only requires having python installed, this means more computers are now supported to run it.


- ### Requirements for a worker machine
+ ### Requirements
+
+ For workers sharing resources with the pool:

  - A laptop, desktop or Virtual Machine
  - Docker engine installed (for [linux](https://docs.docker.com/engine/install/), [Windows and MacOS](https://docs.docker.com/desktop/)) with [privilege access](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities).

+ > **Support for Windows and MacOS workers is experimental**: kalavai workers run on docker containers that require access to the host network interfaces, thus systems that do not support containers natively (Windows and MacOS) may have difficulties finding each other.
+
+ Any system that runs python 3.6+ is able to run the `kalavai-client` and therefore connect and operate an LLM pool, [without sharing with the pool](). Your computer won't be adding its capacity to the pool, but it wil be able to deploy jobs and interact with models.

- ### Requirements to run the client

- - Python 3.10+
+ #### Common issues

  If you see the following error:

@@ -211,6 +216,17 @@ Copy the joining token. On the worker node, run:
  kalavai pool join <token>
  ```

+ ### 3. Attach more clients
+
+ You can now connect to an existing pool from any computer -not just from worker nodes. To connect to a pool, run:
+
+ ```bash
+ kalavai pool attach <token>
+ ```
+
+ This won't add the machine as a worker, but you will be able to operate in the pool as if you were. This is ideal for remote access to the pool, and to use the pool from machines that cannot run workers (docker container limitations).
+
+
  ### Enough already, let's run stuff!

  Check our [examples](examples/) to put your new AI pool to good use!
@@ -275,7 +291,7 @@ Anything missing here? Give us a shout in the [discussion board](https://github.

  ### Requirements

- Python version <= 3.12.
+ Python version >= 3.6.

  ```bash
  sudo add-apt-repository ppa:deadsnakes/ppa

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/README.md

@@ -46,6 +46,7 @@ https://github.com/user-attachments/assets/0d2316f3-79ea-46ac-b41e-8ef720f52672

  ### News updates

+ - 31 January 2025: `kalavai-client` is now a [PyPI package](https://pypi.org/project/kalavai-client/), easier to install than ever!
  - 27 January 2025: Support for accessing pools from remote computers
  - 9 January 2025: Added support for [Aphrodite Engine](https://github.com/aphrodite-engine/aphrodite-engine) models
  - 8 January 2025: Release of [a free, public, shared pool](/docs/docs/public_llm_pool.md) for community LLM deployment
@@ -86,20 +87,24 @@ Not what you were looking for? [Tell us](https://github.com/kalavai-net/kalavai-

  ## Getting started

- The `kalavai` client is the main tool to interact with the Kalavai platform, to create and manage both local and public pools and also to interact with them (e.g. deploy models). Let's go over its installation.
+ The `kalavai-client` is the main tool to interact with the Kalavai platform, to create and manage both local and public pools and also to interact with them (e.g. deploy models). Let's go over its installation.

- From release **v0.5.0, you can now install `kalavai` client in non-worker computers**. You can run a pool on a set of machines and have the client on a remote computer from which you access the LLM pool. Because the client only requires having python installed, this means more computers are now supported to run it.
+ From release **v0.5.0, you can now install `kalavai-client` in non-worker computers**. You can run a pool on a set of machines and have the client on a remote computer from which you access the LLM pool. Because the client only requires having python installed, this means more computers are now supported to run it.


- ### Requirements for a worker machine
+ ### Requirements
+
+ For workers sharing resources with the pool:

  - A laptop, desktop or Virtual Machine
  - Docker engine installed (for [linux](https://docs.docker.com/engine/install/), [Windows and MacOS](https://docs.docker.com/desktop/)) with [privilege access](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities).

+ > **Support for Windows and MacOS workers is experimental**: kalavai workers run on docker containers that require access to the host network interfaces, thus systems that do not support containers natively (Windows and MacOS) may have difficulties finding each other.
+
+ Any system that runs python 3.6+ is able to run the `kalavai-client` and therefore connect and operate an LLM pool, [without sharing with the pool](). Your computer won't be adding its capacity to the pool, but it wil be able to deploy jobs and interact with models.

- ### Requirements to run the client

- - Python 3.10+
+ #### Common issues

  If you see the following error:

@@ -168,6 +173,17 @@ Copy the joining token. On the worker node, run:
  kalavai pool join <token>
  ```

+ ### 3. Attach more clients
+
+ You can now connect to an existing pool from any computer -not just from worker nodes. To connect to a pool, run:
+
+ ```bash
+ kalavai pool attach <token>
+ ```
+
+ This won't add the machine as a worker, but you will be able to operate in the pool as if you were. This is ideal for remote access to the pool, and to use the pool from machines that cannot run workers (docker container limitations).
+
+
  ### Enough already, let's run stuff!

  Check our [examples](examples/) to put your new AI pool to good use!
@@ -232,7 +248,7 @@ Anything missing here? Give us a shout in the [discussion board](https://github.

  ### Requirements

- Python version <= 3.12.
+ Python version >= 3.6.

  ```bash
  sudo add-apt-repository ppa:deadsnakes/ppa

kalavai_client-0.5.12/kalavai_client/__init__.py (new file)

@@ -0,0 +1,2 @@
+
+ __version__ = "0.5.12"

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/apps.yaml

@@ -139,7 +139,7 @@ releases:
  - name: replicas
  value: 2
  - name: image_tag
- value: "v2025.01"
+ value: "v2025.01.1"
  - name: deployment.in_cluster
  value: "True"
  - name: deployment.use_auth_key

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/assets/docker-compose-template.yaml

@@ -1,8 +1,9 @@
  services:
  {% if vpn %}
  {{vpn_name}}:
- image: gravitl/netclient:v0.30.0
+ image: gravitl/netclient:v0.24.3
  container_name: {{vpn_name}}
+ #privileged: true
  cap_add:
  - NET_ADMIN
  - SYS_MODULE

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/cli.py

@@ -337,18 +337,19 @@ def select_token_type():
  break
  return {"admin": choice == 0, "user": choice == 1, "worker": choice == 2}

- def generate_compose_config(role, node_name, is_public, node_labels=None, pool_ip=None, vpn_token=None, pool_token=None):
+ def generate_compose_config(role, node_name, is_public, use_gpus=True, node_labels=None, pool_ip=None, vpn_token=None, pool_token=None):
  num_gpus = 0
- try:
- has_gpus = check_gpu_drivers()
- if has_gpus:
- max_gpus = int(run_cmd("nvidia-smi -L | wc -l").decode())
- num_gpus = user_confirm(
- question=f"{max_gpus} NVIDIA GPU(s) detected. How many GPUs would you like to include?",
- options=range(max_gpus+1)
- )
- except:
- console.log(f"[red]WARNING: error when fetching NVIDIA GPU info. GPUs will not be used on this local machine")
+ if use_gpus:
+ try:
+ has_gpus = check_gpu_drivers()
+ if has_gpus:
+ max_gpus = int(run_cmd("nvidia-smi -L | wc -l").decode())
+ num_gpus = user_confirm(
+ question=f"{max_gpus} NVIDIA GPU(s) detected. How many GPUs would you like to include?",
+ options=range(max_gpus+1)
+ )
+ except:
+ console.log(f"[red]WARNING: error when fetching NVIDIA GPU info. GPUs will not be used on this local machine")
  if node_labels is not None:
  node_labels = " ".join([f"--node-label {key}={value}" for key, value in node_labels.items()])
  compose_values = {
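
For context, a minimal sketch of what the new `use_gpus` flag controls (illustrative code, not taken from the package; only the `nvidia-smi -L | wc -l` detection command comes from the hunk above). With `use_gpus=False`, GPU detection and the interactive prompt are skipped entirely, which is what `pool attach` relies on in a later hunk.

```python
# Illustrative stand-in for the GPU branch of generate_compose_config.
import subprocess

def count_gpus(use_gpus: bool = True) -> int:
    num_gpus = 0
    if use_gpus:  # attach-only clients pass use_gpus=False and skip detection
        try:
            # same detection mechanism as the diff: count lines of `nvidia-smi -L`
            out = subprocess.check_output("nvidia-smi -L | wc -l", shell=True)
            num_gpus = int(out.decode())
        except Exception:
            num_gpus = 0  # drivers or nvidia-smi missing: contribute CPU only
    return num_gpus

print(count_gpus(use_gpus=False))  # -> 0, detection never runs
```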
@@ -1102,6 +1103,10 @@ def pool__attach(token, *others, node_name=None):
  """
  Set creds in token on the local instance
  """
+
+ if node_name is None:
+ node_name = socket.gethostname()
+
  # check that is not attached to another instance
  if os.path.exists(USER_LOCAL_SERVER_FILE):
  option = user_confirm(
@@ -1160,6 +1165,7 @@ def pool__attach(token, *others, node_name=None):

  # Generate docker compose recipe
  generate_compose_config(
+ use_gpus=False,
  role="",
  vpn_token=vpn["key"],
  node_name=node_name,
@@ -1510,6 +1516,62 @@ def job__run(template_name, *others, values: str=None, force_namespace: str=None
  console.log(f"[red]Error when connecting to kalavai service: {str(e)}")
  return

+ @arguably.command
+ def job__test(local_template_dir, *others, values, defaults, force_namespace: str=None):
+ """
+ Helper to test local templates, useful for development
+ """
+ try:
+ CLUSTER.validate_cluster()
+ except Exception as e:
+ console.log(f"[red]Problems with your pool: {str(e)}")
+ return
+
+ if not os.path.isdir(local_template_dir):
+ console.log(f"[red]--local_template_dir ({local_template_dir}) is not a directory")
+ return
+
+ # load template
+ with open(os.path.join(local_template_dir, "template.yaml"), "r") as f:
+ template_str = f.read()
+
+ # load values
+ if not os.path.isfile(values):
+ console.log(f"[red]--values ({values}) is not a valid local file")
+ return
+ with open(values, "r") as f:
+ values_dict = yaml.safe_load(f)
+ # load defaults
+ if not os.path.isfile(defaults):
+ console.log(f"[red]--defaults ({defaults}) is not a valid local file")
+ return
+ with open(defaults, "r") as f:
+ defaults = f.read()
+
+ # submit custom deployment
+ data = {
+ "template": template_str,
+ "template_values": values_dict,
+ "default_values": defaults
+ }
+ if force_namespace is not None:
+ data["force_namespace"] = force_namespace
+
+ try:
+ result = request_to_server(
+ method="post",
+ endpoint="/v1/deploy_custom_job",
+ data=data,
+ server_creds=USER_LOCAL_SERVER_FILE,
+ user_cookie=USER_COOKIE
+ )
+ console.log("Deployment result:")
+ print(
+ json.dumps(result,indent=3)
+ )
+ except Exception as e:
+ console.log(f"[red]Error when connecting to kalavai service: {str(e)}")
+

  @arguably.command
  def job__defaults(template_name, *others):
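
For context, a hedged sketch of the payload `job__test` assembles before POSTing to `/v1/deploy_custom_job`. The field names come from the hunk above; the helper name and file paths are placeholders, not part of the package.

```python
import json
import yaml  # PyYAML, already used by the CLI

def build_test_payload(template_path, values_path, defaults_path, force_namespace=None):
    """Assemble the same dictionary job__test sends to the server (sketch)."""
    with open(template_path, "r") as f:
        template_str = f.read()            # template.yaml, sent verbatim
    with open(values_path, "r") as f:
        values_dict = yaml.safe_load(f)    # values are parsed into a mapping
    with open(defaults_path, "r") as f:
        defaults_str = f.read()            # defaults are sent as raw text
    data = {
        "template": template_str,
        "template_values": values_dict,
        "default_values": defaults_str,
    }
    if force_namespace is not None:
        data["force_namespace"] = force_namespace
    return data

if __name__ == "__main__":
    payload = build_test_payload("template.yaml", "values.yaml", "defaults.yaml")
    print(json.dumps(payload, indent=3))
```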

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/kalavai_client/utils.py

@@ -284,8 +284,11 @@ def request_to_server(
  json=data,
  headers=headers
  )
- result = response.json()
- return result
+ try:
+ result = response.json()
+ return result
+ except Exception as e:
+ raise ValueError(f"Error with HTTP request: {response.text}\n{str(e)}")


  def generate_table(columns, rows, end_sections=None):
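
For context, the guard added above matters when the server replies with a non-JSON body (for example an HTML error page from a proxy), where a bare `response.json()` raises an opaque decode error. A small sketch of that failure mode, using a simulated response rather than a real kalavai endpoint:

```python
import requests

def parse_or_raise(response: requests.Response) -> dict:
    """Mirror of the new error handling: surface the raw body on JSON failure."""
    try:
        return response.json()
    except Exception as e:
        raise ValueError(f"Error with HTTP request: {response.text}\n{str(e)}") from e

# simulate a non-JSON reply without contacting any server
bad = requests.Response()
bad.status_code = 502
bad._content = b"<html>502 Bad Gateway</html>"

try:
    parse_or_raise(bad)
except ValueError as err:
    print(err)  # includes the raw body instead of a bare JSONDecodeError
```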
@@ -389,7 +392,7 @@ def resource_path(relative_path: str):
  last_slash = relative_path.rfind("/")
  path = relative_path[:last_slash].replace("/", ".")
  filename = relative_path[last_slash+1:]
- resource = importlib.resources.path(path, filename)
+ resource = str(importlib.resources.files(path).joinpath(filename))
  except Exception as e:
  return None
  return resource
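
For context, this swaps the `importlib.resources.path` context manager (deprecated since Python 3.11) for the `files()` API introduced in Python 3.9, in line with the relaxed Python version pins elsewhere in this release. A small sketch, assuming `kalavai-client` is installed so its bundled `nginx.conf` asset resolves:

```python
# Requires kalavai-client to be installed so the asset package resolves.
import importlib.resources

# Old API (deprecated since 3.11): a context manager yielding a temporary path.
# with importlib.resources.path("kalavai_client.assets", "nginx.conf") as p:
#     print(p)

# New API (3.9+): a Traversable; str() yields a usable filesystem path for
# packages installed on disk.
resource = str(importlib.resources.files("kalavai_client.assets").joinpath("nginx.conf"))
print(resource)
```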

{kalavai_client-0.5.9 → kalavai_client-0.5.12}/pyproject.toml

@@ -1,6 +1,6 @@
  [project]
  name = "kalavai-client"
- version = "0.5.9"
+ version = "0.5.12"
  authors = [
  {name = "Carlos Fernandez Musoles", email = "carlos@kalavai.net"}
  ]
@@ -12,7 +12,7 @@ license = "Apache-2.0"
  license-files = ["LICENSE"]
  keywords = ["LLM", "platform"]
  readme = {file = "README.md", content-type = "text/markdown"}
- requires-python = "<3.12"
+ requires-python = ">=3.4"
  dependencies = [
  "requests>= 2.25",
  "psutil==5.9.8",

kalavai_client-0.5.9/kalavai_client/__init__.py (file removed)

@@ -1,2 +0,0 @@
-
- __version__ = "0.5.9"