jetson-examples 0.0.5__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: jetson-examples
3
- Version: 0.0.5
3
+ Version: 0.0.6
4
4
  Summary: Running Gen AI models and applications on NVIDIA Jetson devices with one-line command
5
5
  Author-email: luozhixin <zhixin.luo@seeed.cc>
6
6
  Project-URL: Homepage, https://github.com/Seeed-Projects/jetson-examples
@@ -1,4 +1,4 @@
1
- reComputer/__init__.py,sha256=S7u1lbuWmM3A3ajykBialmPoJUK6Jg-WmNqM-9OZFdk,22
1
+ reComputer/__init__.py,sha256=QiiYsv0kcJaB8wCWyT-FnI2b6be87HA-CrrIUn8LQhg,22
2
2
  reComputer/main.py,sha256=IOrKYEatTeAfvM1yRNNrZDAfM9LsER334FgOuRVah44,1821
3
3
  reComputer/scripts/check.sh,sha256=cUMwAjHpgJoaD5a8gTLJG7QWjF9CyKPgQ-ewRNK3FD8,127
4
4
  reComputer/scripts/run.sh,sha256=zlVqaCu12VPkecxy5FR0XDxbOeogeGBKuPU0Ow2qJyw,2066
@@ -7,18 +7,20 @@ reComputer/scripts/Sheared-LLaMA-2.7B-ShareGPT/run.sh,sha256=ktndtYTKXLJd6pHi5WN
7
7
  reComputer/scripts/hello-world/readme.md,sha256=LbPVSal_UuOsJe1v4otMfci6UN0gUqecQlynQaHiHME,87
8
8
  reComputer/scripts/hello-world/run.sh,sha256=ZjFioxwmBTtvZoBHJNpHc6AS97HksyXnOnKy3LZTShs,280
9
9
  reComputer/scripts/live-llava/run.sh,sha256=4sWld5d8Fkr9GUVqLH2S1RyOKOjobhjojSzuJu8EuCg,10137
10
+ reComputer/scripts/llama3/run.sh,sha256=i2MAtkWdYq-XaHUHZ13uO-8QeshCEBq32DOi1wCBSeo,226
10
11
  reComputer/scripts/llava/run.sh,sha256=xukwaZz7YKQFc-_LtuBUL6zcywciS0_IQ8OeTAKGe3o,148
11
12
  reComputer/scripts/llava-v1.5-7b/run.sh,sha256=Gsd3vo91bDV8LSuQPnYX82jsWXcbT2maTASA0u4ufBQ,147
12
13
  reComputer/scripts/llava-v1.6-vicuna-7b/run.sh,sha256=89VjiRvleYluLiSXfDOJBPXnkG8g0wDb3y5bFD_Nus0,165
13
14
  reComputer/scripts/nanodb/readme.md,sha256=kzWnCq4qAB95Sssj8mFs_VrH5ju3phSqygLjSRO3ceQ,247
14
15
  reComputer/scripts/nanodb/run.sh,sha256=6MKQgYqDhg-Rvh3Sk4sNm5V2jg1ZgtSVvB2fOjH_KuE,1875
15
16
  reComputer/scripts/nanoowl/run.sh,sha256=at89nn0-95XonUvLf8u6f_rAwTuw2iS7MhnlApdzEQg,161
17
+ reComputer/scripts/ollama/run.sh,sha256=AUEYvW4l8msN-XOouET-l_lYu07CR8ssMNf6u0u2n6I,143
16
18
  reComputer/scripts/stable-diffusion-webui/run.sh,sha256=ralIICACH1AiK2A8JEekUS6Cof4ZGjaRNmPYgPPJybc,57
17
19
  reComputer/scripts/text-generation-webui/run.sh,sha256=WZ2wwuSuXkd6KVQ0lx9KUvNKHx_dN-UBzraV2papdqM,304
18
20
  reComputer/scripts/whisper/run.sh,sha256=u5JPV0XhTM2Q02GOOPkoLUS-jNRE1DWxyW7FhV5qmb4,43
19
- jetson_examples-0.0.5.dist-info/LICENSE,sha256=ac_LOi8ChcJhymEfBulX98Y06wTI2IMcQnqCXZ5yay4,1066
20
- jetson_examples-0.0.5.dist-info/METADATA,sha256=VcOxBLpzkXjUFAw1R9avnl6fg279Yk9LQANWGOzhSeA,3982
21
- jetson_examples-0.0.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
22
- jetson_examples-0.0.5.dist-info/entry_points.txt,sha256=5-OdcBifoDjVXE9KjNoN6tQa8l_XSXhdbBEgL2hxeDM,58
23
- jetson_examples-0.0.5.dist-info/top_level.txt,sha256=SI-liiUOkoGwOJfMP7d7k63JKgdcbiEj6DEC8QIKI90,11
24
- jetson_examples-0.0.5.dist-info/RECORD,,
21
+ jetson_examples-0.0.6.dist-info/LICENSE,sha256=ac_LOi8ChcJhymEfBulX98Y06wTI2IMcQnqCXZ5yay4,1066
22
+ jetson_examples-0.0.6.dist-info/METADATA,sha256=1Lk1wZMhjzDIOgcAFEFRyuNc8qf0NNp5PunBGRQDhGc,3982
23
+ jetson_examples-0.0.6.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
24
+ jetson_examples-0.0.6.dist-info/entry_points.txt,sha256=5-OdcBifoDjVXE9KjNoN6tQa8l_XSXhdbBEgL2hxeDM,58
25
+ jetson_examples-0.0.6.dist-info/top_level.txt,sha256=SI-liiUOkoGwOJfMP7d7k63JKgdcbiEj6DEC8QIKI90,11
26
+ jetson_examples-0.0.6.dist-info/RECORD,,
reComputer/__init__.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.0.5"
1
+ __version__ = "0.0.6"
@@ -0,0 +1,10 @@
1
+ #!/bin/bash
2
+
3
+ # try to stop any old server
4
+ docker rm -f ollama
5
+ # start new server
6
+ ./run.sh -d --name ollama $(./autotag ollama)
7
+ # run a client
8
+ ./run.sh $(./autotag ollama) /bin/ollama run llama3
9
+ # clean new server
10
+ docker rm -f ollama
@@ -0,0 +1,7 @@
1
+ #!/bin/bash
2
+
3
+ # try to stop any old server
4
+ docker rm -f ollama
5
+ # run Front-end
6
+ ./run.sh $(./autotag ollama)
7
+ # users can only access it at http://ip:11434