inferencesh 0.4.1-py3-none-any.whl → 0.4.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of inferencesh might be problematic.

inferencesh/models/file.py

@@ -4,7 +4,6 @@ import mimetypes
  import os
  import urllib.request
  import urllib.parse
- import tempfile
  import hashlib
  from pathlib import Path
  from tqdm import tqdm

@@ -119,12 +118,10 @@ class File(BaseModel):
  return

  print(f"Downloading URL: {original_url} to {cache_path}")
- tmp_file = None
  try:
- # Download to temporary file first to avoid partial downloads in cache
- suffix = os.path.splitext(urllib.parse.urlparse(original_url).path)[1]
- tmp_file = tempfile.NamedTemporaryFile(delete=False, suffix=suffix)
- self._tmp_path = tmp_file.name
+ # Download to a temporary filename in the final directory
+ tmp_path = str(cache_path) + '.tmp'
+ self._tmp_path = tmp_path

  # Set up request with user agent
  headers = {

@@ -176,8 +173,8 @@ class File(BaseModel):
  # If we read the whole body at once, exit loop
  break

- # Move the temporary file to the cache location
- os.replace(self._tmp_path, cache_path)
+ # Rename the temporary file to the final name
+ os.rename(self._tmp_path, cache_path)
  self._tmp_path = None # Prevent deletion in __del__
  self.path = str(cache_path)
  except (urllib.error.URLError, urllib.error.HTTPError) as e:

@@ -186,7 +183,7 @@
  raise RuntimeError(f"Failed to write downloaded file to {self._tmp_path}: {str(e)}")
  except Exception as e:
  # Clean up temp file if something went wrong
- if tmp_file is not None and hasattr(self, '_tmp_path'):
+ if hasattr(self, '_tmp_path') and self._tmp_path:
  try:
  os.unlink(self._tmp_path)
  except (OSError, IOError):
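The net effect of this change: 0.4.2 streams the download into `<cache_path>.tmp` in the destination directory and renames it into place only once the body has been fully read, so the cache never holds a partially written file and the `tempfile` import is no longer needed (presumably this also keeps the temporary file on the same filesystem as the cache). A minimal standalone sketch of that pattern follows; the `download_to_cache` helper name and the 64 KiB chunk size are illustrative, not part of the SDK:

```python
import os
import urllib.request

def download_to_cache(url: str, cache_path: str) -> str:
    """Illustrative sketch of the 0.4.2 pattern: stream the download into
    `<cache_path>.tmp` and rename it into place only when it is complete."""
    tmp_path = str(cache_path) + '.tmp'  # temp file lives in the destination directory
    try:
        with urllib.request.urlopen(url) as response, open(tmp_path, 'wb') as out:
            while True:
                chunk = response.read(64 * 1024)
                if not chunk:
                    break
                out.write(chunk)
        # Same directory means same filesystem, so a plain rename finishes the download
        os.rename(tmp_path, cache_path)
        return cache_path
    except Exception:
        # Remove the partial .tmp file on any failure, mirroring the cleanup branch above
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise
```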
inferencesh-0.4.1.dist-info/METADATA → inferencesh-0.4.2.dist-info/METADATA

@@ -1,8 +1,7 @@
  Metadata-Version: 2.4
  Name: inferencesh
- Version: 0.4.1
+ Version: 0.4.2
  Summary: inference.sh Python SDK
- Author: Inference Shell Inc.
  Author-email: "Inference Shell Inc." <hello@inference.sh>
  Project-URL: Homepage, https://github.com/inference-sh/sdk
  Project-URL: Bug Tracker, https://github.com/inference-sh/sdk/issues

@@ -21,9 +20,7 @@ Requires-Dist: pytest-cov>=4.0.0; extra == "test"
  Provides-Extra: async
  Requires-Dist: aiohttp>=3.9.0; python_version >= "3.8" and extra == "async"
  Requires-Dist: aiofiles>=23.2.1; python_version >= "3.8" and extra == "async"
- Dynamic: author
  Dynamic: license-file
- Dynamic: requires-python

  # inference.sh sdk

@@ -38,47 +35,91 @@ pip install infsh
  ## client usage

  ```python
- from infsh import Inference, TaskStatus
+ from inferencesh import Inference, TaskStatus

- # create client
+ # Create client
  client = Inference(api_key="your-api-key")

- # simple usage - wait for result
- result = client.run({
- "app": "your-app",
- "input": {"key": "value"},
- "variant": "default"
- })
- print(f"output: {result['output']}")
-
- # get task info without waiting
- task = client.run(params, wait=False)
- print(f"task id: {task['id']}")
-
- # stream updates (recommended)
- for update in client.run(params, stream=True):
- status = update.get("status")
- print(f"status: {TaskStatus(status).name}")
+ # Simple synchronous usage
+ try:
+ task = client.run({
+ "app": "your-app",
+ "input": {"key": "value"},
+ "infra": "cloud",
+ "variant": "default"
+ })

- if status == TaskStatus.COMPLETED:
- print(f"output: {update.get('output')}")
- break
- elif status == TaskStatus.FAILED:
- print(f"error: {update.get('error')}")
- break
-
- # async support
+ print(f"Task ID: {task.get('id')}")
+
+ if task.get("status") == TaskStatus.COMPLETED:
+ print("✓ Task completed successfully!")
+ print(f"Output: {task.get('output')}")
+ else:
+ status = task.get("status")
+ status_name = TaskStatus(status).name if status is not None else "UNKNOWN"
+ print(f"✗ Task did not complete. Final status: {status_name}")
+
+ except Exception as exc:
+ print(f"Error: {type(exc).__name__}: {exc}")
+ raise # Re-raise to see full traceback
+
+ # Streaming updates (recommended)
+ try:
+ for update in client.run(
+ {
+ "app": "your-app",
+ "input": {"key": "value"},
+ "infra": "cloud",
+ "variant": "default"
+ },
+ stream=True # Enable streaming updates
+ ):
+ status = update.get("status")
+ status_name = TaskStatus(status).name if status is not None else "UNKNOWN"
+ print(f"Status: {status_name}")
+
+ if status == TaskStatus.COMPLETED:
+ print("✓ Task completed!")
+ print(f"Output: {update.get('output')}")
+ break
+ elif status == TaskStatus.FAILED:
+ print(f"✗ Task failed: {update.get('error')}")
+ break
+ elif status == TaskStatus.CANCELLED:
+ print("✗ Task was cancelled")
+ break
+
+ except Exception as exc:
+ print(f"Error: {type(exc).__name__}: {exc}")
+ raise # Re-raise to see full traceback
+
+ # Async support
  async def run_async():
- from infsh import AsyncInference
+ from inferencesh import AsyncInference

  client = AsyncInference(api_key="your-api-key")

- # simple usage
- result = await client.run(params)
+ # Simple usage
+ result = await client.run({
+ "app": "your-app",
+ "input": {"key": "value"},
+ "infra": "cloud",
+ "variant": "default"
+ })

- # stream updates
- async for update in await client.run(params, stream=True):
- print(f"status: {TaskStatus(update['status']).name}")
+ # Stream updates
+ async for update in await client.run(
+ {
+ "app": "your-app",
+ "input": {"key": "value"},
+ "infra": "cloud",
+ "variant": "default"
+ },
+ stream=True
+ ):
+ status = update.get("status")
+ status_name = TaskStatus(status).name if status is not None else "UNKNOWN"
+ print(f"Status: {status_name}")
  ```

  ## file handling
inferencesh-0.4.1.dist-info/RECORD → inferencesh-0.4.2.dist-info/RECORD

@@ -2,14 +2,14 @@ inferencesh/__init__.py,sha256=dY3l3yCkWoMtGX0gNXgxFnrprFRl6PPWjH8V7Qedx5g,772
  inferencesh/client.py,sha256=6wTCLqLq-QapvjCjMg8ZE3BQyg8iTL8hv8UU7t-oxmE,39360
  inferencesh/models/__init__.py,sha256=FDwcdtT6c4hbRitymjmN-hZMlQa8RbKSftkZZyjtUXA,536
  inferencesh/models/base.py,sha256=4gZQRi8J7y9U6PrGD9pRIehd1MJVJAqGakPQDs2AKFM,3251
- inferencesh/models/file.py,sha256=uh1czgk0KFl_9RHTODX0PkdnI42MSU8QMJR_I4lVKI4,10556
+ inferencesh/models/file.py,sha256=V8p5JwCzrXQMSHPsXOf8eeGxTHLFQpQqpG7AL3v0wKo,10374
  inferencesh/models/llm.py,sha256=GLcEkDizBbgcfc-zC719wDe44th3EGf3FpKERjIAPE8,27755
  inferencesh/utils/__init__.py,sha256=-xiD6uo2XzcrPAWFb_fUbaimmnW4KFKc-8IvBzaxNd4,148
  inferencesh/utils/download.py,sha256=DRGBudiPVa5bDS35KfR-DYeGRk7gO03WOelnisecwMo,1815
  inferencesh/utils/storage.py,sha256=E4J8emd4eFKdmdDgAqzz3TpaaDd3n0l8gYlMHuY8yIU,519
- inferencesh-0.4.1.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
- inferencesh-0.4.1.dist-info/METADATA,sha256=n6TOxwlvMj3fgl4Bsuq5rSyllKhI6dJT5reXXtCQQRk,4084
- inferencesh-0.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- inferencesh-0.4.1.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
- inferencesh-0.4.1.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
- inferencesh-0.4.1.dist-info/RECORD,,
+ inferencesh-0.4.2.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
+ inferencesh-0.4.2.dist-info/METADATA,sha256=7pTRbdMbhqbSc9xGqrwasgYkuHuS1NlcbrN0izdRNvk,5405
+ inferencesh-0.4.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ inferencesh-0.4.2.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
+ inferencesh-0.4.2.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
+ inferencesh-0.4.2.dist-info/RECORD,,