inferencesh 0.3.1__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of inferencesh was flagged as possibly problematic.

@@ -5,11 +5,45 @@ import os
 import urllib.request
 import urllib.parse
 import tempfile
+import hashlib
+from pathlib import Path
 from tqdm import tqdm


 class File(BaseModel):
     """A class representing a file in the inference.sh ecosystem."""
+
+    @classmethod
+    def get_cache_dir(cls) -> Path:
+        """Get the cache directory path based on environment variables or default location."""
+        if cache_dir := os.environ.get("FILE_CACHE_DIR"):
+            path = Path(cache_dir)
+        else:
+            path = Path.home() / ".cache" / "inferencesh" / "files"
+        path.mkdir(parents=True, exist_ok=True)
+        return path
+
+    def _get_cache_path(self, url: str) -> Path:
+        """Get the cache path for a URL using a hash-based directory structure."""
+        # Parse URL components
+        parsed_url = urllib.parse.urlparse(url)
+
+        # Create hash from URL path and query parameters for uniqueness
+        url_components = parsed_url.netloc + parsed_url.path
+        if parsed_url.query:
+            url_components += '?' + parsed_url.query
+        url_hash = hashlib.sha256(url_components.encode()).hexdigest()[:12]
+
+        # Get filename from URL or use default
+        filename = os.path.basename(parsed_url.path)
+        if not filename:
+            filename = 'download'
+
+        # Create hash directory in cache
+        cache_dir = self.get_cache_dir() / url_hash
+        cache_dir.mkdir(exist_ok=True)
+
+        return cache_dir / filename

     uri: Optional[str] = Field(default=None)  # Original location (URL or file path)
     path: Optional[str] = None  # Resolved local file path
     content_type: Optional[str] = None  # MIME type of the file
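
This hunk (in inferencesh/models/file.py, per the RECORD changes below) introduces a shared download cache for File objects: files land under $FILE_CACHE_DIR if set, otherwise ~/.cache/inferencesh/files, in a subdirectory named after the first 12 hex characters of the SHA-256 of the URL's host, path, and query string, with the original filename preserved. The standalone sketch below mirrors that scheme to show how a cache path is derived; it is an illustration, not part of the SDK, and the example URL and helper name are made up.

# Standalone sketch of the new cache layout (not the SDK API); the example URL
# and the helper name sketch_cache_path are invented for illustration.
import hashlib
import os
import urllib.parse
from pathlib import Path

def sketch_cache_path(url: str) -> Path:
    # $FILE_CACHE_DIR overrides the default ~/.cache/inferencesh/files root.
    cache_root = Path(os.environ.get("FILE_CACHE_DIR",
                                     Path.home() / ".cache" / "inferencesh" / "files"))
    parsed = urllib.parse.urlparse(url)
    # Key on host + path (+ query) so distinct URLs that share a filename don't collide.
    key = parsed.netloc + parsed.path
    if parsed.query:
        key += "?" + parsed.query
    url_hash = hashlib.sha256(key.encode()).hexdigest()[:12]
    filename = os.path.basename(parsed.path) or "download"
    return cache_root / url_hash / filename

print(sketch_cache_path("https://example.com/models/weights.bin?rev=2"))
# -> ~/.cache/inferencesh/files/<12 hex chars>/weights.bin (unless FILE_CACHE_DIR is set)

Hashing host, path, and query rather than the filename alone keeps two different URLs that happen to end in the same filename from overwriting each other in the cache.
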
@@ -74,11 +108,20 @@ class File(BaseModel):
         return parsed.scheme in ('http', 'https')

     def _download_url(self) -> None:
-        """Download the URL to a temporary file and update the path."""
+        """Download the URL to the cache directory and update the path."""
         original_url = self.uri
+        cache_path = self._get_cache_path(original_url)
+
+        # If file exists in cache, use it
+        if cache_path.exists():
+            print(f"Using cached file: {cache_path}")
+            self.path = str(cache_path)
+            return
+
+        print(f"Downloading URL: {original_url} to {cache_path}")
         tmp_file = None
         try:
-            # Create a temporary file with a suffix based on the URL path
+            # Download to temporary file first to avoid partial downloads in cache
             suffix = os.path.splitext(urllib.parse.urlparse(original_url).path)[1]
             tmp_file = tempfile.NamedTemporaryFile(delete=False, suffix=suffix)
             self._tmp_path = tmp_file.name
@@ -133,7 +176,10 @@ class File(BaseModel):
                     # If we read the whole body at once, exit loop
                     break

-            self.path = self._tmp_path
+            # Move the temporary file to the cache location
+            os.replace(self._tmp_path, cache_path)
+            self._tmp_path = None  # Prevent deletion in __del__
+            self.path = str(cache_path)
         except (urllib.error.URLError, urllib.error.HTTPError) as e:
             raise RuntimeError(f"Failed to download URL {original_url}: {str(e)}")
         except IOError as e:
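
These two hunks change _download_url to check the cache first and, on a miss, download to a temporary file before moving it into the cache with os.replace, so an interrupted download never leaves a partial file at the cached path. The sketch below illustrates that check-then-atomic-rename pattern with a plain urllib download; fetch_to_cache and the 1 MiB chunk size are invented for the example and are not the SDK's API.

# Illustrative sketch of the cache-check plus atomic-rename pattern, assuming a
# plain urllib download; fetch_to_cache is a hypothetical helper, not SDK code.
import os
import tempfile
import urllib.parse
import urllib.request
from pathlib import Path

def fetch_to_cache(url: str, cache_path: Path) -> Path:
    if cache_path.exists():
        # Cache hit: reuse the file downloaded on a previous run.
        return cache_path
    cache_path.parent.mkdir(parents=True, exist_ok=True)
    suffix = os.path.splitext(urllib.parse.urlparse(url).path)[1]
    # Write next to the destination so the final os.replace stays on one filesystem.
    tmp = tempfile.NamedTemporaryFile(delete=False, suffix=suffix, dir=cache_path.parent)
    try:
        with urllib.request.urlopen(url) as resp:
            while chunk := resp.read(1024 * 1024):
                tmp.write(chunk)
        tmp.close()
        # Atomic rename: the cached path is either absent or a complete file.
        os.replace(tmp.name, cache_path)
    except BaseException:
        tmp.close()
        os.unlink(tmp.name)  # remove the partial temp file on failure
        raise
    return cache_path

Note that the hunk above keeps the temporary file in tempfile's default directory instead, which works as long as that directory and the cache live on the same filesystem (os.replace cannot rename across filesystems).
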
@@ -24,16 +24,24 @@ def download(url: str, directory: Union[str, Path, StorageDir]) -> str:
     dir_path = Path(directory)
     dir_path.mkdir(exist_ok=True)

-    # Create hash directory from URL
-    url_hash = hashlib.sha256(url.encode()).hexdigest()[:12]
-    hash_dir = dir_path / url_hash
-    hash_dir.mkdir(exist_ok=True)
+    # Parse URL components
+    parsed_url = urllib.parse.urlparse(url)

-    # Keep original filename
-    filename = os.path.basename(urllib.parse.urlparse(url).path)
+    # Create hash from URL path and query parameters for uniqueness
+    url_components = parsed_url.netloc + parsed_url.path
+    if parsed_url.query:
+        url_components += '?' + parsed_url.query
+    url_hash = hashlib.sha256(url_components.encode()).hexdigest()[:12]
+
+    # Keep original filename or use a default
+    filename = os.path.basename(parsed_url.path)
     if not filename:
         filename = 'download'
-
+
+    # Create hash directory and store file
+    hash_dir = dir_path / url_hash
+    hash_dir.mkdir(exist_ok=True)
+
     output_path = hash_dir / filename

     # If file exists in directory and it's not a temp directory, return it
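
For the standalone download helper (inferencesh/utils/download.py), the hash input changes from the full URL string to host + path + query, matching the File cache scheme above, and the hash directory is now created only after the filename is resolved. A hypothetical usage sketch follows; the signature comes from the hunk header, the import path is an assumption based on the package RECORD, and the URL and directory are made up.

# Hypothetical usage of the updated helper (assumes download is importable
# from inferencesh.utils; the URL and target directory are illustrative).
from inferencesh.utils import download

# URLs that differ only in their query string hash to different subdirectories,
# while both keep the original filename "weights.bin".
path_a = download("https://example.com/models/weights.bin?rev=1", "downloads")
path_b = download("https://example.com/models/weights.bin?rev=2", "downloads")
print(path_a)  # downloads/<12 hex chars>/weights.bin
print(path_b)  # same filename under a different <12 hex chars> directory
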
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inferencesh
-Version: 0.3.1
+Version: 0.4.0
 Summary: inference.sh Python SDK
 Author: Inference Shell Inc.
 Author-email: "Inference Shell Inc." <hello@inference.sh>
@@ -2,14 +2,14 @@ inferencesh/__init__.py,sha256=dY3l3yCkWoMtGX0gNXgxFnrprFRl6PPWjH8V7Qedx5g,772
 inferencesh/client.py,sha256=cm7E-8LxP8jyb7JnANmcBtF1ya2i7sMBW2Pq-Oh-mcM,31318
 inferencesh/models/__init__.py,sha256=FDwcdtT6c4hbRitymjmN-hZMlQa8RbKSftkZZyjtUXA,536
 inferencesh/models/base.py,sha256=4gZQRi8J7y9U6PrGD9pRIehd1MJVJAqGakPQDs2AKFM,3251
-inferencesh/models/file.py,sha256=0CSbIoFTvGT1CmGnMjkNmGR1N-zg64bmheVUWiCczxE,8714
+inferencesh/models/file.py,sha256=uh1czgk0KFl_9RHTODX0PkdnI42MSU8QMJR_I4lVKI4,10556
 inferencesh/models/llm.py,sha256=GLcEkDizBbgcfc-zC719wDe44th3EGf3FpKERjIAPE8,27755
 inferencesh/utils/__init__.py,sha256=-xiD6uo2XzcrPAWFb_fUbaimmnW4KFKc-8IvBzaxNd4,148
-inferencesh/utils/download.py,sha256=7n5twvoNYDcFnKJyefImaj2YfzRI7vddQw4usZbj38c,1521
+inferencesh/utils/download.py,sha256=DRGBudiPVa5bDS35KfR-DYeGRk7gO03WOelnisecwMo,1815
 inferencesh/utils/storage.py,sha256=E4J8emd4eFKdmdDgAqzz3TpaaDd3n0l8gYlMHuY8yIU,519
-inferencesh-0.3.1.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
-inferencesh-0.3.1.dist-info/METADATA,sha256=gU67lD_F5c66kAhn375nGfLpP57ueG5PPrlJsvPD8uY,2964
-inferencesh-0.3.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-inferencesh-0.3.1.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
-inferencesh-0.3.1.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
-inferencesh-0.3.1.dist-info/RECORD,,
+inferencesh-0.4.0.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
+inferencesh-0.4.0.dist-info/METADATA,sha256=pHnblJABrxy5Iy81hpP7nV-J72Tp2JIUJ6D2UzVbSqo,2964
+inferencesh-0.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+inferencesh-0.4.0.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
+inferencesh-0.4.0.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
+inferencesh-0.4.0.dist-info/RECORD,,