hippius-0.2.35-py3-none-any.whl → hippius-0.2.38-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hippius-0.2.35.dist-info/METADATA → hippius-0.2.38.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: hippius
-Version: 0.2.35
+Version: 0.2.38
 Summary: Python SDK and CLI for Hippius blockchain storage
 Home-page: https://github.com/thenervelab/hippius-sdk
 Author: Dubs
hippius-0.2.35.dist-info/RECORD → hippius-0.2.38.dist-info/RECORD CHANGED
@@ -1,10 +1,10 @@
-hippius_sdk/__init__.py,sha256=TmGtNtxD9_iewlSe_hPs7ugnDJDcjQY6mD3V1dPh7v0,1474
+hippius_sdk/__init__.py,sha256=gn9TSRkyahOIqzJMx6svFcXNOC2etuj_uFCleBzcLIw,1508
 hippius_sdk/cli.py,sha256=aqKOYSBSWt7UhcpFt7wf9yIPJ3bznpsJ6ehOnuZ4usI,18235
 hippius_sdk/cli_assets.py,sha256=rjH3Z5A1CQr2d5CIAAAb0WMCjoZZlMWcdo0f93KqluE,635
 hippius_sdk/cli_handlers.py,sha256=HkZldE8ZDS6WHu8aSoeS_rYZ4kp3F-Kdzu-weY1c0vU,128258
 hippius_sdk/cli_parser.py,sha256=z7UvgWvvy04ey-R56qZiCqYc_9RaNq1rVDkQyXoK3JU,21100
 hippius_sdk/cli_rich.py,sha256=_jTBYMdHi2--fIVwoeNi-EtkdOb6Zy_O2TUiGvU3O7s,7324
-hippius_sdk/client.py,sha256=JIoKwq3YskuJO4YoRU8hJxVrCnk-t2oP6VDjEwZLIco,22666
+hippius_sdk/client.py,sha256=yS7fKAYIIjLReoUg1hppCD1sRy5MbSS81aLF_JLIZRE,23275
 hippius_sdk/config.py,sha256=Hf_aUYzG9ylzqauA_ABUSSB5mBTYbp-VtB36VQt2XDw,21981
 hippius_sdk/db/README.md,sha256=okDeI1qgkaZqXSlJ8L0xIE4UpuxO-qEGPIbXUvSHQjU,2030
 hippius_sdk/db/env.db.template,sha256=_6hEC3IvkzCDOAzG1_yJUKRUfCTMciNaJUicZpMCat4,217
@@ -13,12 +13,12 @@ hippius_sdk/db/migrations/20241202000001_switch_to_subaccount_encryption.sql,sha
 hippius_sdk/db/setup_database.sh,sha256=STp03qxkp2RmIVr6YZIcvQQm-_LLUOb6Jobh-52HWmg,3115
 hippius_sdk/db_utils.py,sha256=-x0rbN0as7Tn3PJPZBYCgreZe52FLH40ppA1TLxsg90,1851
 hippius_sdk/errors.py,sha256=LScJJmawVAx7aRzqqQguYSkf9iazSjEQEBNlD_GXZ6Y,1589
-hippius_sdk/ipfs.py,sha256=22xHOrOer2KHvSMepaUNRMYAK04L-AKyv8sEPSAYg7g,95429
-hippius_sdk/ipfs_core.py,sha256=eOOgLoyP9mvwndnCjldnTc7z94ImYCXY3nm7JU3e_Mo,12676
+hippius_sdk/ipfs.py,sha256=bctkg_QW4rclZLDhXgNrM6dj0z-ae2--IWaUTHmdCl8,97356
+hippius_sdk/ipfs_core.py,sha256=X4yO9M_lSNtqiEJu5R-XX97eWzbL7GrN5MOk56p8pJk,12748
 hippius_sdk/key_storage.py,sha256=SXFd6aGQw9MDLGX2vSBuAY7rdX-k5EvFm63z7_n-8yQ,8148
 hippius_sdk/substrate.py,sha256=4a7UIE4UqGcDW7luKTBgSDqfb2OIZusB39G1UiRs_YU,50158
 hippius_sdk/utils.py,sha256=rJ611yvwKSyiBpYU3w-SuyQxoghMGU-ePuslrPv5H5g,7388
-hippius-0.2.35.dist-info/METADATA,sha256=u5_HkcKZ2mOZjPV4PO9P4ycVFaYxdmx4-rR5Gks2IyQ,30088
-hippius-0.2.35.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-hippius-0.2.35.dist-info/entry_points.txt,sha256=bFAZjW3vndretf9-8s587jA2ebMVI7puhn_lVs8jPc8,149
-hippius-0.2.35.dist-info/RECORD,,
+hippius-0.2.38.dist-info/METADATA,sha256=50XF9hBgZ_Cu0jPesNPTDqhSoJb4YPNFKIOG-wFjwD8,30088
+hippius-0.2.38.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+hippius-0.2.38.dist-info/entry_points.txt,sha256=bFAZjW3vndretf9-8s587jA2ebMVI7puhn_lVs8jPc8,149
+hippius-0.2.38.dist-info/RECORD,,
hippius_sdk/__init__.py CHANGED
@@ -23,14 +23,15 @@ from hippius_sdk.config import (
     set_encryption_key,
     set_seed_phrase,
 )
-from hippius_sdk.ipfs import IPFSClient, S3PublishResult, S3DownloadResult
+from hippius_sdk.ipfs import IPFSClient, S3PublishResult, S3PublishPin, S3DownloadResult
 from hippius_sdk.utils import format_cid, format_size, hex_to_ipfs_cid
 
-__version__ = "0.2.35"
+__version__ = "0.2.38"
 __all__ = [
     "HippiusClient",
     "IPFSClient",
     "S3PublishResult",
+    "S3PublishPin",
     "S3DownloadResult",
     "get_config_value",
     "set_config_value",
hippius_sdk/client.py CHANGED
@@ -3,13 +3,13 @@ Main client for the Hippius SDK.
 """
 
 import base64
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional, Union
 
 import nacl.secret
 import nacl.utils
 
 from hippius_sdk.config import get_config_value, get_encryption_key
-from hippius_sdk.ipfs import IPFSClient, S3PublishResult, S3DownloadResult
+from hippius_sdk.ipfs import IPFSClient, S3PublishResult, S3PublishPin, S3DownloadResult
 from hippius_sdk.substrate import SubstrateClient
 
 
@@ -521,10 +521,12 @@ class HippiusClient:
         encrypt: bool,
         seed_phrase: str,
         subaccount_id: str,
+        bucket_name: str,
         store_node: str = "http://localhost:5001",
         pin_node: str = "https://store.hippius.network",
         substrate_url: str = "wss://rpc.hippius.network",
-    ) -> S3PublishResult:
+        publish: bool = True,
+    ) -> Union[S3PublishResult, S3PublishPin]:
         """
         Publish a file to IPFS and the Hippius marketplace in one operation.
 
@@ -536,12 +538,16 @@ class HippiusClient:
             file_path: Path to the file to publish
             encrypt: Whether to encrypt the file before uploading
             seed_phrase: Seed phrase for blockchain transaction signing
+            subaccount_id: The subaccount/account identifier
+            bucket_name: The bucket name for key isolation
             store_node: IPFS node URL for initial upload (default: local node)
             pin_node: IPFS node URL for backup pinning (default: remote service)
-            substrate_url: substrate url to use for the storage request.
+            substrate_url: substrate url to use for the storage request
+            publish: Whether to publish to blockchain (True) or just upload to IPFS (False)
 
         Returns:
-            S3PublishResult: Object containing CID, file info, and transaction hash
+            S3PublishResult: Object containing CID, file info, and transaction hash when publish=True
+            S3PublishPin: Object containing CID, subaccount, file_path, pin_node, substrate_url when publish=False
 
         Raises:
             HippiusIPFSError: If IPFS operations (add or pin) fail
@@ -554,9 +560,11 @@ class HippiusClient:
             encrypt,
             seed_phrase,
             subaccount_id,
+            bucket_name,
             store_node,
             pin_node,
             substrate_url,
+            publish,
         )
 
     async def s3_download(
@@ -564,6 +572,7 @@ class HippiusClient:
         cid: str,
         output_path: str,
         subaccount_id: str,
+        bucket_name: str,
         auto_decrypt: bool = True,
         download_node: str = "http://localhost:5001",
     ) -> S3DownloadResult:
@@ -571,16 +580,17 @@ class HippiusClient:
         Download a file from IPFS with automatic decryption.
 
         This method uses the download_node for immediate availability and automatically
-        manages decryption keys per seed phrase:
+        manages decryption keys per account+bucket combination:
         - Downloads the file from the specified download_node (local by default)
-        - If auto_decrypt=True, attempts to decrypt using stored keys for the seed phrase
+        - If auto_decrypt=True, attempts to decrypt using stored keys for the account+bucket
         - Falls back to client encryption key if key storage is not available
        - Returns the file in decrypted form if decryption succeeds
 
         Args:
             cid: Content Identifier (CID) of the file to download
             output_path: Path where the downloaded file will be saved
-            subaccount_id: The subaccount id as api key
+            subaccount_id: The subaccount/account identifier
+            bucket_name: The bucket name for key isolation
             auto_decrypt: Whether to attempt automatic decryption (default: True)
             download_node: IPFS node URL for download (default: local node)
 
@@ -593,5 +603,5 @@ class HippiusClient:
             ValueError: If decryption fails
         """
         return await self.ipfs_client.s3_download(
-            cid, output_path, subaccount_id, auto_decrypt, download_node
+            cid, output_path, subaccount_id, bucket_name, auto_decrypt, download_node
         )
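For orientation, here is a minimal usage sketch of the changed HippiusClient API in 0.2.38, assuming a default-constructed client; the file path, seed phrase, subaccount id, and bucket name are hypothetical placeholders, while the method and keyword names come from the signatures in the diff above.

    import asyncio

    from hippius_sdk import HippiusClient, S3PublishPin, S3PublishResult


    async def main() -> None:
        client = HippiusClient()  # assumes default configuration

        # publish=False skips the substrate transaction; per the new return
        # annotation this yields an S3PublishPin instead of an S3PublishResult.
        result = await client.s3_publish(
            file_path="report.pdf",            # hypothetical file
            encrypt=True,
            seed_phrase="<seed phrase>",       # placeholder
            subaccount_id="<subaccount id>",   # placeholder
            bucket_name="my-bucket",           # new required argument
            publish=False,
        )
        if isinstance(result, S3PublishPin):
            print("uploaded and pinned only:", result.cid)
        elif isinstance(result, S3PublishResult):
            print("published on-chain:", result.cid, result.tx_hash)

        # Downloads now also take bucket_name, since decryption keys are
        # looked up per account+bucket rather than per seed phrase.
        await client.s3_download(
            cid=result.cid,
            output_path="report.out.pdf",
            subaccount_id="<subaccount id>",
            bucket_name="my-bucket",
        )


    asyncio.run(main())

The isinstance check reflects the new Union return type: callers that pass publish=False should expect a pin-only result rather than a transaction hash.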
hippius_sdk/ipfs.py CHANGED
@@ -12,7 +12,7 @@ import shutil
 import tempfile
 import time
 import uuid
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional, Union
 
 import httpx
 from pydantic import BaseModel
@@ -62,6 +62,16 @@ class S3PublishResult(BaseModel):
     tx_hash: str
 
 
+class S3PublishPin(BaseModel):
+    """Result model for s3_publish method when publish=False."""
+
+    cid: str
+    subaccount: str
+    file_path: str
+    pin_node: str
+    substrate_url: str
+
+
 class S3DownloadResult(BaseModel):
     """Result model for s3_download method."""
 
@@ -1954,10 +1964,12 @@ class IPFSClient:
         encrypt: bool,
         seed_phrase: str,
         subaccount_id: str,
+        bucket_name: str,
         store_node: str = "http://localhost:5001",
         pin_node: str = "https://store.hippius.network",
         substrate_url: str = "wss://rpc.hippius.network",
-    ) -> S3PublishResult:
+        publish: bool = True,
+    ) -> Union[S3PublishResult, S3PublishPin]:
         """
         Publish a file to IPFS and the Hippius marketplace in one operation.
 
@@ -1966,21 +1978,25 @@ class IPFSClient:
         2. Pins to pin_node (remote) for persistence and backup
         3. Publishes to substrate marketplace
 
-        This method automatically manages encryption keys per seed phrase:
-        - If encrypt=True, it will get or generate an encryption key for the seed phrase
+        This method automatically manages encryption keys per account+bucket combination:
+        - If encrypt=True, it will get or generate an encryption key for the account+bucket
         - Keys are stored in PostgreSQL and versioned (never deleted)
-        - Always uses the most recent key for a seed phrase
+        - Always uses the most recent key for an account+bucket combination
 
         Args:
             file_path: Path to the file to publish
             encrypt: Whether to encrypt the file before uploading
             seed_phrase: Seed phrase for blockchain transaction signing
+            subaccount_id: The subaccount/account identifier
+            bucket_name: The bucket name for key isolation
             store_node: IPFS node URL for initial upload (default: local node)
             pin_node: IPFS node URL for backup pinning (default: remote service)
-            substrate_url: the substrate url to connect to for the storage request.
+            substrate_url: the substrate url to connect to for the storage request
+            publish: Whether to publish to blockchain (True) or just upload to IPFS (False)
 
         Returns:
-            S3PublishResult: Object containing CID, file info, and transaction hash
+            S3PublishResult: Object containing CID, file info, and transaction hash when publish=True
+            S3PublishPin: Object containing CID, subaccount, file_path, pin_node, substrate_url when publish=False
 
         Raises:
             HippiusIPFSError: If IPFS operations (add or pin) fail
@@ -2008,19 +2024,26 @@ class IPFSClient:
             key_storage_available = False
 
         if key_storage_available:
-            # Try to get existing key for this seed phrase
-            existing_key_b64 = await get_key_for_subaccount(subaccount_id)
+            # Create combined key identifier from account+bucket
+            account_bucket_key = f"{subaccount_id}:{bucket_name}"
+
+            # Try to get existing key for this account+bucket combination
+            existing_key_b64 = await get_key_for_subaccount(account_bucket_key)
 
             if existing_key_b64:
                 # Use existing key
-                logger.debug("Using existing encryption key for subaccount")
+                logger.debug(
+                    "Using existing encryption key for account+bucket combination"
+                )
                 encryption_key_bytes = base64.b64decode(existing_key_b64)
                 encryption_key_used = existing_key_b64
             else:
-                # Generate and store new key for this subaccount
-                logger.info("Generating new encryption key for subaccount")
+                # Generate and store new key for this account+bucket combination
+                logger.info(
+                    "Generating new encryption key for account+bucket combination"
+                )
                 new_key_b64 = await generate_and_store_key_for_subaccount(
-                    subaccount_id
+                    account_bucket_key
                 )
                 encryption_key_bytes = base64.b64decode(new_key_b64)
                 encryption_key_used = new_key_b64
@@ -2084,60 +2107,71 @@ class IPFSClient:
                 f"Failed to pin file to store node {store_node}: {str(e)}"
             )
 
-        # Publish to substrate marketplace
-        try:
-            # Pass the seed phrase directly to avoid password prompts for encrypted config
-            substrate_client = SubstrateClient(
-                seed_phrase=seed_phrase, url=substrate_url
-            )
-            logger.info(
-                f"Submitting storage request to substrate for file: {filename}, CID: {cid}"
-            )
+        # Conditionally publish to substrate marketplace based on publish flag
+        if publish:
+            try:
+                # Pass the seed phrase directly to avoid password prompts for encrypted config
+                substrate_client = SubstrateClient(
+                    seed_phrase=seed_phrase, url=substrate_url
+                )
+                logger.info(
+                    f"Submitting storage request to substrate for file: {filename}, CID: {cid}"
+                )
 
-            tx_hash = await substrate_client.storage_request(
-                files=[
-                    FileInput(
-                        file_hash=cid,
-                        file_name=filename,
-                    )
-                ],
-                miner_ids=[],
-                seed_phrase=seed_phrase,
-            )
+                tx_hash = await substrate_client.storage_request(
+                    files=[
+                        FileInput(
+                            file_hash=cid,
+                            file_name=filename,
+                        )
+                    ],
+                    miner_ids=[],
+                    seed_phrase=seed_phrase,
+                )
 
-            logger.debug(f"Substrate call result: {tx_hash}")
+                logger.debug(f"Substrate call result: {tx_hash}")
+
+                # Check if we got a valid transaction hash
+                if not tx_hash or tx_hash == "0x" or len(tx_hash) < 10:
+                    logger.error(f"Invalid transaction hash received: {tx_hash}")
+                    raise HippiusSubstrateError(
+                        f"Invalid transaction hash received: {tx_hash}. This might indicate insufficient credits or transaction failure."
+                    )
 
-            # Check if we got a valid transaction hash
-            if not tx_hash or tx_hash == "0x" or len(tx_hash) < 10:
-                logger.error(f"Invalid transaction hash received: {tx_hash}")
-                raise HippiusSubstrateError(
-                    f"Invalid transaction hash received: {tx_hash}. This might indicate insufficient credits or transaction failure."
+                logger.info(
+                    f"Successfully published to substrate with transaction: {tx_hash}"
                 )
 
-            logger.info(
-                f"Successfully published to substrate with transaction: {tx_hash}"
+            except Exception as e:
+                logger.error(f"Substrate call failed: {str(e)}")
+                logger.debug(
+                    "Possible causes: insufficient credits, network issues, invalid seed phrase, or substrate node unavailability"
+                )
+                raise HippiusSubstrateError(f"Failed to publish to substrate: {str(e)}")
+
+            return S3PublishResult(
+                cid=cid,
+                file_name=filename,
+                size_bytes=size_bytes,
+                encryption_key=encryption_key_used,
+                tx_hash=tx_hash,
             )
-
-        except Exception as e:
-            logger.error(f"Substrate call failed: {str(e)}")
-            logger.debug(
-                "Possible causes: insufficient credits, network issues, invalid seed phrase, or substrate node unavailability"
+        else:
+            # Return S3PublishPin with required information when not publishing
+            return S3PublishPin(
+                cid=cid,
+                subaccount=subaccount_id,
+                file_path=file_path,
+                pin_node=pin_node,
+                substrate_url=substrate_url,
            )
-            raise HippiusSubstrateError(f"Failed to publish to substrate: {str(e)}")
-
-        return S3PublishResult(
-            cid=cid,
-            file_name=filename,
-            size_bytes=size_bytes,
-            encryption_key=encryption_key_used,
-            tx_hash=tx_hash,
-        )
 
     async def s3_download(
         self,
         cid: str,
         output_path: str,
         subaccount_id: str,
+        bucket_name: str,
         auto_decrypt: bool = True,
         download_node: str = "http://localhost:5001",
     ) -> S3DownloadResult:
@@ -2145,16 +2179,17 @@ class IPFSClient:
         Download a file from IPFS with automatic decryption.
 
         This method uses the download_node for immediate availability and automatically
-        manages decryption keys per seed phrase:
+        manages decryption keys per account+bucket combination:
         - Downloads the file from the specified download_node (local by default)
-        - If auto_decrypt=True, attempts to decrypt using stored keys for the seed phrase
+        - If auto_decrypt=True, attempts to decrypt using stored keys for the account+bucket
         - Falls back to client encryption key if key storage is not available
         - Returns the file in decrypted form if decryption succeeds
 
         Args:
             cid: Content Identifier (CID) of the file to download
             output_path: Path where the downloaded file will be saved
-            subaccount_id: The subaccount id as api key
+            subaccount_id: The subaccount/account identifier
+            bucket_name: The bucket name for key isolation
             auto_decrypt: Whether to attempt automatic decryption (default: True)
             download_node: IPFS node URL for download (default: local node)
 
@@ -2233,13 +2268,18 @@ class IPFSClient:
         decryption_successful = False
 
         if key_storage_available:
-            # Try to get the encryption key for this seed phrase
+            # Create combined key identifier from account+bucket
+            account_bucket_key = f"{subaccount_id}:{bucket_name}"
+
+            # Try to get the encryption key for this account+bucket combination
             try:
-                existing_key_b64 = await get_key_for_subaccount(subaccount_id)
+                existing_key_b64 = await get_key_for_subaccount(
+                    account_bucket_key
+                )
 
                 if existing_key_b64:
                     logger.debug(
-                        "Found encryption key for subaccount, attempting decryption"
+                        "Found encryption key for account+bucket combination, attempting decryption"
                     )
                     decryption_attempted = True
                     encryption_key_used = existing_key_b64
@@ -2273,7 +2313,9 @@ class IPFSClient:
                     )
                     # Continue to try fallback decryption
                 else:
-                    logger.debug("No encryption key found for seed phrase")
+                    logger.debug(
+                        "No encryption key found for account+bucket combination"
+                    )
 
             except Exception as e:
                 logger.debug(f"Error retrieving key from storage: {e}")
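The key-isolation change above reduces to deriving one storage identifier from the subaccount and bucket, so different buckets under the same account end up with independent encryption keys. Below is a stand-alone sketch of that idea; derive_key_id is an illustrative helper rather than an SDK function, and the S3PublishPin model simply mirrors the definition added near the top of this file.

    from pydantic import BaseModel


    def derive_key_id(subaccount_id: str, bucket_name: str) -> str:
        """Illustrative helper mirroring the account_bucket_key pattern above."""
        return f"{subaccount_id}:{bucket_name}"


    class S3PublishPin(BaseModel):
        """Shape of the pin-only result returned when publish=False."""

        cid: str
        subaccount: str
        file_path: str
        pin_node: str
        substrate_url: str


    # Two buckets under the same subaccount resolve to different key identifiers,
    # so their files are encrypted with independent keys.
    assert derive_key_id("sub-1", "bucket-a") != derive_key_id("sub-1", "bucket-b")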
hippius_sdk/ipfs_core.py CHANGED
@@ -65,7 +65,8 @@ class AsyncIPFSClient:
         files = {"file": (filename, file_content, "application/octet-stream")}
         # Explicitly set wrap-with-directory=false to prevent wrapping in directory
         response = await self.client.post(
-            f"{self.api_url}/api/v0/add?wrap-with-directory=false", files=files
+            f"{self.api_url}/api/v0/add?wrap-with-directory=false&cid-version=1",
+            files=files,
         )
         response.raise_for_status()
         return response.json()
@@ -85,7 +86,8 @@ class AsyncIPFSClient:
         files = {"file": (filename, data, "application/octet-stream")}
         # Explicitly set wrap-with-directory=false to prevent wrapping in directory
         response = await self.client.post(
-            f"{self.api_url}/api/v0/add?wrap-with-directory=false", files=files
+            f"{self.api_url}/api/v0/add?wrap-with-directory=false&cid-version=1",
+            files=files,
         )
         response.raise_for_status()
         return response.json()
@@ -346,7 +348,7 @@ class AsyncIPFSClient:
 
         # Make the request with directory flags
         response = await self.client.post(
-            f"{self.api_url}/api/v0/add?recursive=true&wrap-with-directory=true",
+            f"{self.api_url}/api/v0/add?recursive=true&wrap-with-directory=true&cid-version=1",
             files=files,
             timeout=300.0,  # 5 minute timeout for directory uploads
         )
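The only functional change in this file is the added cid-version=1 query flag, which asks the IPFS daemon to return CIDv1 identifiers instead of the default CIDv0 form. A minimal sketch of what the updated add call amounts to against the IPFS HTTP API, assuming a local daemon on port 5001; add_file_cidv1 is an illustrative helper, not part of the SDK, and uses httpx, which the SDK already depends on.

    import asyncio

    import httpx


    async def add_file_cidv1(path: str, api_url: str = "http://localhost:5001") -> str:
        """Upload one file to an IPFS node and return its CIDv1 hash."""
        with open(path, "rb") as f:
            files = {"file": (path, f.read(), "application/octet-stream")}
        async with httpx.AsyncClient() as client:
            # Same query flags as the updated ipfs_core.py: no directory wrapping,
            # and CID version 1 for the returned identifier.
            response = await client.post(
                f"{api_url}/api/v0/add?wrap-with-directory=false&cid-version=1",
                files=files,
            )
            response.raise_for_status()
            return response.json()["Hash"]


    # Example (requires a running IPFS daemon):
    # print(asyncio.run(add_file_cidv1("example.txt")))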