poly_hammer_utils-0.0.23-py3-none-any.whl → poly_hammer_utils-0.0.25-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in its public registry, and is provided for informational purposes only.
--- a/poly_hammer_utils/constants.py
+++ b/poly_hammer_utils/constants.py
@@ -1,5 +1,8 @@
+import os
 from pathlib import Path
 
 RESOURCES_FOLDER = Path(__file__).parent / 'resources'
 
-BLENDER_STARTUP_SCRIPT = RESOURCES_FOLDER / 'scripts' / 'blender' / 'startup.py'
+BLENDER_STARTUP_SCRIPT = RESOURCES_FOLDER / 'scripts' / 'blender' / 'startup.py'
+
+ENVIRONMENT = os.environ.get('ENVIRONMENT', 'staging')
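
Note on this change: ENVIRONMENT is read once at import time and defaults to 'staging', so production behavior is strictly opt-in. A minimal sketch of the implication (the override below is hypothetical):

    import os

    # Must be set before the module is imported, since the value is
    # captured at import time; anything other than 'production' stays staging.
    os.environ['ENVIRONMENT'] = 'production'

    from poly_hammer_utils.constants import ENVIRONMENT

    assert ENVIRONMENT == 'production'
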
--- a/poly_hammer_utils/extension/meta_human_dna.py
+++ b/poly_hammer_utils/extension/meta_human_dna.py
@@ -3,10 +3,17 @@ import logging
 import zipfile
 import tempfile
 from pathlib import Path
+from poly_hammer_utils.constants import ENVIRONMENT
 from poly_hammer_utils.utilities import download_and_unzip_to_folder, download_release_file
 from poly_hammer_utils.github.release import GitHubRelease
 from poly_hammer_utils.extension.packager import package_extension, get_addon_version
-from poly_hammer_utils.extension.server import update_extension_index
+from poly_hammer_utils.extension.server import update_extension_index, sync_extensions_from_s3
+
+BLENDER_EXTENSION_SERVER_S3_BUCKET = 'poly-hammer-portal-staging-app-data'
+if ENVIRONMENT == 'production':
+    BLENDER_EXTENSION_SERVER_S3_BUCKET = 'poly-hammer-portal-production-app-data'
+
+BLENDER_EXTENSION_SERVER_S3_FOLDER = 'products/blender-extensions/meta_human_dna'
 
 logger = logging.getLogger(__name__)
 
@@ -119,7 +126,14 @@ def create_release(addon_folder: Path, releases_folder: Path):
 
     addon_version = get_addon_version(source_folder=addon_folder)
 
-    # Create the .zip files for the addon's various platforms
+    # First, sync existing extensions from S3
+    sync_extensions_from_s3(
+        repo_folder=releases_folder,
+        bucket=BLENDER_EXTENSION_SERVER_S3_BUCKET,
+        s3_prefix=BLENDER_EXTENSION_SERVER_S3_FOLDER,
+    )
+
+    # Create the new .zip files for the addon's various platforms
     addon_zip_files = package_extension(
         source_folder=addon_folder,
         output_folder=releases_folder,
@@ -137,6 +151,7 @@ def create_release(addon_folder: Path, releases_folder: Path):
         auth_token=token
     )
 
+    # Update each addon .zip to include the rig logic and core bindings
    for addon_zip_file in addon_zip_files:
        platform, arch = parse_blender_extension_zip_info(url=addon_zip_file)
        files_to_zip = rig_logic_files + core_files
@@ -157,11 +172,11 @@ def create_release(addon_folder: Path, releases_folder: Path):
            else:
                logger.error(f"Error: {file_path} not found")
 
-    # Generate the extension index and update it and upload addons to S3
+    # Generate the updated extension index and upload it and the addons .zip file to S3
     update_extension_index(
         repo_folder=releases_folder,
-        bucket=os.environ['BLENDER_EXTENSION_SERVER_S3_BUCKET'],
-        s3_folder=os.environ['BLENDER_EXTENSION_SERVER_S3_FOLDER'],
+        bucket=BLENDER_EXTENSION_SERVER_S3_BUCKET,
+        s3_folder=BLENDER_EXTENSION_SERVER_S3_FOLDER,
         blender_version=os.environ['BLENDER_VERSION'],
         docker=True
     )
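
Taken together, the meta_human_dna.py changes reorder create_release into sync-then-package: previously published platform .zips are pulled down from S3 first, so the index regenerated at the end covers prior releases as well as the new one. A condensed sketch of that flow, assuming the package is installed; the releases folder path is illustrative, and the packaging/zipping steps in between are elided:

    from pathlib import Path
    from poly_hammer_utils.extension.server import (
        sync_extensions_from_s3,
        update_extension_index,
    )
    from poly_hammer_utils.extension.meta_human_dna import (
        BLENDER_EXTENSION_SERVER_S3_BUCKET,
        BLENDER_EXTENSION_SERVER_S3_FOLDER,
    )

    releases_folder = Path('/tmp/releases')  # hypothetical working directory

    # Pull previously published .zips down first so the regenerated index
    # covers prior releases as well as the one being built.
    sync_extensions_from_s3(
        repo_folder=releases_folder,
        bucket=BLENDER_EXTENSION_SERVER_S3_BUCKET,
        s3_prefix=BLENDER_EXTENSION_SERVER_S3_FOLDER,
    )

    # ... package_extension() and per-platform zipping happen here ...

    # Regenerate the index over old + new .zips, upload, refresh the portal.
    update_extension_index(
        repo_folder=releases_folder,
        bucket=BLENDER_EXTENSION_SERVER_S3_BUCKET,
        s3_folder=BLENDER_EXTENSION_SERVER_S3_FOLDER,
        blender_version='4.5',
    )
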
--- a/poly_hammer_utils/extension/server.py
+++ b/poly_hammer_utils/extension/server.py
@@ -1,9 +1,10 @@
 import os
 import sys
-import json
 import logging
 import boto3
+import httpx
 from pathlib import Path
+from poly_hammer_utils.constants import ENVIRONMENT
 from poly_hammer_utils.utilities import shell, get_blender_executable
 
 logger = logging.getLogger(__name__)
@@ -12,25 +13,27 @@ ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
 SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
 REGION = os.environ.get('AWS_REGION')
 
-def merge_extension_index_from_s3(
-    local_index_path: Path,
+BASE_URL = 'https://api.portal.staging.polyhammer.com'
+if ENVIRONMENT == 'production':
+    BASE_URL = 'https://api.portal.polyhammer.com'
+
+PORTAL_API_KEY = os.environ.get('PORTAL_API_KEY')
+
+def sync_extensions_from_s3(
+    repo_folder: Path,
     bucket: str,
-    s3_key: str,
-    output_path: Path | None = None
-) -> dict:
+    s3_prefix: str = ''
+) -> list[Path]:
     """
-    Reads a JSON index file from S3, merges it with a local JSON index file,
-    and optionally writes the merged result to an output path.
+    Downloads all .zip extension files from an S3 folder to a local directory.
 
     Args:
-        local_index_path (Path): Path to the local JSON file to merge.
+        repo_folder (Path): Local directory to download files to.
         bucket (str): The S3 bucket name.
-        s3_key (str): The object key (path) in the S3 bucket.
-        output_path (Path, optional): Path to write the merged JSON.
-            If None, overwrites the local_index_path.
+        s3_prefix (str, optional): Prefix (folder path) in S3. Defaults to ''.
 
     Returns:
-        dict: The merged JSON data.
+        list[Path]: List of local file paths that were downloaded.
     """
     s3_client = boto3.client(
         's3',
@@ -39,33 +42,31 @@ def merge_extension_index_from_s3(
         region_name=REGION,
     )
 
-    # Read the JSON file from S3
-    logger.info(f'Reading index from s3://{bucket}/{s3_key}')
-    try:
-        response = s3_client.get_object(Bucket=bucket, Key=s3_key)
-        s3_data = json.loads(response['Body'].read().decode('utf-8'))
-    except s3_client.exceptions.NoSuchKey:
-        logger.warning(f'S3 key "{s3_key}" not found, starting with empty data')
-        s3_data = {}
-    except Exception as e:
-        logger.warning(f'Failed to read from S3: {e}, starting with empty data')
-        s3_data = {}
+    repo_folder.mkdir(parents=True, exist_ok=True)
+
+    # List all objects in the S3 prefix
+    prefix = f'{s3_prefix}/' if s3_prefix and not s3_prefix.endswith('/') else s3_prefix
+    logger.info(f'Listing objects in s3://{bucket}/{prefix}')
 
-    # Read the local JSON file
-    logger.info(f'Reading local index from "{local_index_path}"')
-    with open(local_index_path, 'r', encoding='utf-8') as f:
-        local_data = json.load(f)
+    downloaded_files = []
+    paginator = s3_client.get_paginator('list_objects_v2')
 
-    # Merge the data (local data takes precedence)
-    merged_data = {**s3_data, **local_data}
+    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
+        for obj in page.get('Contents', []):
+            key = obj['Key']
+            filename = key.split('/')[-1]
 
-    # Write the merged result
-    output = output_path or local_index_path
-    logger.info(f'Writing merged index to "{output}"')
-    with open(output, 'w', encoding='utf-8') as f:
-        json.dump(merged_data, f, indent=2)
+            # Only download .zip files
+            if not filename.endswith('.zip'):
+                continue
 
-    return merged_data
+            local_path = repo_folder / filename
+            logger.info(f'Downloading s3://{bucket}/{key} to "{local_path}"')
+            s3_client.download_file(bucket, key, str(local_path))
+            downloaded_files.append(local_path)
+
+    logger.info(f'Successfully downloaded {len(downloaded_files)} file(s) from S3')
+    return downloaded_files
 
 
 def upload_extensions_to_s3(
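
A note on the hunk above: sync_extensions_from_s3 walks the prefix with the list_objects_v2 paginator, so folders holding more than 1,000 objects (the per-page S3 limit) are still listed completely. A hedged usage sketch, assuming AWS credentials are already set in the environment; the bucket and prefix values mirror the staging constants from the meta_human_dna.py hunk and are otherwise illustrative:

    from pathlib import Path
    from poly_hammer_utils.extension.server import sync_extensions_from_s3

    # Downloads every .zip under the prefix into ./releases and returns
    # the local paths; non-.zip keys are skipped.
    zips = sync_extensions_from_s3(
        repo_folder=Path('./releases'),
        bucket='poly-hammer-portal-staging-app-data',
        s3_prefix='products/blender-extensions/meta_human_dna',
    )
    print(f'{len(zips)} extension archive(s) downloaded')
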
@@ -74,10 +75,12 @@ def upload_extensions_to_s3(
     s3_prefix: str = ''
 ) -> list[str]:
     """
-    Uploads extension .zip files and/or index files to S3.
+    Uploads extension .zip files and the index.json to S3.
+    Only uploads .zip files if they are newer than the S3 version.
+    Always uploads the index.json file.
 
     Args:
-        file_paths (list[Path]): List of file paths to upload.
+        repo_folder (Path): Local directory containing files to upload.
         bucket (str): The S3 bucket name.
         s3_prefix (str, optional): Prefix (folder path) in S3. Defaults to ''.
 
@@ -91,14 +94,39 @@
         region_name=REGION,
     )
 
+    # Build a map of S3 object keys to their last modified times
+    prefix = f'{s3_prefix}/' if s3_prefix and not s3_prefix.endswith('/') else s3_prefix
+    s3_objects = {}
+    paginator = s3_client.get_paginator('list_objects_v2')
+    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
+        for obj in page.get('Contents', []):
+            s3_objects[obj['Key']] = obj['LastModified']
+
     uploaded_keys = []
-    # Only upload .json and .zip files
+    # Collect .json and .zip files
     file_paths = list(repo_folder.glob('*.json')) + list(repo_folder.glob('*.zip'))
+
     for file_path in file_paths:
         if not file_path.exists():
             logger.warning(f'File "{file_path}" does not exist, skipping')
             continue
 
+        # Build the S3 key
+        s3_key = f'{s3_prefix}/{file_path.name}' if s3_prefix else file_path.name
+        s3_key = s3_key.lstrip('/')
+
+        # For .zip files, check if local file is newer than S3 version
+        if file_path.suffix == '.zip' and s3_key in s3_objects:
+            import datetime
+            local_mtime = datetime.datetime.fromtimestamp(
+                file_path.stat().st_mtime,
+                tz=datetime.timezone.utc
+            )
+            s3_mtime = s3_objects[s3_key]
+            if local_mtime <= s3_mtime:
+                logger.info(f'Skipping "{file_path.name}" (not newer than S3 version)')
+                continue
+
         # Determine content type
         if file_path.suffix == '.zip':
             content_type = 'application/zip'
@@ -107,10 +135,6 @@
         else:
             content_type = 'application/octet-stream'
 
-        # Build the S3 key
-        s3_key = f'{s3_prefix}/{file_path.name}' if s3_prefix else file_path.name
-        s3_key = s3_key.lstrip('/')
-
         logger.info(f'Uploading "{file_path}" to s3://{bucket}/{s3_key}')
         s3_client.upload_file(
             Filename=str(file_path),
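
The upload path now skips any .zip whose local mtime is not strictly newer than the S3 object's LastModified. boto3 returns LastModified as a timezone-aware datetime, which is why the code above converts the local mtime with tz=datetime.timezone.utc before comparing; comparing a naive datetime against an aware one would raise TypeError. A minimal illustration of that comparison (the file name and timestamp are hypothetical):

    import datetime
    from pathlib import Path

    path = Path('example.zip')  # hypothetical local archive
    local_mtime = datetime.datetime.fromtimestamp(
        path.stat().st_mtime,
        tz=datetime.timezone.utc,  # make it aware so the comparison is valid
    )
    s3_mtime = datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)
    upload_needed = local_mtime > s3_mtime
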
@@ -147,6 +171,34 @@ def generate_extension_index(
 
     shell(command)
 
+def trigger_poly_hammer_portal_extension_index_refresh() -> httpx.Response:
+    """Trigger sync of all extensions from S3."""
+    logger.info("Triggering extension sync...")
+
+    url = f"{BASE_URL}/api/v1/admin/extensions/sync"
+    headers = {"X-Admin-API-Key": PORTAL_API_KEY}
+
+    response = httpx.post(url, headers=headers, timeout=60.0)
+
+    if response.status_code == 200:
+        data = response.json()
+        logger.info("Sync complete!")
+        logger.info(f"Extensions processed: {data['extensions_processed']}")
+        logger.info(f"Platforms synced: {data['total_platforms_synced']}")
+
+        if data.get("errors"):
+            logger.error(f"Errors: {data['errors']}")
+
+        if data.get("details"):
+            logger.info("Extension details:")
+            for ext in data["details"]:
+                logger.info(f"- {ext['extension_id']}: {ext['versions_synced']} versions, {ext['platforms_synced']} platforms")
+    else:
+        logger.error(f"Failed with status {response.status_code}")
+        logger.error(f"Response: {response.text}")
+
+    return response
+
 def update_extension_index(
     repo_folder: Path,
     bucket: str,
@@ -154,23 +206,30 @@ def update_extension_index(
     blender_version: str = '4.5',
     docker: bool = False,
 ):
+    """
+    Syncs extensions from S3, regenerates the index locally, and uploads
+    any new or modified files back to S3.
+
+    Args:
+        repo_folder (Path): Local directory for the extension repository.
+        bucket (str): The S3 bucket name.
+        s3_folder (str): The folder path in S3.
+        blender_version (str, optional): Blender version to use. Defaults to '4.5'.
+        docker (bool, optional): Whether to use Docker for index generation. Defaults to False.
+    """
+    # Step 1: Generate the extension index from all local .zip files
     generate_extension_index(
         repo_folder=repo_folder,
         blender_version=blender_version,
         docker=docker,
     )
 
-    local_index_path = repo_folder / 'index.json'
-    merge_extension_index_from_s3(
-        local_index_path=local_index_path,
-        bucket=bucket,
-        s3_key=f'{s3_folder}/index.json',
-        output_path=local_index_path,
-    )
-
+    # Step 2: Upload new/modified .zip files and the updated index.json back to S3
     upload_extensions_to_s3(
         repo_folder=repo_folder,
         bucket=bucket,
         s3_prefix=s3_folder,
     )
 
+    # Step 3: Trigger the Poly Hammer Portal to refresh its extension index
+    trigger_poly_hammer_portal_extension_index_refresh()
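
The portal-refresh step added at the end of update_extension_index is also callable on its own. A sketch under the assumption that the package is installed; note that PORTAL_API_KEY is read at module import time into the X-Admin-API-Key header, so it must be set before the import (the key value below is a placeholder):

    import os

    # Must be in the environment before the module is imported.
    os.environ.setdefault('PORTAL_API_KEY', '<your-admin-key>')  # hypothetical

    from poly_hammer_utils.extension.server import (
        trigger_poly_hammer_portal_extension_index_refresh,
    )

    response = trigger_poly_hammer_portal_extension_index_refresh()
    # The function logs failures but still returns the response; callers
    # that prefer an exception can escalate explicitly:
    response.raise_for_status()
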
--- a/poly_hammer_utils-0.0.23.dist-info/METADATA
+++ b/poly_hammer_utils-0.0.25.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: poly-hammer-utils
-Version: 0.0.23
+Version: 0.0.25
 Summary:
 Author: Poly Hammer
 Author-email: info@polyhammer.com
@@ -10,6 +10,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: PyGithub (==2.5.0)
 Requires-Dist: boto3 (>=1.42.35,<2.0.0)
+Requires-Dist: httpx (>=0.28.1,<0.29.0)
 Requires-Dist: python-dotenv (>=1.1.1,<2.0.0)
 Requires-Dist: requirements-parser (>=0.11.0,<0.12.0)
 Requires-Dist: tomlkit (>=0.14.0,<0.15.0)
--- a/poly_hammer_utils-0.0.23.dist-info/RECORD
+++ b/poly_hammer_utils-0.0.25.dist-info/RECORD
@@ -1,16 +1,16 @@
 poly_hammer_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 poly_hammer_utils/addon/packager.py,sha256=apoZhQpd2QeRucFq53u-L1FPmN4MYuiNrjTETAE9NQg,8435
 poly_hammer_utils/addon/release.py,sha256=EZirS8_c9S1OEZoOHStgciu_C7vMxBc_-O5P1fM9Qu4,9074
-poly_hammer_utils/constants.py,sha256=Q5cCA6H-AJYWdbndCwk3fIoBPHx_9NTgjvY7p-e0grA,162
-poly_hammer_utils/extension/meta_human_dna.py,sha256=XPH9w3q3ZALPKRGZ7nHULMHPSr4XYZ5b4FTUOUW56a8,7103
+poly_hammer_utils/constants.py,sha256=4XGiwn7ip50V1zvmpVD9YZYmE6wPhj2Ark2AXQuJvSc,228
+poly_hammer_utils/extension/meta_human_dna.py,sha256=HP5BGYhxwjlPBAPnJxqtKjVI5uWyK3ZTFFLDJ3jNoaw,7740
 poly_hammer_utils/extension/packager.py,sha256=btYzyU_fa2Qaa6R9eZSojQ5uy3nQ0kDxCf49LnTZrHM,2115
-poly_hammer_utils/extension/server.py,sha256=Zu0ao-RBSYo2Do6d3ZGQeBkUUmMTUwWXR0rQ9Y6x7ig,5420
+poly_hammer_utils/extension/server.py,sha256=vuDMqEehgFiFiUy-R28oKzijHo5wIJzW0l66wwPU6I8,8072
 poly_hammer_utils/github/release.py,sha256=P4vrNvdgWuzJI0mRye09BP6vL5b7kQ5fpuDDnEqJbwg,9951
 poly_hammer_utils/helpers.py,sha256=f_2fNrydsbIvwD2LuoBj2gjETMP7dFzJtq1_7WMBwNI,1655
 poly_hammer_utils/launch.py,sha256=Nc9FlFuGtMSjgSLsk6MMMj-FECsOGtfUQlZaUZt_rGU,469
 poly_hammer_utils/resources/scripts/blender/startup.py,sha256=eUd0VwAHjTwIscVKCbk_0-F7p0jgIEsVYdzpIjlzOK0,2282
 poly_hammer_utils/resources/scripts/unreal/init_unreal.py,sha256=1-d9IU8ZSAIS3MUANrsGx4ZmqNJ5f8S2k8XJVg0Bghs,693
 poly_hammer_utils/utilities.py,sha256=7dmZo02HXbWhfmwDHkgVUE_gyq4e-gqb4OdBc2bokbs,6491
-poly_hammer_utils-0.0.23.dist-info/METADATA,sha256=QmD8swWvEzq5a80-OocO_Z9d39Psh9Cz3GwDeB9nblY,894
-poly_hammer_utils-0.0.23.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-poly_hammer_utils-0.0.23.dist-info/RECORD,,
+poly_hammer_utils-0.0.25.dist-info/METADATA,sha256=4qTBctRnioUIPFOWKdlMakIu8peOj5qzyrzs1FQoJo4,934
+poly_hammer_utils-0.0.25.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+poly_hammer_utils-0.0.25.dist-info/RECORD,,