aimodelshare 0.1.29__py3-none-any.whl → 0.1.30__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aimodelshare might be problematic.

aimodelshare/aimsonnx.py CHANGED
@@ -579,7 +579,7 @@ def _keras_to_onnx(model, transfer_learning=None,
     output_path = os.path.join(temp_dir, 'temp.onnx')
 
 
-    model.save(temp_dir)
+    tf.saved_model.save(model, temp_dir)
 
     # # Convert the model
     try:
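The change shown in this hunk swaps the Keras-level model.save(temp_dir) call for an explicit TensorFlow SavedModel export. Below is a minimal sketch of that export pattern; the toy model, the scratch directory, and the comment about newer Keras releases are illustrative assumptions, not code from the package:

import os
import tempfile

import tensorflow as tf

# Stand-in for the model argument of _keras_to_onnx (hypothetical toy model).
inputs = tf.keras.Input(shape=(4,))
model = tf.keras.Model(inputs, tf.keras.layers.Dense(1)(inputs))

temp_dir = tempfile.mkdtemp()
output_path = os.path.join(temp_dir, 'temp.onnx')  # path the converter writes to later

# 0.1.29 called model.save(temp_dir); newer Keras releases expect a .keras/.h5
# file path there, which is a likely reason 0.1.30 exports a SavedModel
# directory explicitly instead.
tf.saved_model.save(model, temp_dir)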
aimodelshare/data_sharing/download_data.py CHANGED
@@ -76,110 +76,88 @@ def download_layer(layer, layer_count, tmp_img_dir, blobs_resp):
     return layer_id, layer_dir
 
 def pull_image(image_uri):
-    import os
-    import requests
-    import tempfile
-    import json
-    import shutil
-    import tarfile
-    from aimodelshare.data_sharing.utils import redo_with_write
-
-    image_uri_parts = image_uri.split('/')
-
-    registry = image_uri_parts[0]
-    image, tag = image_uri_parts[2].split(':')
-    repository = '/'.join([image_uri_parts[1], image])
-
-    auth_url = get_auth_url(registry)
-
-    # Request manifest with correct Accept header
-    auth_head = get_auth_head(auth_url, registry, repository)
-    manifest_url = f'https://{registry}/v2/{repository}/manifests/{tag}'
-    resp = requests.get(manifest_url, headers=auth_head, verify=False)
-
-    # --- PATCH: Handle manifest list (multi-platform images) ---
-    if resp.headers.get('Content-Type') == 'application/vnd.docker.distribution.manifest.list.v2+json':
-        manifest_list = resp.json()
-
-        # Find the first linux/amd64 image (or fallback to first available)
-        target_manifest = next(
-            (m for m in manifest_list['manifests']
-             if m['platform'].get('architecture') == 'amd64' and m['platform'].get('os') == 'linux'),
-            manifest_list['manifests'][0]
-        )
-        digest = target_manifest['digest']
-
-        # Get the actual image manifest now
-        resp = requests.get(
-            f'https://{registry}/v2/{repository}/manifests/{digest}',
-            headers=auth_head,
-            verify=False
-        )
-    # -----------------------------------------------------------
-
-    manifest = resp.json()
-
-    # Safely check and fail early if config key is still missing
-    if 'config' not in manifest:
-        raise ValueError("Manifest response missing 'config'. This image may not follow Docker V2 manifest schema.")
-
-    config = manifest['config']['digest']
-    config_resp = requests.get(f'https://{registry}/v2/{repository}/blobs/{config}', headers=auth_head, verify=False)
-
-    tmp_img_dir = os.path.join(tempfile.gettempdir(), f'tmp_{image}_{tag}')
-    os.mkdir(tmp_img_dir)
-
-    with open(f'{tmp_img_dir}/{config[7:]}.json', 'wb') as file:
-        file.write(config_resp.content)
-
-    content = [{
-        'Config': config[7:] + '.json',
-        'RepoTags': [image_uri],
-        'Layers': []
-    }]
-
-    # Skip first 6 layers? Keep original logic for compatibility
-    layers = manifest['layers'][6:]
-    layer_count = 0
-
-    for layer in layers:
-        layer_count += 1
-        auth_head = get_auth_head(auth_url, registry, repository)
-        blobs_resp = requests.get(
-            f'https://{registry}/v2/{repository}/blobs/{layer["digest"]}',
-            headers=auth_head,
-            stream=True,
-            verify=False
-        )
-
-        layer_id, layer_dir = download_layer(layer, layer_count, tmp_img_dir, blobs_resp)
-        content[0]['Layers'].append(layer_id + '/layer.tar')
-
-        json_path = os.path.join(layer_dir, 'json')
-        with open(json_path, 'w') as file:
-            if layers[-1]['digest'] == layer['digest']:
-                json_obj = json.loads(config_resp.content)
-                json_obj.pop('history', None)
-                json_obj.pop('rootfs', None)
-            else:
-                json_obj = {}
-            json_obj['id'] = layer_id
-            file.write(json.dumps(json_obj))
-
-    with open(os.path.join(tmp_img_dir, 'manifest.json'), 'w') as f:
-        f.write(json.dumps(content))
-
-    repo_dict = {'/'.join(image_uri_parts[:-1]) + '/' + image: {tag: layer_id}}
-    with open(os.path.join(tmp_img_dir, 'repositories'), 'w') as f:
-        f.write(json.dumps(repo_dict))
-
-    # Create tar archive from temp image directory
-    docker_tar = os.path.join(tempfile.gettempdir(), f'{repository.replace("/", "_")}_{tag}.tar')
-    with tarfile.open(docker_tar, "w") as tar:
-        tar.add(tmp_img_dir, arcname=os.path.sep)
-
-    shutil.rmtree(tmp_img_dir, onerror=redo_with_write)
-    return docker_tar
+
+    image_uri_parts = image_uri.split('/')
+
+    registry = image_uri_parts[0]
+    image, tag = image_uri_parts[2].split(':')
+    repository = '/'.join([image_uri_parts[1], image])
+
+    auth_url = get_auth_url(registry)
+
+    auth_head = get_auth_head(auth_url, registry, repository)
+
+    resp = requests.get('https://{}/v2/{}/manifests/{}'.format(registry, repository, tag), headers=auth_head, verify=False)
+
+    print(resp.json())
+    config = resp.json()['config']['digest']
+    config_resp = requests.get('https://{}/v2/{}/blobs/{}'.format(registry, repository, config), headers=auth_head, verify=False)
+
+    tmp_img_dir = tempfile.gettempdir() + '/' + 'tmp_{}_{}'.format(image, tag)
+    os.mkdir(tmp_img_dir)
+
+    file = open('{}/{}.json'.format(tmp_img_dir, config[7:]), 'wb')
+    file.write(config_resp.content)
+    file.close()
+
+    content = [{
+        'Config': config[7:] + '.json',
+        'RepoTags': [],
+        'Layers': []
+    }]
+    content[0]['RepoTags'].append(image_uri)
+
+    layer_count=0
+    layers = resp.json()['layers'][6:]
+
+    for layer in layers:
+
+        layer_count += 1
+
+        auth_head = get_auth_head(auth_url, registry, repository) # done to keep from expiring
+        blobs_resp = requests.get('https://{}/v2/{}/blobs/{}'.format(registry, repository, layer['digest']), headers=auth_head, stream=True, verify=False)
+
+        layer_id, layer_dir = download_layer(layer, layer_count, tmp_img_dir, blobs_resp)
+        content[0]['Layers'].append(layer_id + '/layer.tar')
+
+        # Creating json file
+        file = open(layer_dir + '/json', 'w')
+
+        # last layer = config manifest - history - rootfs
+        if layers[-1]['digest'] == layer['digest']:
+            json_obj = json.loads(config_resp.content)
+            del json_obj['history']
+            del json_obj['rootfs']
+        else: # other layers json are empty
+            json_obj = json.loads('{}')
+
+        json_obj['id'] = layer_id
+        file.write(json.dumps(json_obj))
+        file.close()
+
+    file = open(tmp_img_dir + '/manifest.json', 'w')
+    file.write(json.dumps(content))
+    file.close()
+
+    content = {
+        '/'.join(image_uri_parts[:-1]) + '/' + image : { tag : layer_id }
+    }
+
+    file = open(tmp_img_dir + '/repositories', 'w')
+    file.write(json.dumps(content))
+    file.close()
+
+    # Create image tar and clean tmp folder
+    docker_tar = tempfile.gettempdir() + '/' + '_'.join([repository.replace('/', '_'), tag]) + '.tar'
+    sys.stdout.flush()
+
+    tar = tarfile.open(docker_tar, "w")
+    tar.add(tmp_img_dir, arcname=os.path.sep)
+    tar.close()
+
+    shutil.rmtree(tmp_img_dir, onerror=redo_with_write)
+
+    return docker_tar
 
 
 def extract_data_from_image(image_name, file_name, location):
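For reference, the multi-platform manifest handling that 0.1.29 carried and this hunk removes condenses to the sketch below. It is a restatement for illustration, not code present in 0.1.30; the placeholder registry, repository, tag, and empty auth header are assumptions, since pull_image derives the real values from image_uri and get_auth_head:

import requests

MANIFEST_LIST = 'application/vnd.docker.distribution.manifest.list.v2+json'

# Placeholder values for illustration only.
registry = 'public.ecr.aws'
repository = 'z5w0c9e9/quickstart_materials-repository'
tag = 'latest'
auth_head = {}  # the real code passes get_auth_head(auth_url, registry, repository)

resp = requests.get('https://{}/v2/{}/manifests/{}'.format(registry, repository, tag),
                    headers=auth_head, verify=False)

if resp.headers.get('Content-Type') == MANIFEST_LIST:
    # Multi-platform image: choose the linux/amd64 entry (or fall back to the
    # first) and fetch that platform's manifest by digest.
    manifests = resp.json()['manifests']
    target = next((m for m in manifests
                   if m['platform'].get('architecture') == 'amd64'
                   and m['platform'].get('os') == 'linux'),
                  manifests[0])
    resp = requests.get('https://{}/v2/{}/manifests/{}'.format(registry, repository, target['digest']),
                        headers=auth_head, verify=False)

# 0.1.30 reads this key unconditionally, so a manifest-list response would
# raise a KeyError here.
config = resp.json()['config']['digest']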
@@ -219,10 +197,11 @@ def import_quickstart_data(tutorial, section="modelplayground"):
 
     #Download Quick Start materials
     if all([tutorial == "flowers", section == "modelplayground"]):
-        quickstart_repository = "public.ecr.aws/y2e2a1d6/quickstart_materials-repository:latest"
+        quickstart_repository = "public.ecr.aws/z5w0c9e9/quickstart_materials-repository:latest"
         existing_folder = 'flower_competition_data'
+
     if all([tutorial == "flowers", section == "competition"]):
-        quickstart_repository = "public.ecr.aws/y2e2a1d6/quickstart_flowers_competition-repository:latest"
+        quickstart_repository = "public.ecr.aws/z5w0c9e9/quickstart_flowers_competition-repository:latest"
         existing_folder = 'flower_competition_data'
 
     if all([tutorial == "mnist", section == "modelplayground"]):
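The only change in this hunk is the ECR registry alias in the quickstart image URIs (y2e2a1d6 → z5w0c9e9). For orientation, this is how pull_image above splits such a URI, using the updated repository as a sample value:

# Sample value taken from the diff; the split mirrors the logic in pull_image.
image_uri = "public.ecr.aws/z5w0c9e9/quickstart_materials-repository:latest"

image_uri_parts = image_uri.split('/')
registry = image_uri_parts[0]                        # 'public.ecr.aws'
image, tag = image_uri_parts[2].split(':')           # 'quickstart_materials-repository', 'latest'
repository = '/'.join([image_uri_parts[1], image])   # 'z5w0c9e9/quickstart_materials-repository'

print(registry, repository, tag)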
aimodelshare-0.1.29.dist-info/METADATA → aimodelshare-0.1.30.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: aimodelshare
-Version: 0.1.29
+Version: 0.1.30
 Summary: Deploy locally saved machine learning models to a live rest API and web-dashboard. Share it with the world via modelshare.org
 Home-page: https://www.modelshare.org
 Author: Michael Parrott
aimodelshare-0.1.29.dist-info/RECORD → aimodelshare-0.1.30.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 aimodelshare/README.md,sha256=_OMdUIeIYZnpFlKdafM1KNWaANO2nWdx0QpLE_ZC-Qs,2014
 aimodelshare/__init__.py,sha256=CS0iFxgAic21gBcQE6NSZ-D_ElHw80_A3OWrEYo9Dks,539
-aimodelshare/aimsonnx.py,sha256=bHjp2iEBSGKzfScDX4wn9I7lR2wPSMpdgi8qmroz4nY,69282
+aimodelshare/aimsonnx.py,sha256=fmOgHVc6SL1cH9zt2EHiPOVLXGunQuteo6XE4cc_LA0,69298
 aimodelshare/api.py,sha256=jeCIMbpBllTYi-bPvCdNvI6yHaT3JYakq9fgaz10s_E,34920
 aimodelshare/aws.py,sha256=jn99R9-N77Qac-_eYm-LaCQUPd-RnE7oVULm9rh-3RY,15232
 aimodelshare/aws_client.py,sha256=Ce19iwf69BwpuyyJlVN8z1da3c5jf93svsTgx1OWhaA,6784
@@ -31,7 +31,7 @@ aimodelshare/containerization_templates/lambda_function.txt,sha256=nEFoPDXemNcQZ
 aimodelshare/custom_approach/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 aimodelshare/custom_approach/lambda_function.py,sha256=d1HZlgviHZq4mNBKx4q-RCunDK8P8i9DKZcfv6Nmgzc,479
 aimodelshare/data_sharing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-aimodelshare/data_sharing/download_data.py,sha256=zZDnZou1x4QXdPSlD-1OfCBow4XoYgq8JzlCy7tobXw,24273
+aimodelshare/data_sharing/download_data.py,sha256=Hd9QUKfC2GQqBuJhzjVIHNYz8rksN7Nl3B52U7WIqxA,22815
 aimodelshare/data_sharing/share_data.py,sha256=dMOP0-PTSpviOeHi3Nvj-uiq5PlIfk_SN5nN92j4PnI,13964
 aimodelshare/data_sharing/utils.py,sha256=865lN8-oGFi_U_zRaNnGB8Bd0sC8dN_iI5krZOSt_Ts,236
 aimodelshare/data_sharing/data_sharing_templates/Dockerfile.txt,sha256=27wmp7b0rXqJQsumhPxCvGHmUcDiiVgrC6i7DmY7KQA,77
@@ -140,11 +140,11 @@ aimodelshare/sam/codepipeline_policies.txt,sha256=267HMXMnbP7qRASkmFZYSx-2HmKf5o
 aimodelshare/sam/codepipeline_trust_relationship.txt,sha256=yfPYvZlN3fnaIHs7I3ENMMveigIE89mufV9pvR8EQH8,245
 aimodelshare/sam/spark-class.txt,sha256=chyJBxDzCzlUKXzVQYTzuJ2PXCTwg8_gd1yfnI-xbRw,217
 aimodelshare/sam/template.txt,sha256=JKSvEOZNaaLalHSx7r9psJg_6LLCb0XLAYi1-jYPu3M,1195
-aimodelshare-0.1.29.dist-info/licenses/LICENSE,sha256=JXBYLriXYgTloZs-9CJPZY76dqkuDT5df_HghMnljx8,1134
+aimodelshare-0.1.30.dist-info/licenses/LICENSE,sha256=JXBYLriXYgTloZs-9CJPZY76dqkuDT5df_HghMnljx8,1134
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/test_aimsonnx.py,sha256=-GOF1_qXGQaMxHyqK0GPg7dD1meE-S7CZea4pLmBDTk,3906
 tests/test_playground.py,sha256=vdFWPRrZNQ2poiBOoN3l7HsXB5yc3p3rrrclNYJHnaw,24574
-aimodelshare-0.1.29.dist-info/METADATA,sha256=2ow3I21HD8bAggVYmz6TDBn6BhfesGQIsvqhcul_Slw,3523
-aimodelshare-0.1.29.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-aimodelshare-0.1.29.dist-info/top_level.txt,sha256=2KJgeHQ0BmZuilB75J203i7W4vri6CON2kdbwk9BNpU,19
-aimodelshare-0.1.29.dist-info/RECORD,,
+aimodelshare-0.1.30.dist-info/METADATA,sha256=VsmKA64cn84DXDDgHNTM92DE2chwVJU_mVyN1fm7QHw,3523
+aimodelshare-0.1.30.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
+aimodelshare-0.1.30.dist-info/top_level.txt,sha256=2KJgeHQ0BmZuilB75J203i7W4vri6CON2kdbwk9BNpU,19
+aimodelshare-0.1.30.dist-info/RECORD,,
aimodelshare-0.1.29.dist-info/WHEEL → aimodelshare-0.1.30.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (78.1.0)
+Generator: setuptools (79.0.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 