aimodelshare 0.1.27__py3-none-any.whl → 0.1.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of aimodelshare might be problematic.

aimodelshare/data_sharing/download_data.py

@@ -76,87 +76,111 @@ def download_layer(layer, layer_count, tmp_img_dir, blobs_resp):
     return layer_id, layer_dir
 
 def pull_image(image_uri):
+    import os
+    import requests
+    import tempfile
+    import json
+    import shutil
+    import tarfile
+    from aimodelshare.data_sharing.utils import redo_with_write
+
+    image_uri_parts = image_uri.split('/')
+
+    registry = image_uri_parts[0]
+    image, tag = image_uri_parts[2].split(':')
+    repository = '/'.join([image_uri_parts[1], image])
+
+    auth_url = get_auth_url(registry)
+
+    # Request manifest with correct Accept header
+    auth_head = get_auth_head(auth_url, registry, repository)
+    manifest_url = f'https://{registry}/v2/{repository}/manifests/{tag}'
+    resp = requests.get(manifest_url, headers=auth_head, verify=False)
+
+    # --- PATCH: Handle manifest list (multi-platform images) ---
+    if resp.headers.get('Content-Type') == 'application/vnd.docker.distribution.manifest.list.v2+json':
+        manifest_list = resp.json()
+
+        # Find the first linux/amd64 image (or fallback to first available)
+        target_manifest = next(
+            (m for m in manifest_list['manifests']
+             if m['platform'].get('architecture') == 'amd64' and m['platform'].get('os') == 'linux'),
+            manifest_list['manifests'][0]
+        )
+        digest = target_manifest['digest']
+
+        # Get the actual image manifest now
+        resp = requests.get(
+            f'https://{registry}/v2/{repository}/manifests/{digest}',
+            headers=auth_head,
+            verify=False
+        )
+    # -----------------------------------------------------------
+
+    manifest = resp.json()
+
+    # Safely check and fail early if config key is still missing
+    if 'config' not in manifest:
+        raise ValueError("Manifest response missing 'config'. This image may not follow Docker V2 manifest schema.")
+
+    config = manifest['config']['digest']
+    config_resp = requests.get(f'https://{registry}/v2/{repository}/blobs/{config}', headers=auth_head, verify=False)
+
+    tmp_img_dir = os.path.join(tempfile.gettempdir(), f'tmp_{image}_{tag}')
+    os.mkdir(tmp_img_dir)
+
+    with open(f'{tmp_img_dir}/{config[7:]}.json', 'wb') as file:
+        file.write(config_resp.content)
+
+    content = [{
+        'Config': config[7:] + '.json',
+        'RepoTags': [image_uri],
+        'Layers': []
+    }]
+
+    # Skip first 6 layers? Keep original logic for compatibility
+    layers = manifest['layers'][6:]
+    layer_count = 0
+
+    for layer in layers:
+        layer_count += 1
+        auth_head = get_auth_head(auth_url, registry, repository)
+        blobs_resp = requests.get(
+            f'https://{registry}/v2/{repository}/blobs/{layer["digest"]}',
+            headers=auth_head,
+            stream=True,
+            verify=False
+        )
+
+        layer_id, layer_dir = download_layer(layer, layer_count, tmp_img_dir, blobs_resp)
+        content[0]['Layers'].append(layer_id + '/layer.tar')
+
+        json_path = os.path.join(layer_dir, 'json')
+        with open(json_path, 'w') as file:
+            if layers[-1]['digest'] == layer['digest']:
+                json_obj = json.loads(config_resp.content)
+                json_obj.pop('history', None)
+                json_obj.pop('rootfs', None)
+            else:
+                json_obj = {}
+            json_obj['id'] = layer_id
+            file.write(json.dumps(json_obj))
+
+    with open(os.path.join(tmp_img_dir, 'manifest.json'), 'w') as f:
+        f.write(json.dumps(content))
+
+    repo_dict = {'/'.join(image_uri_parts[:-1]) + '/' + image: {tag: layer_id}}
+    with open(os.path.join(tmp_img_dir, 'repositories'), 'w') as f:
+        f.write(json.dumps(repo_dict))
+
+    # Create tar archive from temp image directory
+    docker_tar = os.path.join(tempfile.gettempdir(), f'{repository.replace("/", "_")}_{tag}.tar')
+    with tarfile.open(docker_tar, "w") as tar:
+        tar.add(tmp_img_dir, arcname=os.path.sep)
+
+    shutil.rmtree(tmp_img_dir, onerror=redo_with_write)
+    return docker_tar
 
-    image_uri_parts = image_uri.split('/')
-
-    registry = image_uri_parts[0]
-    image, tag = image_uri_parts[2].split(':')
-    repository = '/'.join([image_uri_parts[1], image])
-
-    auth_url = get_auth_url(registry)
-
-    auth_head = get_auth_head(auth_url, registry, repository)
-
-    resp = requests.get('https://{}/v2/{}/manifests/{}'.format(registry, repository, tag), headers=auth_head, verify=False)
-
-    config = resp.json()['config']['digest']
-    config_resp = requests.get('https://{}/v2/{}/blobs/{}'.format(registry, repository, config), headers=auth_head, verify=False)
-
-    tmp_img_dir = tempfile.gettempdir() + '/' + 'tmp_{}_{}'.format(image, tag)
-    os.mkdir(tmp_img_dir)
-
-    file = open('{}/{}.json'.format(tmp_img_dir, config[7:]), 'wb')
-    file.write(config_resp.content)
-    file.close()
-
-    content = [{
-        'Config': config[7:] + '.json',
-        'RepoTags': [],
-        'Layers': []
-    }]
-    content[0]['RepoTags'].append(image_uri)
-
-    layer_count=0
-    layers = resp.json()['layers'][6:]
-
-    for layer in layers:
-
-        layer_count += 1
-
-        auth_head = get_auth_head(auth_url, registry, repository) # done to keep from expiring
-        blobs_resp = requests.get('https://{}/v2/{}/blobs/{}'.format(registry, repository, layer['digest']), headers=auth_head, stream=True, verify=False)
-
-        layer_id, layer_dir = download_layer(layer, layer_count, tmp_img_dir, blobs_resp)
-        content[0]['Layers'].append(layer_id + '/layer.tar')
-
-        # Creating json file
-        file = open(layer_dir + '/json', 'w')
-
-        # last layer = config manifest - history - rootfs
-        if layers[-1]['digest'] == layer['digest']:
-            json_obj = json.loads(config_resp.content)
-            del json_obj['history']
-            del json_obj['rootfs']
-        else: # other layers json are empty
-            json_obj = json.loads('{}')
-
-        json_obj['id'] = layer_id
-        file.write(json.dumps(json_obj))
-        file.close()
-
-    file = open(tmp_img_dir + '/manifest.json', 'w')
-    file.write(json.dumps(content))
-    file.close()
-
-    content = {
-        '/'.join(image_uri_parts[:-1]) + '/' + image : { tag : layer_id }
-    }
-
-    file = open(tmp_img_dir + '/repositories', 'w')
-    file.write(json.dumps(content))
-    file.close()
-
-    # Create image tar and clean tmp folder
-    docker_tar = tempfile.gettempdir() + '/' + '_'.join([repository.replace('/', '_'), tag]) + '.tar'
-    sys.stdout.flush()
-
-    tar = tarfile.open(docker_tar, "w")
-    tar.add(tmp_img_dir, arcname=os.path.sep)
-    tar.close()
-
-    shutil.rmtree(tmp_img_dir, onerror=redo_with_write)
-
-    return docker_tar
 
 def extract_data_from_image(image_name, file_name, location):
     tar = tarfile.open(image_name, 'r')
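The main behavioral change in download_data.py is the manifest-list handling added above: when the registry returns a multi-platform manifest list, the new code selects the linux/amd64 entry (falling back to the first entry) and re-requests that digest before reading 'config'. Below is a minimal standalone sketch of that selection step, operating on a hypothetical manifest_list dict rather than a live registry response, to show what the added logic does in isolation.

    def select_platform_digest(manifest_list, arch='amd64', os_name='linux'):
        # Return the digest of the first entry matching (arch, os_name),
        # or the first available entry if no platform matches.
        target = next(
            (m for m in manifest_list['manifests']
             if m['platform'].get('architecture') == arch and m['platform'].get('os') == os_name),
            manifest_list['manifests'][0]
        )
        return target['digest']

    # Hypothetical manifest list with two platform entries
    example = {'manifests': [
        {'digest': 'sha256:aaa', 'platform': {'architecture': 'arm64', 'os': 'linux'}},
        {'digest': 'sha256:bbb', 'platform': {'architecture': 'amd64', 'os': 'linux'}},
    ]}
    assert select_platform_digest(example) == 'sha256:bbb'
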
aimodelshare-0.1.27.dist-info/METADATA → aimodelshare-0.1.29.dist-info/METADATA

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: aimodelshare
-Version: 0.1.27
+Version: 0.1.29
 Summary: Deploy locally saved machine learning models to a live rest API and web-dashboard. Share it with the world via modelshare.org
 Home-page: https://www.modelshare.org
 Author: Michael Parrott
@@ -11,33 +11,43 @@ Classifier: Operating System :: OS Independent
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: boto3 ==1.26.69
-Requires-Dist: botocore ==1.29.82
-Requires-Dist: scikit-learn ==1.2.2
-Requires-Dist: onnx ==1.13.1
-Requires-Dist: onnxconverter-common >=1.7.0
+Requires-Dist: boto3==1.34.69
+Requires-Dist: botocore==1.34.69
+Requires-Dist: scikit-learn==1.6.0
+Requires-Dist: onnx==1.14.1
+Requires-Dist: onnxconverter-common==1.14.0
 Requires-Dist: regex
-Requires-Dist: keras2onnx >=1.7.0
-Requires-Dist: tensorflow >=2.12
-Requires-Dist: tf2onnx
-Requires-Dist: skl2onnx >=1.14.0
-Requires-Dist: onnxruntime >=1.7.0
-Requires-Dist: torch >=1.8.1
-Requires-Dist: pydot ==1.3.0
-Requires-Dist: importlib-resources ==5.10.0
-Requires-Dist: onnxmltools >=1.6.1
-Requires-Dist: Pympler ==0.9
-Requires-Dist: docker ==5.0.0
-Requires-Dist: wget ==3.2
-Requires-Dist: PyJWT >=2.4.0
-Requires-Dist: seaborn >=0.11.2
-Requires-Dist: astunparse ==1.6.3
-Requires-Dist: shortuuid >=1.0.8
-Requires-Dist: psutil >=5.9.1
-Requires-Dist: pathlib >=1.0.1
-Requires-Dist: protobuf >=3.20.1
-Requires-Dist: dill
-Requires-Dist: scikeras
+Requires-Dist: keras2onnx==1.7.0
+Requires-Dist: tensorflow==2.18.0
+Requires-Dist: tf2onnx==1.16.1
+Requires-Dist: skl2onnx==1.18.0
+Requires-Dist: onnxruntime==1.17.1
+Requires-Dist: torch==2.6.0
+Requires-Dist: pydot==1.4.2
+Requires-Dist: importlib-resources==6.1.1
+Requires-Dist: onnxmltools==1.11.0
+Requires-Dist: Pympler==1.0.1
+Requires-Dist: docker==6.1.3
+Requires-Dist: wget==3.2
+Requires-Dist: PyJWT==2.8.0
+Requires-Dist: seaborn==0.13.2
+Requires-Dist: astunparse==1.6.3
+Requires-Dist: shortuuid==1.0.11
+Requires-Dist: psutil==5.9.5
+Requires-Dist: pathlib
+Requires-Dist: protobuf==3.20.3
+Requires-Dist: dill==0.3.7
+Requires-Dist: scikeras==0.11.0
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
 
 
 # aimodelshare
aimodelshare-0.1.27.dist-info/RECORD → aimodelshare-0.1.29.dist-info/RECORD

@@ -31,7 +31,7 @@ aimodelshare/containerization_templates/lambda_function.txt,sha256=nEFoPDXemNcQZ
 aimodelshare/custom_approach/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 aimodelshare/custom_approach/lambda_function.py,sha256=d1HZlgviHZq4mNBKx4q-RCunDK8P8i9DKZcfv6Nmgzc,479
 aimodelshare/data_sharing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-aimodelshare/data_sharing/download_data.py,sha256=difmxSdzUmNTKF4V2UmpzIL0KXeaw5gFOVbWkKz_Fiw,22799
+aimodelshare/data_sharing/download_data.py,sha256=zZDnZou1x4QXdPSlD-1OfCBow4XoYgq8JzlCy7tobXw,24273
 aimodelshare/data_sharing/share_data.py,sha256=dMOP0-PTSpviOeHi3Nvj-uiq5PlIfk_SN5nN92j4PnI,13964
 aimodelshare/data_sharing/utils.py,sha256=865lN8-oGFi_U_zRaNnGB8Bd0sC8dN_iI5krZOSt_Ts,236
 aimodelshare/data_sharing/data_sharing_templates/Dockerfile.txt,sha256=27wmp7b0rXqJQsumhPxCvGHmUcDiiVgrC6i7DmY7KQA,77
@@ -140,11 +140,11 @@ aimodelshare/sam/codepipeline_policies.txt,sha256=267HMXMnbP7qRASkmFZYSx-2HmKf5o
 aimodelshare/sam/codepipeline_trust_relationship.txt,sha256=yfPYvZlN3fnaIHs7I3ENMMveigIE89mufV9pvR8EQH8,245
 aimodelshare/sam/spark-class.txt,sha256=chyJBxDzCzlUKXzVQYTzuJ2PXCTwg8_gd1yfnI-xbRw,217
 aimodelshare/sam/template.txt,sha256=JKSvEOZNaaLalHSx7r9psJg_6LLCb0XLAYi1-jYPu3M,1195
+aimodelshare-0.1.29.dist-info/licenses/LICENSE,sha256=JXBYLriXYgTloZs-9CJPZY76dqkuDT5df_HghMnljx8,1134
 tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/test_aimsonnx.py,sha256=-GOF1_qXGQaMxHyqK0GPg7dD1meE-S7CZea4pLmBDTk,3906
 tests/test_playground.py,sha256=vdFWPRrZNQ2poiBOoN3l7HsXB5yc3p3rrrclNYJHnaw,24574
-aimodelshare-0.1.27.dist-info/LICENSE,sha256=JXBYLriXYgTloZs-9CJPZY76dqkuDT5df_HghMnljx8,1134
-aimodelshare-0.1.27.dist-info/METADATA,sha256=TB1PkkljHxK-16DLj4NzZc1y4Yjm5Q2ttE6ovVZYbwE,3304
-aimodelshare-0.1.27.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-aimodelshare-0.1.27.dist-info/top_level.txt,sha256=2KJgeHQ0BmZuilB75J203i7W4vri6CON2kdbwk9BNpU,19
-aimodelshare-0.1.27.dist-info/RECORD,,
+aimodelshare-0.1.29.dist-info/METADATA,sha256=2ow3I21HD8bAggVYmz6TDBn6BhfesGQIsvqhcul_Slw,3523
+aimodelshare-0.1.29.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+aimodelshare-0.1.29.dist-info/top_level.txt,sha256=2KJgeHQ0BmZuilB75J203i7W4vri6CON2kdbwk9BNpU,19
+aimodelshare-0.1.29.dist-info/RECORD,,
aimodelshare-0.1.27.dist-info/WHEEL → aimodelshare-0.1.29.dist-info/WHEEL

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: setuptools (78.1.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
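For orientation, here is a hypothetical usage sketch of the changed function: it assumes pull_image can be imported from aimodelshare.data_sharing.download_data, that the image URI follows the registry/namespace/image:tag shape the function parses, and that a local Docker daemon is reachable through the docker SDK pinned in this release. This is an illustration only, not code shipped in the package.

    import docker  # pinned as docker==6.1.3 in 0.1.29
    from aimodelshare.data_sharing.download_data import pull_image

    # Hypothetical image URI of the form <registry>/<namespace>/<image>:<tag>
    image_uri = 'public.ecr.aws/example/example-repo:latest'

    # Downloads the config and layers and writes a docker-load-style tar to the temp dir
    tar_path = pull_image(image_uri)

    # Load the resulting tar into the local Docker daemon
    client = docker.from_env()
    with open(tar_path, 'rb') as f:
        images = client.images.load(f.read())
    print([img.tags for img in images])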