aimodelshare-0.3.7-py3-none-any.whl

This diff shows the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between released package versions.
Files changed (171)
  1. aimodelshare/README.md +26 -0
  2. aimodelshare/__init__.py +100 -0
  3. aimodelshare/aimsonnx.py +2381 -0
  4. aimodelshare/api.py +836 -0
  5. aimodelshare/auth.py +163 -0
  6. aimodelshare/aws.py +511 -0
  7. aimodelshare/aws_client.py +173 -0
  8. aimodelshare/base_image.py +154 -0
  9. aimodelshare/bucketpolicy.py +106 -0
  10. aimodelshare/color_mappings/color_mapping_keras.csv +121 -0
  11. aimodelshare/color_mappings/color_mapping_pytorch.csv +117 -0
  12. aimodelshare/containerisation.py +244 -0
  13. aimodelshare/containerization.py +712 -0
  14. aimodelshare/containerization_templates/Dockerfile.txt +8 -0
  15. aimodelshare/containerization_templates/Dockerfile_PySpark.txt +23 -0
  16. aimodelshare/containerization_templates/buildspec.txt +14 -0
  17. aimodelshare/containerization_templates/lambda_function.txt +40 -0
  18. aimodelshare/custom_approach/__init__.py +1 -0
  19. aimodelshare/custom_approach/lambda_function.py +17 -0
  20. aimodelshare/custom_eval_metrics.py +103 -0
  21. aimodelshare/data_sharing/__init__.py +0 -0
  22. aimodelshare/data_sharing/data_sharing_templates/Dockerfile.txt +3 -0
  23. aimodelshare/data_sharing/data_sharing_templates/__init__.py +1 -0
  24. aimodelshare/data_sharing/data_sharing_templates/buildspec.txt +15 -0
  25. aimodelshare/data_sharing/data_sharing_templates/codebuild_policies.txt +129 -0
  26. aimodelshare/data_sharing/data_sharing_templates/codebuild_trust_relationship.txt +12 -0
  27. aimodelshare/data_sharing/download_data.py +620 -0
  28. aimodelshare/data_sharing/share_data.py +373 -0
  29. aimodelshare/data_sharing/utils.py +8 -0
  30. aimodelshare/deploy_custom_lambda.py +246 -0
  31. aimodelshare/documentation/Makefile +20 -0
  32. aimodelshare/documentation/karma_sphinx_theme/__init__.py +28 -0
  33. aimodelshare/documentation/karma_sphinx_theme/_version.py +2 -0
  34. aimodelshare/documentation/karma_sphinx_theme/breadcrumbs.html +70 -0
  35. aimodelshare/documentation/karma_sphinx_theme/layout.html +172 -0
  36. aimodelshare/documentation/karma_sphinx_theme/search.html +50 -0
  37. aimodelshare/documentation/karma_sphinx_theme/searchbox.html +14 -0
  38. aimodelshare/documentation/karma_sphinx_theme/static/css/custom.css +2 -0
  39. aimodelshare/documentation/karma_sphinx_theme/static/css/custom.css.map +1 -0
  40. aimodelshare/documentation/karma_sphinx_theme/static/css/theme.css +2751 -0
  41. aimodelshare/documentation/karma_sphinx_theme/static/css/theme.css.map +1 -0
  42. aimodelshare/documentation/karma_sphinx_theme/static/css/theme.min.css +2 -0
  43. aimodelshare/documentation/karma_sphinx_theme/static/css/theme.min.css.map +1 -0
  44. aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.eot +0 -0
  45. aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.svg +32 -0
  46. aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.ttf +0 -0
  47. aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.woff +0 -0
  48. aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.woff2 +0 -0
  49. aimodelshare/documentation/karma_sphinx_theme/static/js/theme.js +68 -0
  50. aimodelshare/documentation/karma_sphinx_theme/theme.conf +9 -0
  51. aimodelshare/documentation/make.bat +35 -0
  52. aimodelshare/documentation/requirements.txt +2 -0
  53. aimodelshare/documentation/source/about.rst +18 -0
  54. aimodelshare/documentation/source/advanced_features.rst +137 -0
  55. aimodelshare/documentation/source/competition.rst +218 -0
  56. aimodelshare/documentation/source/conf.py +58 -0
  57. aimodelshare/documentation/source/create_credentials.rst +86 -0
  58. aimodelshare/documentation/source/example_notebooks.rst +132 -0
  59. aimodelshare/documentation/source/functions.rst +151 -0
  60. aimodelshare/documentation/source/gettingstarted.rst +390 -0
  61. aimodelshare/documentation/source/images/creds1.png +0 -0
  62. aimodelshare/documentation/source/images/creds2.png +0 -0
  63. aimodelshare/documentation/source/images/creds3.png +0 -0
  64. aimodelshare/documentation/source/images/creds4.png +0 -0
  65. aimodelshare/documentation/source/images/creds5.png +0 -0
  66. aimodelshare/documentation/source/images/creds_file_example.png +0 -0
  67. aimodelshare/documentation/source/images/predict_tab.png +0 -0
  68. aimodelshare/documentation/source/index.rst +110 -0
  69. aimodelshare/documentation/source/modelplayground.rst +132 -0
  70. aimodelshare/exceptions.py +11 -0
  71. aimodelshare/generatemodelapi.py +1270 -0
  72. aimodelshare/iam/codebuild_policy.txt +129 -0
  73. aimodelshare/iam/codebuild_trust_relationship.txt +12 -0
  74. aimodelshare/iam/lambda_policy.txt +15 -0
  75. aimodelshare/iam/lambda_trust_relationship.txt +12 -0
  76. aimodelshare/json_templates/__init__.py +1 -0
  77. aimodelshare/json_templates/api_json.txt +155 -0
  78. aimodelshare/json_templates/auth/policy.txt +1 -0
  79. aimodelshare/json_templates/auth/role.txt +1 -0
  80. aimodelshare/json_templates/eval/policy.txt +1 -0
  81. aimodelshare/json_templates/eval/role.txt +1 -0
  82. aimodelshare/json_templates/function/policy.txt +1 -0
  83. aimodelshare/json_templates/function/role.txt +1 -0
  84. aimodelshare/json_templates/integration_response.txt +5 -0
  85. aimodelshare/json_templates/lambda_policy_1.txt +15 -0
  86. aimodelshare/json_templates/lambda_policy_2.txt +8 -0
  87. aimodelshare/json_templates/lambda_role_1.txt +12 -0
  88. aimodelshare/json_templates/lambda_role_2.txt +16 -0
  89. aimodelshare/leaderboard.py +174 -0
  90. aimodelshare/main/1.txt +132 -0
  91. aimodelshare/main/1B.txt +112 -0
  92. aimodelshare/main/2.txt +153 -0
  93. aimodelshare/main/3.txt +134 -0
  94. aimodelshare/main/4.txt +128 -0
  95. aimodelshare/main/5.txt +109 -0
  96. aimodelshare/main/6.txt +105 -0
  97. aimodelshare/main/7.txt +144 -0
  98. aimodelshare/main/8.txt +142 -0
  99. aimodelshare/main/__init__.py +1 -0
  100. aimodelshare/main/authorization.txt +275 -0
  101. aimodelshare/main/eval_classification.txt +79 -0
  102. aimodelshare/main/eval_lambda.txt +1709 -0
  103. aimodelshare/main/eval_regression.txt +80 -0
  104. aimodelshare/main/lambda_function.txt +8 -0
  105. aimodelshare/main/nst.txt +149 -0
  106. aimodelshare/model.py +1543 -0
  107. aimodelshare/modeluser.py +215 -0
  108. aimodelshare/moral_compass/README.md +408 -0
  109. aimodelshare/moral_compass/__init__.py +65 -0
  110. aimodelshare/moral_compass/_version.py +3 -0
  111. aimodelshare/moral_compass/api_client.py +601 -0
  112. aimodelshare/moral_compass/apps/__init__.py +69 -0
  113. aimodelshare/moral_compass/apps/ai_consequences.py +540 -0
  114. aimodelshare/moral_compass/apps/bias_detective.py +714 -0
  115. aimodelshare/moral_compass/apps/ethical_revelation.py +898 -0
  116. aimodelshare/moral_compass/apps/fairness_fixer.py +889 -0
  117. aimodelshare/moral_compass/apps/judge.py +888 -0
  118. aimodelshare/moral_compass/apps/justice_equity_upgrade.py +853 -0
  119. aimodelshare/moral_compass/apps/mc_integration_helpers.py +820 -0
  120. aimodelshare/moral_compass/apps/model_building_game.py +1104 -0
  121. aimodelshare/moral_compass/apps/model_building_game_beginner.py +687 -0
  122. aimodelshare/moral_compass/apps/moral_compass_challenge.py +858 -0
  123. aimodelshare/moral_compass/apps/session_auth.py +254 -0
  124. aimodelshare/moral_compass/apps/shared_activity_styles.css +349 -0
  125. aimodelshare/moral_compass/apps/tutorial.py +481 -0
  126. aimodelshare/moral_compass/apps/what_is_ai.py +853 -0
  127. aimodelshare/moral_compass/challenge.py +365 -0
  128. aimodelshare/moral_compass/config.py +187 -0
  129. aimodelshare/placeholders/model.onnx +0 -0
  130. aimodelshare/placeholders/preprocessor.zip +0 -0
  131. aimodelshare/playground.py +1968 -0
  132. aimodelshare/postprocessormodules.py +157 -0
  133. aimodelshare/preprocessormodules.py +373 -0
  134. aimodelshare/pyspark/1.txt +195 -0
  135. aimodelshare/pyspark/1B.txt +181 -0
  136. aimodelshare/pyspark/2.txt +220 -0
  137. aimodelshare/pyspark/3.txt +204 -0
  138. aimodelshare/pyspark/4.txt +187 -0
  139. aimodelshare/pyspark/5.txt +178 -0
  140. aimodelshare/pyspark/6.txt +174 -0
  141. aimodelshare/pyspark/7.txt +211 -0
  142. aimodelshare/pyspark/8.txt +206 -0
  143. aimodelshare/pyspark/__init__.py +1 -0
  144. aimodelshare/pyspark/authorization.txt +258 -0
  145. aimodelshare/pyspark/eval_classification.txt +79 -0
  146. aimodelshare/pyspark/eval_lambda.txt +1441 -0
  147. aimodelshare/pyspark/eval_regression.txt +80 -0
  148. aimodelshare/pyspark/lambda_function.txt +8 -0
  149. aimodelshare/pyspark/nst.txt +213 -0
  150. aimodelshare/python/my_preprocessor.py +58 -0
  151. aimodelshare/readme.md +26 -0
  152. aimodelshare/reproducibility.py +181 -0
  153. aimodelshare/sam/Dockerfile.txt +8 -0
  154. aimodelshare/sam/Dockerfile_PySpark.txt +24 -0
  155. aimodelshare/sam/__init__.py +1 -0
  156. aimodelshare/sam/buildspec.txt +11 -0
  157. aimodelshare/sam/codebuild_policies.txt +129 -0
  158. aimodelshare/sam/codebuild_trust_relationship.txt +12 -0
  159. aimodelshare/sam/codepipeline_policies.txt +173 -0
  160. aimodelshare/sam/codepipeline_trust_relationship.txt +12 -0
  161. aimodelshare/sam/spark-class.txt +2 -0
  162. aimodelshare/sam/template.txt +54 -0
  163. aimodelshare/tools.py +103 -0
  164. aimodelshare/utils/__init__.py +78 -0
  165. aimodelshare/utils/optional_deps.py +38 -0
  166. aimodelshare/utils.py +57 -0
  167. aimodelshare-0.3.7.dist-info/METADATA +298 -0
  168. aimodelshare-0.3.7.dist-info/RECORD +171 -0
  169. aimodelshare-0.3.7.dist-info/WHEEL +5 -0
  170. aimodelshare-0.3.7.dist-info/licenses/LICENSE +5 -0
  171. aimodelshare-0.3.7.dist-info/top_level.txt +1 -0
@@ -0,0 +1,373 @@
+ #import docker
+ import os
+ import shutil
+ import importlib.resources as pkg_resources
+ #import importlib_resources as pkg_resources
+ from . import data_sharing_templates
+ from string import Template
+ import zipfile
+ import time
+ import json
+ import boto3
+ import tempfile
+ import requests
+ import uuid
+
+ delay = 3
+
+ def create_bucket(s3_client, bucket_name, region):
+     try:
+         response = s3_client.head_bucket(Bucket=bucket_name)
+     except Exception:
+         # head_bucket raises when the bucket is missing or not visible, so create it;
+         # us-east-1 must not receive an explicit LocationConstraint.
+         if region == "us-east-1":
+             response = s3_client.create_bucket(
+                 ACL="private",
+                 Bucket=bucket_name
+             )
+         else:
+             location = {'LocationConstraint': region}
+             response = s3_client.create_bucket(
+                 ACL="private",
+                 Bucket=bucket_name,
+                 CreateBucketConfiguration=location
+             )
+     return response
+
+
+ def create_iam_role(iam_client, role_name, trust_relationship):
+     response = iam_client.create_role(RoleName=role_name, AssumeRolePolicyDocument=json.dumps(trust_relationship))
+     time.sleep(delay)
+
+ def create_iam_policy(iam_client, account_id, policy_name, policy):
+     policy_arn = "arn:aws:iam::" + account_id + ":policy/" + policy_name
+     response = iam_client.create_policy(PolicyName=policy_name, PolicyDocument=json.dumps(policy))
+     time.sleep(delay)
+
+ def attach_policy_to_role(iam_client, account_id, role_name, policy_name):
+     policy_arn = "arn:aws:iam::" + account_id + ":policy/" + policy_name
+     response = iam_client.attach_role_policy(RoleName=role_name, PolicyArn=policy_arn)
+     time.sleep(delay)
+
+ def create_docker_folder_local(dataset_dir, dataset_name, python_version):
+
+     tmp_dataset_dir = tempfile.gettempdir() + '/' + '/'.join(['tmp_dataset_dir', dataset_name])
+
+     tmp_dataset = tempfile.gettempdir() + '/' + 'tmp_dataset_dir'
+
+     os.mkdir(tmp_dataset)
+
+     shutil.copytree(dataset_dir, tmp_dataset_dir)
+
+     # Read the Dockerfile template from the installed package rather than a
+     # path relative to the current working directory.
+     data = pkg_resources.read_text(data_sharing_templates, 'Dockerfile.txt')
+
+     template = Template(data)
+     newdata = template.substitute(
+         python_version=python_version)
+     with open(os.path.join(tmp_dataset, 'Dockerfile'), 'w') as file:
+         file.write(newdata)
+
+ def create_docker_folder_codebuild(dataset_dir, dataset_name, template_folder, region, registry_uri, repository, dataset_tag, python_version):
+
+     tmp_dataset_dir = tempfile.gettempdir() + '/' + '/'.join(['tmp_dataset_dir', dataset_name])
+
+     tmp_dataset = tempfile.gettempdir() + '/' + 'tmp_dataset_dir'
+     tmp_dataset = tmp_dataset.replace("/tmp//tmp/", "/tmp/")
+
+     if os.path.exists(tmp_dataset):
+         shutil.rmtree(tmp_dataset)
+     os.makedirs(tmp_dataset)
+
+     if dataset_dir:
+         shutil.copytree(dataset_dir, tmp_dataset_dir)
+
+     template_folder = template_folder.replace("/tmp//tmp/", "/tmp/")
+     os.mkdir(template_folder)
+
+     data = pkg_resources.read_text(data_sharing_templates, 'Dockerfile.txt')
+
+     template = Template(data)
+     newdata = template.substitute(
+         python_version=python_version)
+     with open(os.path.join(template_folder, 'Dockerfile'), 'w') as file:
+         file.write(newdata)
+
+     data = pkg_resources.read_text(data_sharing_templates, 'buildspec.txt')
+
+     template = Template(data)
+     newdata = template.substitute(
+         aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID_AIMS'],
+         aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY_AIMS'],
+         region=region,
+         registry_uri=registry_uri,
+         repository=repository,
+         dataset_tag=dataset_tag)
+     with open(os.path.join(template_folder, 'buildspec.yml'), 'w') as file:
+         file.write(newdata)
+
+     response = shutil.copytree(tmp_dataset, '/'.join([template_folder, 'tmp_dataset_dir']))
+
+     def get_all_file_paths(directory):
+         file_paths = []
+         for root, directories, files in os.walk(directory):
+             for filename in files:
+                 filepath = os.path.join(root, filename)
+                 file_paths.append(filepath)
+         return file_paths
+
+     file_paths = get_all_file_paths(template_folder)
+
+     template_folder_len = len(template_folder)
+
+     # Archive the build context; paths inside the zip are made relative to
+     # template_folder so the buildspec sits at the archive root.
+     with zipfile.ZipFile(''.join([template_folder, '.zip']), 'w') as zf:
+         for file in file_paths:
+             try:
+                 zf.write(file, file[template_folder_len:])
+             except Exception:
+                 pass  # skip files that cannot be archived
+
+     shutil.rmtree(tmp_dataset)
+
+     shutil.rmtree(template_folder)
+
+ #def share_data_local(dataset_dir, tag='latest', python_version='3.8'):
+
+     #create_docker_folder_local(dataset_dir)
+
+     #client = docker.from_env()
+
+     #client.images.build(path='./tmp_dataset_folder', tag=tag)
+
+     #shutil.rmtree('tmp_dataset_dir')
+
+     # send to ecr
+
+     # client.images.
+
+     # client.push(repository,
+
+     # ecr_client.create_repository
+
+     # docker tag aimodelshare-base-image:latest 517169013426.dkr.ecr.us-east-1.amazonaws.com/aimodelshare-base-image:latest
+     # aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws
+     # docker push public.ecr.aws/y2e2a1d6/aimodelshare-image-classification-public
+
+ def share_data_codebuild(account_id, region, dataset_dir, dataset_tag='latest', python_version='3.8'):
+
+     print('Uploading your data. Please wait for a confirmation message.')
+
+     # ECR Public is only served out of us-east-1, so the region argument is pinned here.
+     region = 'us-east-1'
+
+     session = boto3.session.Session(aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID_AIMS"),
+                                     aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY_AIMS"),
+                                     region_name=region)
+
+     if dataset_dir:
+         dataset_name = dataset_dir.replace(" ", "_")
+     else:
+         dataset_name = "placeholder_data"
+     dataset_name = dataset_name.replace("/tmp/", "")
+     repository = dataset_name + '-repository'
+
+     template_folder = tempfile.gettempdir() + '/' + dataset_name + '_' + dataset_tag
+     template_folder = template_folder.replace("/tmp//tmp/", "/tmp/")
+     codebuild_role_name = dataset_name + '-codebuild-role'
+
+     codebuild_policies_name = dataset_name + '-codebuild-policies'
+
+     codebuild_dataset_name = dataset_name + '-upload'
+
+     s3_client = session.client('s3', region_name=region)
+
+     bucket_name = "aimodelshare" + str(account_id) + "sharedata"
+
+     create_bucket(s3_client, bucket_name, region)
+
+     s3_resource = session.resource('s3', region_name=region)
+
+     bucket_versioning = s3_resource.BucketVersioning(bucket_name)
+     response = bucket_versioning.enable()
+
+     # ecr = session.client('ecr')
+
+     ecr = session.client('ecr-public')
+
+     registry_uri = ecr.describe_registries()['registries'][0]['registryUri']
+
+     try:
+         response = ecr.create_repository(
+             repositoryName=repository
+         )
+     except Exception:
+         pass  # repository already exists; reuse it
+
+     create_docker_folder_codebuild(dataset_dir, dataset_name, template_folder, region, registry_uri, repository, dataset_tag, python_version)
+
+     iam = session.client('iam')
+
+     codebuild_trust_relationship = json.loads(pkg_resources.read_text(data_sharing_templates, 'codebuild_trust_relationship.txt'))
+     try:
+         create_iam_role(iam, codebuild_role_name, codebuild_trust_relationship)
+     except Exception:
+         pass  # role already exists
+
+     codebuild_policies = json.loads(pkg_resources.read_text(data_sharing_templates, 'codebuild_policies.txt'))
+     try:
+         create_iam_policy(iam, account_id, codebuild_policies_name, codebuild_policies)
+     except Exception:
+         pass  # policy already exists
+
+     try:
+         attach_policy_to_role(iam, account_id, codebuild_role_name, codebuild_policies_name)
+     except Exception:
+         pass  # policy already attached
+
+     s3_client = session.client('s3')
+     s3_client.upload_file(''.join([template_folder, '.zip']),
+                           bucket_name,
+                           ''.join([dataset_name + '_' + dataset_tag, '.zip']))
+
+     codebuild = session.client('codebuild')
+
+     try:
+         response = codebuild.create_project(
+             name=codebuild_dataset_name,
+             source={
+                 'type': 'S3',
+                 'location': bucket_name + '/' + dataset_name + '_' + dataset_tag + '.zip'
+             },
+             artifacts={
+                 'type': 'NO_ARTIFACTS',
+             },
+             environment={
+                 'type': 'LINUX_CONTAINER',
+                 'image': 'aws/codebuild/standard:5.0',
+                 'computeType': 'BUILD_GENERAL1_SMALL',
+                 'privilegedMode': True
+             },
+             serviceRole=codebuild_role_name
+         )
+     except Exception:
+         # The project already exists: recreate it so its source points at the new zip.
+         response = codebuild.delete_project(
+             name=codebuild_dataset_name
+         )
+         response = codebuild.create_project(
+             name=codebuild_dataset_name,
+             source={
+                 'type': 'S3',
+                 'location': bucket_name + '/' + dataset_name + '_' + dataset_tag + '.zip'
+             },
+             artifacts={
+                 'type': 'NO_ARTIFACTS',
+             },
+             environment={
+                 'type': 'LINUX_CONTAINER',
+                 'image': 'aws/codebuild/standard:5.0',
+                 'computeType': 'BUILD_GENERAL1_SMALL',
+                 'privilegedMode': True
+             },
+             serviceRole=codebuild_role_name
+         )
+
+     response = codebuild.start_build(
+         projectName=codebuild_dataset_name
+     )
+
+     os.remove(template_folder + '.zip')
+
+     return {"ecr_uri": registry_uri + '/' + repository + ':' + dataset_tag}
+
+ def share_dataset(data_directory="folder_file_path", classification="default", private="FALSE"):
+     data_directory = str(data_directory).lower()
+     aishare_datasetname = input("Enter dataset name:")
+     aishare_datadescription = input(
+         "Enter data description (i.e.- filenames denoting training and test data, file types, and any subfolders where files are stored):")
+     aishare_datatags = input(
+         "Enter tags to help users find your data (i.e.- flower dataset, image, supervised learning, classification):")
+     datalicense = input("Insert license (Optional): ")
+     datacitation = input("Insert citation (Optional): ")
+     modelplaygroundurl = input("Insert AI Model Share model playground url (Optional): ")
+     problemdomain = input("Enter a number signifying your dataset problem domain or data type: 1 = Image 2 = Video 3 = Text 4 = Tabular 5 = Audio 6 = Neural Style Transfer 7 = Object Detection 8 = Other \n")
+
+     optiondict = {"1": "Image", "2": "Video", "3": "Text", "4": "Tabular", "5": "Audio", "6": "Neural Style Transfer", "7": "Object Detection", "8": "Other"}
+     problemdomainfinal = optiondict.get(problemdomain, "Other")
+
+     user_session = boto3.session.Session(aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID_AIMS"),
+                                          aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY_AIMS"),
+                                          region_name=os.environ.get("AWS_REGION_AIMS"))
+     account_number = user_session.client(
+         'sts').get_caller_identity().get('Account')
+
+     datauri = share_data_codebuild(account_number, os.environ.get("AWS_REGION_AIMS"), data_directory)
+
+     # TODO: Replace redis data code with new api post call, see helper code below from competition api
+     # TODO: add "dataset:id" as models are ingested on the backend
+     bodydata = {"dataowner": os.environ.get("username"),  # change this to first and last name
+                 "dataname": aishare_datasetname,
+                 'datadescription': aishare_datadescription,
+                 'datatags': aishare_datatags,
+                 'dataecruri': datauri['ecr_uri'],
+                 'datalicense': datalicense,
+                 'datacitation': datacitation,
+                 'classification': classification,
+                 "modelplaygroundurl": modelplaygroundurl,
+                 "Private": private,
+                 "delete": "FALSE",
+                 "problemdomain": problemdomainfinal}
+
+     # datasets api
+     headers_with_authentication = {'Content-Type': 'application/json', 'authorizationToken': os.environ.get("JWT_AUTHORIZATION_TOKEN"), 'Access-Control-Allow-Headers':
+                                    'Content-Type,X-Amz-Date,authorizationToken,Access-Control-Allow-Origin,X-Api-Key,X-Amz-Security-Token,Authorization', 'Access-Control-Allow-Origin': '*'}
+     # datasets lambda function invoked through the url below to register the shared dataset
+     response = requests.post("https://jyz9nn0joe.execute-api.us-east-1.amazonaws.com/dev/modeldata",
+                              json=bodydata, headers=headers_with_authentication)
+     return "Your dataset has been shared to modelshare.org."
+
+ def delete_dataset(ecr_uri):
+
+     session = boto3.session.Session(aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID_AIMS"),
+                                     aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY_AIMS"),
+                                     region_name=os.environ.get("AWS_REGION_AIMS"))
+
+     ecr_client = session.client('ecr-public')
+
+     # An ECR Public URI has the form public.ecr.aws/<registry-alias>/<repository>:<tag>
+     repository_image = ecr_uri.split('/')[2]
+
+     repository = repository_image.split(':')[0]
+     image = repository_image.split(':')[1]
+
+     response = ecr_client.batch_delete_image(
+         repositoryName=repository,
+         imageIds=[
+             {
+                 'imageTag': image
+             }
+         ]
+     )
+
+     image_details = ecr_client.describe_images(
+         repositoryName=repository
+     )
+
+     print("The dataset accessible at '" + ecr_uri + "' has been deleted successfully.")
+
+     # Remove the repository itself once its last image is gone.
+     if len(image_details['imageDetails']) == 0:
+         response = ecr_client.delete_repository(
+             repositoryName=repository
+         )
+
+         print("The repository '" + repository + "' has been deleted successfully.")
+
+     return
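
Taken together, the module needs only a local data folder plus the *_AIMS credential variables, JWT_AUTHORIZATION_TOKEN, and username values that the functions above read from the environment; the S3 bucket, ECR Public repository, IAM role, and CodeBuild project are provisioned on the fly. A minimal usage sketch (the folder name and registry alias below are hypothetical):

    from aimodelshare.data_sharing.share_data import share_dataset, delete_dataset

    # Prompts interactively for name, description, tags, etc., builds the dataset
    # image via CodeBuild, and registers it with the modelshare.org datasets API.
    share_dataset(data_directory="flower_data")

    # Later: delete the pushed image (and its repository, once empty) by ECR URI.
    delete_dataset(ecr_uri="public.ecr.aws/<registry-alias>/flower_data-repository:latest")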
@@ -0,0 +1,8 @@
+ import stat  # needed for file stat constants
+ import os
+
+ # onerror handler for shutil.rmtree: receives the function that failed, the
+ # path it failed on, and the error that occurred; makes the path writable and retries.
+ def redo_with_write(redo_func, path, err):
+     os.chmod(path, stat.S_IWRITE)
+     redo_func(path)
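
redo_with_write follows the shutil.rmtree onerror protocol, retrying a failed delete after making the path writable (the usual fix for read-only files on Windows). Usage sketch:

    import shutil
    from aimodelshare.data_sharing.utils import redo_with_write

    # Any deletion that fails (e.g. on a read-only file) is retried after chmod.
    shutil.rmtree("tmp_dataset_dir", onerror=redo_with_write)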
@@ -0,0 +1,246 @@
+ import boto3
+ import os
+ import requests
+ import uuid
+ import json
+ import math
+ import time
+ import datetime
+ import onnx
+ import tempfile
+ import shutil
+ import sys
+ import pickle
+
+ from aimodelshare.tools import extract_varnames_fromtrainingdata, _get_extension_from_filepath
+ from aimodelshare.aws import get_s3_iam_client, run_function_on_lambda, get_token, get_aws_token, get_aws_client
+ from aimodelshare.bucketpolicy import _custom_upload_policy
+ from aimodelshare.exceptions import AuthorizationError, AWSAccessError, AWSUploadError
+ from aimodelshare.api import create_prediction_api
+ from aimodelshare.api import get_api_json
+
+ from aimodelshare.preprocessormodules import upload_preprocessor
+ from aimodelshare.model import _get_predictionmodel_key, _extract_model_metadata
+
+ # what is private here
+ # what if deployment_dir is not present? Not handled yet
+ # Return section and private parameter documentation left
+ # output_list_exampledata list being accessed by which key in response?
+ # create file_objects in temp_dir, abstract from user
+ # convert example output from list to json, in frontend display value of key of result
+
+ def create_bucket(s3_client, bucket_name, region):
+     try:
+         response = s3_client.head_bucket(Bucket=bucket_name)
+     except Exception:
+         # Bucket is missing or not visible, so create it;
+         # us-east-1 must not receive an explicit LocationConstraint.
+         if region == "us-east-1":
+             response = s3_client.create_bucket(
+                 ACL="private",
+                 Bucket=bucket_name
+             )
+         else:
+             location = {'LocationConstraint': region}
+             response = s3_client.create_bucket(
+                 ACL="private",
+                 Bucket=bucket_name,
+                 CreateBucketConfiguration=location
+             )
+     return response
+
+
+ def deploy_custom_lambda(input_json_exampledata, output_json_exampledata, lambda_filepath, deployment_dir, private, custom_libraries):
+
+     """
+     Deploys an AWS Lambda function based on the predict() function specified in the lambda_filepath .py file.
+     Inputs : 6
+     Output : Information about the deployed API
+
+     -----------
+
+     Parameters :
+
+     input_json_exampledata : JSON object [REQUIRED]
+         JSON object representing the structure of the input that the Lambda expects to receive
+
+     output_json_exampledata : List of element(s) [REQUIRED]
+         List of element(s) representing the output that the Lambda will return
+
+     lambda_filepath : String [REQUIRED]
+         Relative/absolute path to the .py file containing the predict() function;
+         any custom modules imported and used by the predict() function should be
+         placed in the deployment_dir directory
+
+     deployment_dir : String
+         Relative/absolute path to the directory containing all the files used by the
+         predict() function in the lambda_filepath .py file
+
+     private : String ("TRUE"/"FALSE")
+         Whether the resulting Model Playground is private
+
+     custom_libraries : String of libraries ("library_1,library_2")
+         Comma-separated string of libraries required for the Lambda to work.
+         Names must be packages available on PyPI; installation follows the
+         pattern pip install <library_name>
+
+     -----------
+
+     Returns
+
+     api_info : prints statements with generated live prediction API details
+         also prints steps to update the model submissions by the user/team
+     """
+
+     temp_dir = tempfile.gettempdir()
+
+     file_objects_folder_path = os.path.join(temp_dir, 'file_objects')
+
+     # if 'file_objects' is not the name of the deployment directory deployment_dir, a 'file_objects'
+     # directory is created and the contents of deployment_dir are copied into it
+     if deployment_dir != file_objects_folder_path:
+         if os.path.exists(file_objects_folder_path):
+             shutil.rmtree(file_objects_folder_path)
+         shutil.copytree(deployment_dir, file_objects_folder_path)
+
+     # if 'custom_lambda.py' is not the name of the custom lambda .py file lambda_filepath, a
+     # 'custom_lambda.py' file is created and the contents of the lambda_filepath .py file are written into it
+     if lambda_filepath != 'custom_lambda.py':
+         with open(lambda_filepath, 'r') as in_f:
+             with open('custom_lambda.py', 'w') as out_f:
+                 out_f.write(in_f.read())
+
+     # create json and upload to API folder in S3 for displaying
+     json_exampledata = {
+         "input_json_exampledata": input_json_exampledata,
+         "output_json_exampledata": output_json_exampledata
+     }
+
+     with open(os.path.join(temp_dir, 'exampledata.json'), 'w') as f:
+         json.dump(json_exampledata, f)
+
+     aws_access_key_id = str(os.environ.get("AWS_ACCESS_KEY_ID_AIMS"))
+     aws_secret_access_key = str(os.environ.get("AWS_SECRET_ACCESS_KEY_AIMS"))
+     region_name = str(os.environ.get("AWS_REGION_AIMS"))
+
+     ### COMMENTS - TO DO
+     api_json = get_api_json()  # why is this required
+     user_client = boto3.client(  # creating apigateway client
+         'apigateway',
+         aws_access_key_id=aws_access_key_id,
+         aws_secret_access_key=aws_secret_access_key,
+         region_name=region_name
+     )
+     response2 = user_client.import_rest_api(  # what is being imported
+         failOnWarnings=True,
+         parameters={'endpointConfigurationTypes': 'REGIONAL'},
+         body=api_json
+     )
+     ###
+
+     start = datetime.datetime.now()
+
+     api_id = response2['id']
+     now = datetime.datetime.now()
+     s3, iam, region = get_s3_iam_client(aws_access_key_id, aws_secret_access_key, region_name)
+     create_bucket(s3['client'], os.environ.get("BUCKET_NAME"), region)
+
+     apiurl = create_prediction_api(None, str(api_id), 'custom', 'FALSE', [], api_id, "TRUE", custom_libraries)
+
149
+ aishare_modelname = input("Name your deployment: ")
150
+ aishare_modeldescription = input("Describe your deployment: ")
151
+ aishare_modelevaluation = input("Describe your deployment's performance (OPTIONAL): ")
152
+ aishare_tags = input("Enter comma-separated search categories for your deployment (OPTIONAL): ")
153
+ aishare_apicalls = 0
154
+ print('')
155
+ # unpack user credentials
156
+ unique_model_id = str(api_id)
157
+ bucket_name = os.environ.get("BUCKET_NAME")
158
+
159
+ # why is this being done here, can it not be abstracted
160
+
161
+ categorical="FALSE" # categorical is being used where, for what
162
+ bodydata = {
163
+ "id": int(math.log(1/((time.time()*1000000)))*100000000000000),
164
+ "unique_model_id": unique_model_id,
165
+ "apideveloper": os.environ.get("username"), # change this to first and last name
166
+ "apimodeldescription": aishare_modeldescription,
167
+ "apimodelevaluation": aishare_modelevaluation,
168
+ "apimodeltype": 'custom',
169
+ # getting rid of extra quotes that screw up dynamodb string search on apiurls
170
+ "apiurl": apiurl['body'].strip('\"'),
171
+ "bucket_name": bucket_name,
172
+ "version": 1,
173
+ "modelname": aishare_modelname,
174
+ "tags": aishare_tags,
175
+ "Private": private,
176
+ "Categorical": categorical,
177
+ "delete": "FALSE",
178
+ }
179
+
180
+ # Get the response
181
+ headers_with_authentication = {'Content-Type': 'application/json', 'authorizationToken': os.environ.get("JWT_AUTHORIZATION_TOKEN"), 'Access-Control-Allow-Headers':
182
+ 'Content-Type,X-Amz-Date,authorizationToken,Access-Control-Allow-Origin,X-Api-Key,X-Amz-Security-Token,Authorization', 'Access-Control-Allow-Origin': '*'}
183
+
184
+ # modeltoapi lambda function invoked through below url to return new prediction api in response
185
+ requests.post("https://bhrdesksak.execute-api.us-east-1.amazonaws.com/dev/modeldata",
186
+ json=bodydata, headers=headers_with_authentication)
187
+
188
+ # Get the response
189
+ headers_with_authentication = {'Content-Type': 'application/json', 'authorizationToken': os.environ.get("JWT_AUTHORIZATION_TOKEN"), 'Access-Control-Allow-Headers':
190
+ 'Content-Type,X-Amz-Date,authorizationToken,Access-Control-Allow-Origin,X-Api-Key,X-Amz-Security-Token,Authorization', 'Access-Control-Allow-Origin': '*'}
191
+ # modeltoapi lambda function invoked through below url to return new prediction api in response
192
+ response = requests.post("https://bhrdesksak.execute-api.us-east-1.amazonaws.com/dev/modeldata",
193
+ json=bodydata, headers=headers_with_authentication)
194
+ response_string = response.text
195
+ response_string = response_string[1:-1]
196
+
+     end = datetime.datetime.now()  # end timer
+     difference = (end - start).total_seconds()
+     finalresult2 = "Your AI Model Share API was created in " + \
+         str(int(difference)) + " seconds." + " API Url: " + apiurl['body']
+     s3, iam, region = get_s3_iam_client(os.environ.get("AWS_ACCESS_KEY_ID"), os.environ.get("AWS_SECRET_ACCESS_KEY"), os.environ.get("AWS_REGION"))
+     policy_response = iam["client"].get_policy(
+         PolicyArn=os.environ.get("POLICY_ARN")
+     )
+     user_policy = iam["resource"].UserPolicy(
+         os.environ.get("IAM_USERNAME"), policy_response['Policy']['PolicyName'])
+     response = iam["client"].detach_user_policy(
+         UserName=os.environ.get("IAM_USERNAME"),
+         PolicyArn=os.environ.get("POLICY_ARN")
+     )
+     # add new policy that only allows file upload to bucket
+     policy = iam["resource"].Policy(os.environ.get("POLICY_ARN"))
+     response = policy.delete()
+     s3upload_policy = _custom_upload_policy(bucket_name, unique_model_id)
+     s3uploadpolicy_name = 'temporaryaccessAImodelsharePolicy' + \
+         str(uuid.uuid1().hex)
+     s3uploadpolicy_response = iam["client"].create_policy(
+         PolicyName=s3uploadpolicy_name,
+         PolicyDocument=json.dumps(s3upload_policy)
+     )
+     user = iam["resource"].User(os.environ.get("IAM_USERNAME"))
+     response = user.attach_policy(
+         PolicyArn=s3uploadpolicy_response['Policy']['Arn']
+     )
+     finalresultteams3info = "Your team members can submit improved models to your prediction api using the update_model_version() function."
+     api_info = finalresult2 + "\n"
+
+     # Build output {{{
+     final_message = ("Follow this link to explore your Model Playground's functionality.\n"
+                      "You can make predictions with the cURL functionality and access example code from the Programmatic tab.\n")
+     web_dashboard_url = ("https://www.modelshare.org/detail/" + response_string)
+
+     end = datetime.datetime.now()
+     difference = (end - start).total_seconds()
+     finalresult2 = "Success! Your Model Playground was created in " + \
+         str(int(difference)) + " seconds."
+
+     print(api_info)
+
+     print("\n\n" + final_message + web_dashboard_url)
+
+     return
+
+ __all__ = [
+     "deploy_custom_lambda",
+ ]
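
For orientation, a minimal calling sketch for the function above. The file and folder names are hypothetical, and the environment variables it reads (AWS_*_AIMS, BUCKET_NAME, JWT_AUTHORIZATION_TOKEN, IAM_USERNAME, POLICY_ARN, username) must already be set:

    from aimodelshare.deploy_custom_lambda import deploy_custom_lambda

    # predict.py defines predict(); ./deployment holds everything predict() loads
    # (model weights, preprocessor files, helper modules).
    deploy_custom_lambda(
        input_json_exampledata={"data": [[5.1, 3.5, 1.4, 0.2]]},
        output_json_exampledata=["setosa"],
        lambda_filepath="predict.py",
        deployment_dir="./deployment",
        private="FALSE",
        custom_libraries="scikit-learn,numpy",
    )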
@@ -0,0 +1,20 @@
+ # Minimal makefile for Sphinx documentation
+ #
+
+ # You can set these variables from the command line, and also
+ # from the environment for the first two.
+ SPHINXOPTS ?=
+ SPHINXBUILD ?= sphinx-build
+ SOURCEDIR = source
+ BUILDDIR = build
+
+ # Put it first so that "make" without argument is like "make help".
+ help:
+ 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+ .PHONY: help Makefile
+
+ # Catch-all target: route all unknown targets to Sphinx using the new
+ # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+ %: Makefile
+ 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@@ -0,0 +1,28 @@
+ import os
+
+ from karma_sphinx_theme import _version as version
+
+
+ def get_path():
+     """
+     Shortcut for users whose theme is next to their conf.py.
+     """
+     # Theme directory is defined as our parent directory
+     return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+
+
+ def update_context(app, pagename, templatename, context, doctree):
+     context['karma_sphinx_theme_version'] = version.__version__
+
+ def setup(app):
+     """ add_html_theme is new in Sphinx 1.6+ """
+
+     if hasattr(app, 'add_html_theme'):
+         theme_path = os.path.abspath(os.path.dirname(__file__))
+         app.add_html_theme('karma_sphinx_theme', theme_path)
+     app.connect('html-page-context', update_context)
+
+     return {
+         'version': version.__version__,
+         'parallel_read_safe': True
+     }
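
Because setup() registers the theme through add_html_theme, a Sphinx project (such as the documentation bundled in this package) can select it in the standard way; a one-line conf.py sketch:

    # conf.py
    html_theme = "karma_sphinx_theme"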
@@ -0,0 +1,2 @@
+ __version_info__ = (0, 0, 8)
+ __version__ = '.'.join(map(str, __version_info__))