aimodelshare 0.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aimodelshare/README.md +26 -0
- aimodelshare/__init__.py +100 -0
- aimodelshare/aimsonnx.py +2381 -0
- aimodelshare/api.py +836 -0
- aimodelshare/auth.py +163 -0
- aimodelshare/aws.py +511 -0
- aimodelshare/aws_client.py +173 -0
- aimodelshare/base_image.py +154 -0
- aimodelshare/bucketpolicy.py +106 -0
- aimodelshare/color_mappings/color_mapping_keras.csv +121 -0
- aimodelshare/color_mappings/color_mapping_pytorch.csv +117 -0
- aimodelshare/containerisation.py +244 -0
- aimodelshare/containerization.py +712 -0
- aimodelshare/containerization_templates/Dockerfile.txt +8 -0
- aimodelshare/containerization_templates/Dockerfile_PySpark.txt +23 -0
- aimodelshare/containerization_templates/buildspec.txt +14 -0
- aimodelshare/containerization_templates/lambda_function.txt +40 -0
- aimodelshare/custom_approach/__init__.py +1 -0
- aimodelshare/custom_approach/lambda_function.py +17 -0
- aimodelshare/custom_eval_metrics.py +103 -0
- aimodelshare/data_sharing/__init__.py +0 -0
- aimodelshare/data_sharing/data_sharing_templates/Dockerfile.txt +3 -0
- aimodelshare/data_sharing/data_sharing_templates/__init__.py +1 -0
- aimodelshare/data_sharing/data_sharing_templates/buildspec.txt +15 -0
- aimodelshare/data_sharing/data_sharing_templates/codebuild_policies.txt +129 -0
- aimodelshare/data_sharing/data_sharing_templates/codebuild_trust_relationship.txt +12 -0
- aimodelshare/data_sharing/download_data.py +620 -0
- aimodelshare/data_sharing/share_data.py +373 -0
- aimodelshare/data_sharing/utils.py +8 -0
- aimodelshare/deploy_custom_lambda.py +246 -0
- aimodelshare/documentation/Makefile +20 -0
- aimodelshare/documentation/karma_sphinx_theme/__init__.py +28 -0
- aimodelshare/documentation/karma_sphinx_theme/_version.py +2 -0
- aimodelshare/documentation/karma_sphinx_theme/breadcrumbs.html +70 -0
- aimodelshare/documentation/karma_sphinx_theme/layout.html +172 -0
- aimodelshare/documentation/karma_sphinx_theme/search.html +50 -0
- aimodelshare/documentation/karma_sphinx_theme/searchbox.html +14 -0
- aimodelshare/documentation/karma_sphinx_theme/static/css/custom.css +2 -0
- aimodelshare/documentation/karma_sphinx_theme/static/css/custom.css.map +1 -0
- aimodelshare/documentation/karma_sphinx_theme/static/css/theme.css +2751 -0
- aimodelshare/documentation/karma_sphinx_theme/static/css/theme.css.map +1 -0
- aimodelshare/documentation/karma_sphinx_theme/static/css/theme.min.css +2 -0
- aimodelshare/documentation/karma_sphinx_theme/static/css/theme.min.css.map +1 -0
- aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.eot +0 -0
- aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.svg +32 -0
- aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.ttf +0 -0
- aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.woff +0 -0
- aimodelshare/documentation/karma_sphinx_theme/static/font/fontello.woff2 +0 -0
- aimodelshare/documentation/karma_sphinx_theme/static/js/theme.js +68 -0
- aimodelshare/documentation/karma_sphinx_theme/theme.conf +9 -0
- aimodelshare/documentation/make.bat +35 -0
- aimodelshare/documentation/requirements.txt +2 -0
- aimodelshare/documentation/source/about.rst +18 -0
- aimodelshare/documentation/source/advanced_features.rst +137 -0
- aimodelshare/documentation/source/competition.rst +218 -0
- aimodelshare/documentation/source/conf.py +58 -0
- aimodelshare/documentation/source/create_credentials.rst +86 -0
- aimodelshare/documentation/source/example_notebooks.rst +132 -0
- aimodelshare/documentation/source/functions.rst +151 -0
- aimodelshare/documentation/source/gettingstarted.rst +390 -0
- aimodelshare/documentation/source/images/creds1.png +0 -0
- aimodelshare/documentation/source/images/creds2.png +0 -0
- aimodelshare/documentation/source/images/creds3.png +0 -0
- aimodelshare/documentation/source/images/creds4.png +0 -0
- aimodelshare/documentation/source/images/creds5.png +0 -0
- aimodelshare/documentation/source/images/creds_file_example.png +0 -0
- aimodelshare/documentation/source/images/predict_tab.png +0 -0
- aimodelshare/documentation/source/index.rst +110 -0
- aimodelshare/documentation/source/modelplayground.rst +132 -0
- aimodelshare/exceptions.py +11 -0
- aimodelshare/generatemodelapi.py +1270 -0
- aimodelshare/iam/codebuild_policy.txt +129 -0
- aimodelshare/iam/codebuild_trust_relationship.txt +12 -0
- aimodelshare/iam/lambda_policy.txt +15 -0
- aimodelshare/iam/lambda_trust_relationship.txt +12 -0
- aimodelshare/json_templates/__init__.py +1 -0
- aimodelshare/json_templates/api_json.txt +155 -0
- aimodelshare/json_templates/auth/policy.txt +1 -0
- aimodelshare/json_templates/auth/role.txt +1 -0
- aimodelshare/json_templates/eval/policy.txt +1 -0
- aimodelshare/json_templates/eval/role.txt +1 -0
- aimodelshare/json_templates/function/policy.txt +1 -0
- aimodelshare/json_templates/function/role.txt +1 -0
- aimodelshare/json_templates/integration_response.txt +5 -0
- aimodelshare/json_templates/lambda_policy_1.txt +15 -0
- aimodelshare/json_templates/lambda_policy_2.txt +8 -0
- aimodelshare/json_templates/lambda_role_1.txt +12 -0
- aimodelshare/json_templates/lambda_role_2.txt +16 -0
- aimodelshare/leaderboard.py +174 -0
- aimodelshare/main/1.txt +132 -0
- aimodelshare/main/1B.txt +112 -0
- aimodelshare/main/2.txt +153 -0
- aimodelshare/main/3.txt +134 -0
- aimodelshare/main/4.txt +128 -0
- aimodelshare/main/5.txt +109 -0
- aimodelshare/main/6.txt +105 -0
- aimodelshare/main/7.txt +144 -0
- aimodelshare/main/8.txt +142 -0
- aimodelshare/main/__init__.py +1 -0
- aimodelshare/main/authorization.txt +275 -0
- aimodelshare/main/eval_classification.txt +79 -0
- aimodelshare/main/eval_lambda.txt +1709 -0
- aimodelshare/main/eval_regression.txt +80 -0
- aimodelshare/main/lambda_function.txt +8 -0
- aimodelshare/main/nst.txt +149 -0
- aimodelshare/model.py +1543 -0
- aimodelshare/modeluser.py +215 -0
- aimodelshare/moral_compass/README.md +408 -0
- aimodelshare/moral_compass/__init__.py +65 -0
- aimodelshare/moral_compass/_version.py +3 -0
- aimodelshare/moral_compass/api_client.py +601 -0
- aimodelshare/moral_compass/apps/__init__.py +69 -0
- aimodelshare/moral_compass/apps/ai_consequences.py +540 -0
- aimodelshare/moral_compass/apps/bias_detective.py +714 -0
- aimodelshare/moral_compass/apps/ethical_revelation.py +898 -0
- aimodelshare/moral_compass/apps/fairness_fixer.py +889 -0
- aimodelshare/moral_compass/apps/judge.py +888 -0
- aimodelshare/moral_compass/apps/justice_equity_upgrade.py +853 -0
- aimodelshare/moral_compass/apps/mc_integration_helpers.py +820 -0
- aimodelshare/moral_compass/apps/model_building_game.py +1104 -0
- aimodelshare/moral_compass/apps/model_building_game_beginner.py +687 -0
- aimodelshare/moral_compass/apps/moral_compass_challenge.py +858 -0
- aimodelshare/moral_compass/apps/session_auth.py +254 -0
- aimodelshare/moral_compass/apps/shared_activity_styles.css +349 -0
- aimodelshare/moral_compass/apps/tutorial.py +481 -0
- aimodelshare/moral_compass/apps/what_is_ai.py +853 -0
- aimodelshare/moral_compass/challenge.py +365 -0
- aimodelshare/moral_compass/config.py +187 -0
- aimodelshare/placeholders/model.onnx +0 -0
- aimodelshare/placeholders/preprocessor.zip +0 -0
- aimodelshare/playground.py +1968 -0
- aimodelshare/postprocessormodules.py +157 -0
- aimodelshare/preprocessormodules.py +373 -0
- aimodelshare/pyspark/1.txt +195 -0
- aimodelshare/pyspark/1B.txt +181 -0
- aimodelshare/pyspark/2.txt +220 -0
- aimodelshare/pyspark/3.txt +204 -0
- aimodelshare/pyspark/4.txt +187 -0
- aimodelshare/pyspark/5.txt +178 -0
- aimodelshare/pyspark/6.txt +174 -0
- aimodelshare/pyspark/7.txt +211 -0
- aimodelshare/pyspark/8.txt +206 -0
- aimodelshare/pyspark/__init__.py +1 -0
- aimodelshare/pyspark/authorization.txt +258 -0
- aimodelshare/pyspark/eval_classification.txt +79 -0
- aimodelshare/pyspark/eval_lambda.txt +1441 -0
- aimodelshare/pyspark/eval_regression.txt +80 -0
- aimodelshare/pyspark/lambda_function.txt +8 -0
- aimodelshare/pyspark/nst.txt +213 -0
- aimodelshare/python/my_preprocessor.py +58 -0
- aimodelshare/readme.md +26 -0
- aimodelshare/reproducibility.py +181 -0
- aimodelshare/sam/Dockerfile.txt +8 -0
- aimodelshare/sam/Dockerfile_PySpark.txt +24 -0
- aimodelshare/sam/__init__.py +1 -0
- aimodelshare/sam/buildspec.txt +11 -0
- aimodelshare/sam/codebuild_policies.txt +129 -0
- aimodelshare/sam/codebuild_trust_relationship.txt +12 -0
- aimodelshare/sam/codepipeline_policies.txt +173 -0
- aimodelshare/sam/codepipeline_trust_relationship.txt +12 -0
- aimodelshare/sam/spark-class.txt +2 -0
- aimodelshare/sam/template.txt +54 -0
- aimodelshare/tools.py +103 -0
- aimodelshare/utils/__init__.py +78 -0
- aimodelshare/utils/optional_deps.py +38 -0
- aimodelshare/utils.py +57 -0
- aimodelshare-0.3.7.dist-info/METADATA +298 -0
- aimodelshare-0.3.7.dist-info/RECORD +171 -0
- aimodelshare-0.3.7.dist-info/WHEEL +5 -0
- aimodelshare-0.3.7.dist-info/licenses/LICENSE +5 -0
- aimodelshare-0.3.7.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import zipfile
|
|
3
|
+
import sys
|
|
4
|
+
import pickle
|
|
5
|
+
import tempfile
|
|
6
|
+
import dill
|
|
7
|
+
import importlib
|
|
8
|
+
import inspect
|
|
9
|
+
|
|
10
|
+
# how to import a postprocessor from a zipfile into a tempfile then into the current session
|
|
11
|
+
def import_postprocessor(filepath):
    """
    Import a postprocessor function into the current session from a zip file.

    The archive (created by ``export_postprocessor``) must contain a
    ``postprocessor.py`` defining a function named ``postprocessor``, plus any
    pickled global objects (``*.pkl``) that function depends on.

    Parameters
    ----------
    filepath : str
        Path to the postprocessor zip file, e.g. "./postprocessor.zip".

    Returns
    -------
    function
        The ``postprocessor`` function, with its pickled dependencies loaded
        into module globals.
    """
    import tempfile
    from zipfile import ZipFile
    import os
    import pickle

    # Use a private temp folder (consistent with import_preprocessor) so a
    # shared system temp dir cannot pollute the *.pkl scan below.
    temp_dir = tempfile.mkdtemp()

    # Extract all the contents of the zip file into the temp folder.
    with ZipFile(filepath, 'r') as zipObj:
        zipObj.extractall(temp_dir)

    # Load every pickled dependency into module globals under its file stem
    # so the postprocessor function can resolve those names at call time.
    pickle_file_list = []
    for file in os.listdir(temp_dir):
        if file.endswith(".pkl"):
            pickle_file_list.append(os.path.join(temp_dir, file))
    for pkl_path in pickle_file_list:
        objectname = os.path.basename(pkl_path).replace(".pkl", "")
        # NOTE(review): pickle.load is trusted-input only; never import
        # archives from untrusted sources.
        with open(pkl_path, "rb") as pkl_file:
            globals()[objectname] = pickle.load(pkl_file)

    # Define the postprocessor function in module globals.
    with open(os.path.join(temp_dir, 'postprocessor.py')) as src:
        exec(src.read(), globals())

    # Best-effort cleanup of extracted files for future runs.
    try:
        os.remove(os.path.join(temp_dir, "postprocessor.py"))
    except OSError:
        pass
    for pkl_path in pickle_file_list:
        # Fix: entries are already absolute *.pkl paths; the previous cleanup
        # appended a second ".pkl" suffix and never removed anything.
        try:
            os.remove(pkl_path)
        except OSError:
            pass

    return postprocessor
|
|
53
|
+
|
|
54
|
+
import os
|
|
55
|
+
|
|
56
|
+
def export_postprocessor(postprocessor_fxn, directory, globs=globals()):
    """
    Export a postprocessor function and its global dependencies to a zip file.

    Writes ``postprocessor.zip`` into *directory*, containing the function's
    source as ``postprocessor.py`` plus one ``<name>.pkl`` per picklable
    global object the function references.

    Parameters
    ----------
    postprocessor_fxn : function
        Should always be named "postprocessor" to work properly in the
        aimodelshare process.
    directory : str
        Destination folder; use "" for the current working directory.
    globs : dict, optional
        Unused; kept for backward compatibility.

    Returns
    -------
    None. Errors are printed rather than raised (best-effort export).
    """
    try:
        import tempfile
        from zipfile import ZipFile
        import inspect
        import os
        import pickle

        # Make the function's global references resolvable from this module.
        globals().update(postprocessor_fxn.__globals__)

        folderpath = directory
        # Private temp folder (consistent with export_preprocessor).
        temp_dir = tempfile.mkdtemp()

        # Remove any stale archive from a previous export.
        try:
            os.remove(os.path.join(folderpath, "postprocessor.zip"))
        except OSError:
            pass

        # Save the function source within the temporary folder.
        source = inspect.getsource(postprocessor_fxn)
        with open(os.path.join(temp_dir, "postprocessor.py"), "w") as f:
            f.write(source)

        # Globals referenced by the function, minus modules: module objects
        # cannot be pickled. Fix: besides the known ML module names, also skip
        # anything that actually IS a module object (e.g. numpy), instead of
        # failing the whole export on it.
        modulenames = ["sklearn", "keras", "tensorflow", "cv2", "resize", "pytorch"]
        function_objects = list(inspect.getclosurevars(postprocessor_fxn).globals.keys())
        function_objects_nomodules = [
            name for name in function_objects
            if name not in modulenames and not inspect.ismodule(globals().get(name))
        ]

        saved_object_names = []
        # Fix: use context managers so the zip and pickle file handles are
        # always closed, even when an object fails to serialize mid-way.
        with ZipFile(os.path.join(folderpath, "postprocessor.zip"), 'w') as zipObj:
            # Add the postprocessor function itself first.
            zipObj.write(os.path.join(temp_dir, "postprocessor.py"), "postprocessor.py")
            # Pickle each referenced global and add it to the archive.
            for name in function_objects_nomodules:
                pkl_name = name + ".pkl"
                pkl_path = os.path.join(temp_dir, pkl_name)
                with open(pkl_path, "wb") as pkl_file:
                    pickle.dump(globals()[name], pkl_file)
                zipObj.write(pkl_path, pkl_name)
                saved_object_names.append(name)

        # Clean up temp directory files for future runs (best effort).
        try:
            os.remove(os.path.join(temp_dir, "postprocessor.py"))
            for name in saved_object_names:
                os.remove(os.path.join(temp_dir, name + ".pkl"))
        except OSError:
            pass

    except Exception as e:
        # Preserve the original best-effort contract: report, do not raise.
        print(e)
|
|
124
|
+
|
|
125
|
+
def upload_postprocessor(postprocessor_path, client, bucket, model_id, model_version):
    """
    Upload a postprocessor zip archive to S3 as the runtime postprocessor.

    Parameters
    ----------
    postprocessor_path : str
        Local path to the postprocessor zip file.
    client : dict
        Mapping holding a boto3-style S3 client under the "client" key.
    bucket : str
        Destination bucket name.
    model_id : str
        Key prefix under which the archive is stored.
    model_version :
        Unused; kept for interface compatibility.

    Errors are printed rather than raised (best-effort upload).
    """
    try:
        # Validate the local archive before attempting the upload.
        if not os.path.exists(postprocessor_path):
            raise FileNotFoundError(
                f"The postprocessor file at {postprocessor_path} does not exist"
            )

        destination_key = model_id + "/runtime_postprocessor" + ".zip"
        s3_client = client["client"]
        s3_client.upload_file(postprocessor_path, bucket, destination_key)
    except Exception as e:
        print(e)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
# Public API of this module. Fix: entries must be strings — with bare
# function objects in __all__, `from ... import *` raises TypeError.
__all__ = [
    "import_postprocessor",
    "export_postprocessor",
    "upload_postprocessor",
]
|
|
@@ -0,0 +1,373 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import zipfile
|
|
3
|
+
import sys
|
|
4
|
+
import pickle
|
|
5
|
+
import tempfile
|
|
6
|
+
import importlib
|
|
7
|
+
import inspect
|
|
8
|
+
import shutil
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
#from aimodelshare.python.my_preprocessor import *
|
|
11
|
+
|
|
12
|
+
# how to import a preprocessor from a zipfile into a tempfile then into the current session
|
|
13
|
+
def import_preprocessor(filepath):
    """
    Import preprocessor function to session from zip file.

    The archive (created by ``export_preprocessor``) must contain a
    ``preprocessor.py`` defining a function named ``preprocessor``, plus any
    pickled globals (``*.pkl``) and/or zipped pyspark models (``*.zip``) that
    function depends on.

    Parameters:
    -----------
    `filepath`: ``string``
        value - absolute path to preprocessor file
        [REQUIRED] to be set by the user
        "./preprocessor.zip"
        file is generated using export_preprocessor function from the AI
        Modelshare library

    Returns:
    --------
    The ``preprocessor`` function, imported into the session.

    Raises:
    -------
    ImportError when the archive contains pyspark models but pyspark is not
    installed.
    """
    import tempfile
    from zipfile import ZipFile
    import os
    import pickle

    # Private temp folder so parallel imports cannot collide.
    temp_dir = tempfile.mkdtemp()

    # Extract all the contents of the zip file into the temp folder.
    with ZipFile(filepath, 'r') as zipObj:
        zipObj.extractall(temp_dir)

    # Collect pickled globals and zipped pyspark models shipped alongside
    # preprocessor.py.
    pickle_file_list = []
    zip_file_list = []
    for file in os.listdir(temp_dir):
        if file.endswith(".pkl"):
            pickle_file_list.append(os.path.join(temp_dir, file))
        if file.endswith(".zip"):
            zip_file_list.append(os.path.join(temp_dir, file))

    # Load pickled dependencies into module globals under their file stems so
    # the preprocessor function can resolve those names at call time.
    for pkl_path in pickle_file_list:
        objectname = os.path.basename(pkl_path).replace(".pkl", "")
        with open(pkl_path, "rb") as pkl_file:
            globals()[objectname] = pickle.load(pkl_file)

    # zip_file_list entries are pyspark models; a spark session and context
    # are needed to instantiate the model objects.
    if len(zip_file_list):
        try:
            from pyspark.sql import SparkSession
        except ImportError:
            # Fix: the previous code raised a plain string, which itself
            # raises "TypeError: exceptions must derive from BaseException"
            # and hides this message. Raise a real exception instead.
            raise ImportError("Error: Please install pyspark to enable pyspark features")

        spark = SparkSession \
            .builder \
            .appName('Pyspark Model') \
            .getOrCreate()

        for model_zip in zip_file_list:
            # Archive name encodes "<ModelRepr>__<objectname>".
            objectnames = str(os.path.basename(model_zip)).replace(".zip", "").split("__")
            dir_path = model_zip.replace(".zip", "")
            Path(dir_path).mkdir(parents=True, exist_ok=True)

            # Extract the saved model directory from its archive.
            with ZipFile(model_zip, 'r') as zipObj:
                zipObj.extractall(dir_path)

            preprocessor_type = objectnames[0].split("_")[0]
            objectname = objectnames[1]
            from aimodelshare.aimsonnx import pyspark_model_from_string
            preprocessor_class = pyspark_model_from_string(preprocessor_type)
            # PipelineModel requires a stages argument at construction time.
            if preprocessor_type == "PipelineModel":
                preprocessor_model = preprocessor_class(stages=None)
            else:
                preprocessor_model = preprocessor_class()

            preprocessor_model = preprocessor_model.load(dir_path)
            globals()[objectname] = preprocessor_model

    # Define the preprocessor function in module globals.
    with open(os.path.join(temp_dir, 'preprocessor.py')) as src:
        exec(src.read(), globals())

    # Best-effort cleanup of extracted files for future runs.
    try:
        os.remove(os.path.join(temp_dir, "preprocessor.py"))
    except OSError:
        pass
    try:
        for file in pickle_file_list:
            os.remove(file)
        for file in zip_file_list:
            os.remove(file)
    except OSError:
        pass

    return preprocessor
|
|
118
|
+
|
|
119
|
+
def _test_object_serialization(obj, obj_name):
|
|
120
|
+
"""
|
|
121
|
+
Test if an object can be serialized with pickle.
|
|
122
|
+
|
|
123
|
+
Args:
|
|
124
|
+
obj: Object to test
|
|
125
|
+
obj_name: Name of the object for error reporting
|
|
126
|
+
|
|
127
|
+
Returns:
|
|
128
|
+
tuple: (success: bool, error_msg: str or None)
|
|
129
|
+
"""
|
|
130
|
+
import pickle
|
|
131
|
+
|
|
132
|
+
try:
|
|
133
|
+
pickle.dumps(obj)
|
|
134
|
+
return True, None
|
|
135
|
+
except Exception as e:
|
|
136
|
+
return False, f"{type(e).__name__}: {str(e)}"
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def export_preprocessor(preprocessor_fxn, directory, globs=globals()):
    """
    Exports preprocessor and related objects into zip file for model deployment.

    Writes 'preprocessor.zip' into *directory*, containing the function source
    as 'preprocessor.py', one '<name>.pkl' per picklable global the function
    references, and one '<model>__<name>.zip' per pyspark model object that
    cannot be pickled.

    Parameters:
    -----------
    `preprocessor_fxn`: name of preprocessor function
        Preprocessor function should always be named "preprocessor" to work
        properly in aimodelshare process.
    `directory`: ``string`` folderpath to preprocessor function
        use "" to reference current working directory
    `globs`: unused; kept for backward compatibility.

    Returns:
    --------
    file named 'preprocessor.zip' in the correct format for model deployment

    Raises:
    -------
    RuntimeError when one or more referenced globals cannot be serialized by
    either pickle or the pyspark zip fallback. Other errors are printed
    (best-effort export).
    """
    try:
        import tempfile
        from zipfile import ZipFile
        import inspect
        import os

        # Make the function's global references resolvable from this module.
        globals().update(preprocessor_fxn.__globals__)

        folderpath = directory

        # Private temp folder for intermediate artifacts.
        temp_dir = tempfile.mkdtemp()

        # Remove any stale archive from a previous export.
        try:
            os.remove(os.path.join(folderpath, "preprocessor.zip"))
        except OSError:
            pass

        # Save function source code within the temporary folder.
        source = inspect.getsource(preprocessor_fxn)
        with open(os.path.join(temp_dir, "preprocessor.py"), "w") as f:
            f.write(source)

        # Fix: close the archive even if any step below raises, so a
        # partially written zip handle is never leaked.
        zipObj = ZipFile(os.path.join(folderpath, "preprocessor.zip"), 'w')
        try:
            # Add the preprocessor function itself first.
            zipObj.write(os.path.join(temp_dir, "preprocessor.py"), "preprocessor.py")

            # Names of globals the function closes over.
            function_objects = list(inspect.getclosurevars(preprocessor_fxn).globals.keys())

            import sys
            import importlib.util
            modulenames = ["sklearn", "keras", "tensorflow", "cv2", "resize", "pytorch", "librosa", "pyspark"]

            # Directories holding the standard library: everything on sys.path
            # under sys.prefix that is not site-packages.
            paths = (os.path.abspath(p) for p in sys.path)
            stdlib = {
                p for p in paths
                if p.startswith((sys.prefix))
                and 'site-packages' not in p
            }

            # Extend modulenames with referenced builtin/stdlib modules so
            # they are excluded from serialization.
            for module_name in function_objects:
                try:
                    if module_name in sys.builtin_module_names:
                        modulenames.append(module_name)
                        continue
                    # importlib.util instead of the deprecated imp module.
                    spec = importlib.util.find_spec(module_name)
                    if spec and spec.origin:
                        if os.path.dirname(spec.origin) in stdlib:
                            modulenames.append(module_name)
                except Exception:
                    # Non-module names raise here; they stay serializable.
                    continue

            function_objects_nomodules = [i for i in function_objects if i not in list(modulenames)]

            def savetopickle(name):
                # Pickle one referenced global into the temp folder.
                # Fix: use a context manager so the file handle is closed.
                import pickle
                with open(os.path.join(temp_dir, name + ".pkl"), "wb") as pkl_file:
                    pickle.dump(globals()[name], pkl_file)
                return name

            def save_to_zip(name):
                # Fallback for pyspark models: use the model's own writer,
                # then zip the saved directory as '<model>__<name>.zip'.
                model_name_path = str(globals()[name]) + "__" + name
                temp_path = os.path.join(temp_dir, model_name_path)
                try:
                    shutil.rmtree(temp_path)
                except OSError:
                    pass

                if not os.path.exists(temp_path):
                    os.mkdir(temp_path)

                globals()[name].write().overwrite().save(temp_path)

                # Collect all file paths in the saved model directory.
                from aimodelshare.aimsonnx import get_pyspark_model_files_paths
                file_paths = get_pyspark_model_files_paths(temp_path)

                temp_zip_path = os.path.join(temp_dir, model_name_path + ".zip")
                with ZipFile(temp_zip_path, 'w') as model_zip:
                    for file in file_paths:
                        model_zip.write(os.path.join(temp_path, file), file)

                # Cleanup the unzipped model directory (best effort).
                try:
                    shutil.rmtree(temp_path)
                except OSError:
                    pass

                return model_name_path

            export_methods = []
            savedpreprocessorobjectslist = []
            failed_objects = []  # Track failed serializations for diagnostics.

            for obj_name in function_objects_nomodules:
                try:
                    savedpreprocessorobjectslist.append(savetopickle(obj_name))
                    export_methods.append("pickle")
                except Exception as e:
                    # Record why pickling failed for later diagnostics.
                    can_serialize, error_msg = _test_object_serialization(
                        globals().get(obj_name),
                        obj_name
                    )
                    # Drop any partially written pickle file.
                    try:
                        os.remove(os.path.join(temp_dir, obj_name + ".pkl"))
                    except OSError:
                        pass
                    # Fall back to the .zip export approach (pyspark models).
                    try:
                        savedpreprocessorobjectslist.append(save_to_zip(obj_name))
                        export_methods.append("zip")
                    except Exception as zip_e:
                        # Both pickle and zip failed - record this.
                        failed_objects.append({
                            'name': obj_name,
                            'type': type(globals().get(obj_name, None)).__name__,
                            'pickle_error': str(e),
                            'zip_error': str(zip_e)
                        })

            # Add the serialized dependencies (pkl and zip files) from the
            # temp dir into the archive.
            for idx, value in enumerate(savedpreprocessorobjectslist):
                if export_methods[idx] == "pickle":
                    objectname = str(value) + ".pkl"
                elif export_methods[idx] == "zip":
                    objectname = str(value) + ".zip"
                zipObj.write(os.path.join(temp_dir, objectname), objectname)
        finally:
            # Always release the archive handle.
            zipObj.close()

        # If any objects failed to serialize, raise an error with details.
        if failed_objects:
            failed_names = [obj['name'] for obj in failed_objects]
            error_details = "\n".join([
                f"  - {obj['name']} (type: {obj['type']}): {obj['pickle_error'][:100]}"
                for obj in failed_objects
            ])
            raise RuntimeError(
                f"Preprocessor export encountered serialization failures for {len(failed_objects)} closure variable(s): "
                f"{', '.join(failed_names)}.\n\nDetails:\n{error_details}\n\n"
                f"These objects are referenced by your preprocessor function but cannot be serialized. "
                f"Common causes include open file handles, database connections, or thread locks."
            )

        # Clean up temp directory files for future runs (best effort).
        try:
            os.remove(os.path.join(temp_dir, "preprocessor.py"))

            for idx, value in enumerate(savedpreprocessorobjectslist):
                if export_methods[idx] == "pickle":
                    objectname = str(value) + ".pkl"
                elif export_methods[idx] == "zip":
                    objectname = str(value) + ".zip"
                os.remove(os.path.join(temp_dir, objectname))
        except OSError:
            pass

    except RuntimeError:
        # Serialization failures are real errors; propagate them.
        raise
    except Exception as e:
        # Preserve the original best-effort contract: report, do not raise.
        print(e)

    return print("Your preprocessor is now saved to 'preprocessor.zip'")
|
|
339
|
+
|
|
340
|
+
def upload_preprocessor(preprocessor_path, client, bucket, model_id, model_version):
    """
    Upload a preprocessor zip archive to S3 as the runtime preprocessor.

    Parameters
    ----------
    preprocessor_path : str
        Local path to the preprocessor zip file.
    client : dict
        Mapping holding a boto3-style S3 client under the "client" key.
    bucket : str
        Destination bucket name.
    model_id : str
        Key prefix under which the archive is stored.
    model_version :
        Unused; kept for interface compatibility.

    Errors are printed rather than raised (best-effort upload).
    """
    try:
        # Validate the local archive before attempting the upload.
        if not os.path.exists(preprocessor_path):
            raise FileNotFoundError(
                f"The preprocessor file at {preprocessor_path} does not exist"
            )

        destination_key = model_id + "/runtime_preprocessor" + ".zip"
        s3_client = client["client"]
        s3_client.upload_file(preprocessor_path, bucket, destination_key)
    except Exception as e:
        print(e)
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
# Public API of this module. Fix: entries must be strings — with bare
# function objects in __all__, `from ... import *` raises TypeError.
__all__ = [
    "import_preprocessor",
    "export_preprocessor",
    "upload_preprocessor",
]
|
|
373
|
+
|