aimodelshare 0.1.12__py3-none-any.whl → 0.1.64__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of aimodelshare might be problematic.
- aimodelshare/__init__.py +94 -14
- aimodelshare/aimsonnx.py +417 -262
- aimodelshare/api.py +7 -6
- aimodelshare/auth.py +163 -0
- aimodelshare/aws.py +4 -4
- aimodelshare/base_image.py +1 -1
- aimodelshare/containerisation.py +1 -1
- aimodelshare/data_sharing/download_data.py +145 -88
- aimodelshare/generatemodelapi.py +7 -6
- aimodelshare/main/eval_lambda.txt +81 -13
- aimodelshare/model.py +493 -197
- aimodelshare/modeluser.py +89 -1
- aimodelshare/moral_compass/README.md +408 -0
- aimodelshare/moral_compass/__init__.py +37 -0
- aimodelshare/moral_compass/_version.py +3 -0
- aimodelshare/moral_compass/api_client.py +601 -0
- aimodelshare/moral_compass/apps/__init__.py +26 -0
- aimodelshare/moral_compass/apps/ai_consequences.py +297 -0
- aimodelshare/moral_compass/apps/judge.py +299 -0
- aimodelshare/moral_compass/apps/tutorial.py +198 -0
- aimodelshare/moral_compass/apps/what_is_ai.py +426 -0
- aimodelshare/moral_compass/challenge.py +365 -0
- aimodelshare/moral_compass/config.py +187 -0
- aimodelshare/playground.py +26 -14
- aimodelshare/preprocessormodules.py +60 -6
- aimodelshare/reproducibility.py +20 -5
- aimodelshare/utils/__init__.py +78 -0
- aimodelshare/utils/optional_deps.py +38 -0
- aimodelshare-0.1.64.dist-info/METADATA +298 -0
- {aimodelshare-0.1.12.dist-info → aimodelshare-0.1.64.dist-info}/RECORD +33 -22
- {aimodelshare-0.1.12.dist-info → aimodelshare-0.1.64.dist-info}/WHEEL +1 -1
- aimodelshare-0.1.64.dist-info/licenses/LICENSE +5 -0
- {aimodelshare-0.1.12.dist-info → aimodelshare-0.1.64.dist-info}/top_level.txt +0 -1
- aimodelshare-0.1.12.dist-info/LICENSE +0 -22
- aimodelshare-0.1.12.dist-info/METADATA +0 -68
- tests/__init__.py +0 -0
- tests/test_aimsonnx.py +0 -135
- tests/test_playground.py +0 -721
aimodelshare/preprocessormodules.py
CHANGED
@@ -116,6 +116,26 @@ def import_preprocessor(filepath):
 
     return preprocessor
 
+def _test_object_serialization(obj, obj_name):
+    """
+    Test if an object can be serialized with pickle.
+
+    Args:
+        obj: Object to test
+        obj_name: Name of the object for error reporting
+
+    Returns:
+        tuple: (success: bool, error_msg: str or None)
+    """
+    import pickle
+
+    try:
+        pickle.dumps(obj)
+        return True, None
+    except Exception as e:
+        return False, f"{type(e).__name__}: {str(e)}"
+
+
 def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
     """
     Exports preprocessor and related objects into zip file for model deployment
@@ -167,7 +187,7 @@ def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
     function_objects=list(inspect.getclosurevars(preprocessor_fxn).globals.keys())
 
     import sys
-    import imp
+    import importlib.util
     modulenames = ["sklearn","keras","tensorflow","cv2","resize","pytorch","librosa","pyspark"]
 
     # List all standard libraries not covered by sys.builtin_module_names
@@ -185,9 +205,12 @@ def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
                 modulenames.append(module_name)
                 continue
 
-
-
-
+            # Use importlib.util instead of deprecated imp
+            spec = importlib.util.find_spec(module_name)
+            if spec and spec.origin:
+                module_path = spec.origin
+                if os.path.dirname(module_path) in stdlib:
+                    modulenames.append(module_name)
         except Exception as e:
             # print(e)
             continue
@@ -232,12 +255,19 @@ def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
 
     export_methods = []
     savedpreprocessorobjectslist = []
+    failed_objects = []  # Track failed serializations for better diagnostics
+
    for function_objects_nomodule in function_objects_nomodules:
         try:
             savedpreprocessorobjectslist.append(savetopickle(function_objects_nomodule))
             export_methods.append("pickle")
         except Exception as e:
-            #
+            # Track this failure for diagnostics
+            can_serialize, error_msg = _test_object_serialization(
+                globals().get(function_objects_nomodule),
+                function_objects_nomodule
+            )
+
             try:
                 os.remove(os.path.join(temp_dir, function_objects_nomodule+".pkl"))
             except:
@@ -246,7 +276,14 @@ def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
             try:
                 savedpreprocessorobjectslist.append(save_to_zip(function_objects_nomodule))
                 export_methods.append("zip")
-            except Exception as e:
+            except Exception as zip_e:
+                # Both pickle and zip failed - record this
+                failed_objects.append({
+                    'name': function_objects_nomodule,
+                    'type': type(globals().get(function_objects_nomodule, None)).__name__,
+                    'pickle_error': str(e),
+                    'zip_error': str(zip_e)
+                })
                 # print(e)
                 pass
 
@@ -265,6 +302,20 @@ def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
     # close the Zip File
     zipObj.close()
 
+    # If any critical objects failed to serialize, raise an error with details
+    if failed_objects:
+        failed_names = [obj['name'] for obj in failed_objects]
+        error_details = "\n".join([
+            f"  - {obj['name']} (type: {obj['type']}): {obj['pickle_error'][:100]}"
+            for obj in failed_objects
+        ])
+        raise RuntimeError(
+            f"Preprocessor export encountered serialization failures for {len(failed_objects)} closure variable(s): "
+            f"{', '.join(failed_names)}.\n\nDetails:\n{error_details}\n\n"
+            f"These objects are referenced by your preprocessor function but cannot be serialized. "
+            f"Common causes include open file handles, database connections, or thread locks."
+        )
+
     try:
         # clean up temp directory files for future runs
         os.remove(os.path.join(temp_dir,"preprocessor.py"))
@@ -279,6 +330,9 @@ def export_preprocessor(preprocessor_fxn,directory, globs=globals()):
             pass
 
     except Exception as e:
+        # Re-raise RuntimeError with preserved message
+        if isinstance(e, RuntimeError):
+            raise
         print(e)
 
     return print("Your preprocessor is now saved to 'preprocessor.zip'")
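The net effect of these hunks is that `export_preprocessor` now fails loudly, with a `RuntimeError` naming the offending closure variables, instead of silently producing an incomplete `preprocessor.zip`. Below is a minimal standalone sketch of the serialization probe added above; the helper body is copied from the diff, while the file-handle example is illustrative only.

```python
import pickle

def _test_object_serialization(obj, obj_name):
    """Return (True, None) if obj pickles cleanly, else (False, error message)."""
    try:
        pickle.dumps(obj)
        return True, None
    except Exception as e:
        return False, f"{type(e).__name__}: {str(e)}"

# Open file handles are a typical unpicklable closure variable that the new
# failed_objects bookkeeping would report.
with open("training.log", "w") as log_handle:
    ok, err = _test_object_serialization(log_handle, "log_handle")
    print(ok, err)  # expect False plus a "cannot pickle '_io.TextIOWrapper' object" message
```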
aimodelshare/reproducibility.py
CHANGED
@@ -3,11 +3,22 @@ import sys
 import json
 import random
 import tempfile
-import pkg_resources
 import requests
 
 import numpy as np
-
+
+# TensorFlow is optional - only needed for reproducibility setup with TF models
+try:
+    import tensorflow as tf
+    _TF_AVAILABLE = True
+except ImportError:
+    _TF_AVAILABLE = False
+    tf = None
+
+try:
+    import importlib.metadata as md
+except ImportError:  # pragma: no cover
+    import importlib_metadata as md
 
 from aimodelshare.aws import get_s3_iam_client, run_function_on_lambda, get_aws_client
 
@@ -44,9 +55,13 @@ def export_reproducibility_env(seed, directory, mode="gpu"):
     else:
         raise Exception("Error: unknown 'mode' value, expected 'gpu' or 'cpu'")
 
-
-    installed_packages_list =
-
+    # Get installed packages using importlib.metadata
+    installed_packages_list = []
+    for dist in md.distributions():
+        name = dist.metadata.get("Name") or "unknown"
+        version = dist.version
+        installed_packages_list.append(f"{name}=={version}")
+    installed_packages_list = sorted(installed_packages_list)
 
     data["session_runtime_info"] = {
         "installed_packages": installed_packages_list,
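The replacement for `pkg_resources` can be exercised on its own; here is a minimal sketch of the package-listing loop introduced above, using only the standard library.

```python
import importlib.metadata as md

# Mirrors the loop added to export_reproducibility_env: one "name==version"
# entry per installed distribution, sorted for a stable snapshot.
installed_packages_list = sorted(
    f"{dist.metadata.get('Name') or 'unknown'}=={dist.version}"
    for dist in md.distributions()
)
print(installed_packages_list[:5])  # output depends on the local environment
```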
aimodelshare/utils/__init__.py
ADDED
@@ -0,0 +1,78 @@
+"""Utility modules for aimodelshare."""
+import os
+import sys
+import shutil
+import tempfile
+import functools
+import warnings
+from typing import Type
+
+from .optional_deps import check_optional
+
+
+def delete_files_from_temp_dir(temp_dir_file_deletion_list):
+    temp_dir = tempfile.gettempdir()
+    for file_name in temp_dir_file_deletion_list:
+        file_path = os.path.join(temp_dir, file_name)
+        if os.path.exists(file_path):
+            os.remove(file_path)
+
+
+def delete_folder(folder_path):
+    if os.path.exists(folder_path):
+        shutil.rmtree(folder_path)
+
+
+def make_folder(folder_path):
+    os.makedirs(folder_path, exist_ok=True)
+
+
+class HiddenPrints:
+    """Context manager that suppresses stdout and stderr (used for silencing noisy outputs)."""
+    def __enter__(self):
+        self._original_stdout = sys.stdout
+        self._original_stderr = sys.stderr
+        self._devnull_stdout = open(os.devnull, 'w')
+        self._devnull_stderr = open(os.devnull, 'w')
+        sys.stdout = self._devnull_stdout
+        sys.stderr = self._devnull_stderr
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        sys.stdout = self._original_stdout
+        sys.stderr = self._original_stderr
+        self._devnull_stdout.close()
+        self._devnull_stderr.close()
+
+
+def ignore_warning(warning: Type[Warning]):
+    """
+    Ignore a given warning occurring during method execution.
+
+    Args:
+        warning (Warning): warning type to ignore.
+
+    Returns:
+        the inner function
+    """
+
+    def inner(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore", category=warning)
+                return func(*args, **kwargs)
+
+        return wrapper
+
+    return inner
+
+
+__all__ = [
+    "check_optional",
+    "HiddenPrints",
+    "ignore_warning",
+    "delete_files_from_temp_dir",
+    "delete_folder",
+    "make_folder",
+]
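For context, an illustrative use of the new helpers, assuming the `aimodelshare.utils` package resolves as laid out above:

```python
import warnings
from aimodelshare.utils import HiddenPrints, ignore_warning

with HiddenPrints():
    print("this never reaches the console")  # stdout and stderr are redirected to os.devnull

@ignore_warning(DeprecationWarning)
def noisy():
    warnings.warn("legacy code path", DeprecationWarning)
    return 42

print(noisy())  # 42, with the DeprecationWarning filtered out
```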
aimodelshare/utils/optional_deps.py
ADDED
@@ -0,0 +1,38 @@
+"""Optional dependency checking utilities."""
+import os
+import importlib.util
+import warnings
+
+_DEF_SUPPRESS_ENV = "AIMODELSHARE_SUPPRESS_OPTIONAL_WARNINGS"
+
+
+def check_optional(name: str, feature_label: str, suppress_env: str = _DEF_SUPPRESS_ENV) -> bool:
+    """Check if an optional dependency is available.
+
+    Print a single warning (via warnings) if missing and suppression env var is not set.
+    Returns True if available, False otherwise.
+
+    Parameters
+    ----------
+    name : str
+        The name of the module to check (e.g., 'xgboost', 'pyspark')
+    feature_label : str
+        A human-readable label for the feature that requires this dependency
+    suppress_env : str, optional
+        Environment variable name to check for suppression (default: AIMODELSHARE_SUPPRESS_OPTIONAL_WARNINGS)
+
+    Returns
+    -------
+    bool
+        True if the module is available, False otherwise
+    """
+    spec = importlib.util.find_spec(name)
+    if spec is None:
+        if not os.environ.get(suppress_env):
+            warnings.warn(
+                f"{feature_label} support unavailable. Install `{name}` to enable.",
+                category=UserWarning,
+                stacklevel=2,
+            )
+        return False
+    return True
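A hedged usage sketch for `check_optional`, matching the signature shown above; the "PyTorch model export" feature label is only an example.

```python
from aimodelshare.utils.optional_deps import check_optional

if check_optional("torch", "PyTorch model export"):
    import torch  # safe: find_spec already confirmed the module is importable
else:
    # A UserWarning ("PyTorch model export support unavailable. Install `torch` to enable.")
    # was emitted, unless AIMODELSHARE_SUPPRESS_OPTIONAL_WARNINGS is set in the environment.
    pass
```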
aimodelshare-0.1.64.dist-info/METADATA
ADDED
@@ -0,0 +1,298 @@
+Metadata-Version: 2.4
+Name: aimodelshare
+Version: 0.1.64
+Summary: Deploy locally saved machine learning models to a live REST API and integrated dashboard.
+Author-email: Michael Parrott <mikedparrott@modelshare.ai>
+License: 
+        Proprietary License
+
+        Copyright (c) 2025 Model Share Labs,Inc. (And all affiliated organizations and individuals)
+
+
+Keywords: machine-learning,deployment,api,onnx,tensorflow,pytorch
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: Other/Proprietary License
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: numpy>=1.23.0
+Requires-Dist: pandas>=1.5.0
+Requires-Dist: requests
+Requires-Dist: urllib3
+Requires-Dist: boto3
+Requires-Dist: onnx
+Requires-Dist: onnxmltools
+Requires-Dist: onnxruntime
+Requires-Dist: skl2onnx
+Requires-Dist: tf2onnx
+Requires-Dist: scikit-learn>=1.2.0
+Requires-Dist: scikeras
+Requires-Dist: shortuuid
+Requires-Dist: Pympler
+Requires-Dist: wget
+Requires-Dist: PyJWT[crypto]<3,>=2.8
+Requires-Dist: pydot
+Requires-Dist: regex
+Requires-Dist: psutil
+Requires-Dist: dill
+Requires-Dist: IPython
+Provides-Extra: visual
+Requires-Dist: graphviz; extra == "visual"
+Provides-Extra: tensorflow
+Requires-Dist: tensorflow==2.19.0; extra == "tensorflow"
+Requires-Dist: keras2onnx; extra == "tensorflow"
+Provides-Extra: pytorch
+Requires-Dist: torch; extra == "pytorch"
+Provides-Extra: ui
+Requires-Dist: gradio>=4.0.0; extra == "ui"
+Provides-Extra: full
+Requires-Dist: tensorflow==2.19.0; extra == "full"
+Requires-Dist: keras2onnx; extra == "full"
+Requires-Dist: torch; extra == "full"
+Requires-Dist: graphviz; extra == "full"
+Requires-Dist: gradio>=4.0.0; extra == "full"
+Provides-Extra: test
+Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
+Dynamic: license-file
+
+<p align="center"><img width="40%" src="docs/aimodshare_banner.jpg" /></p>
+
+### The mission of the AI Model Share Platform is to provide a trusted non profit repository for machine learning model prediction APIs (python library + integrated website at modelshare.org). A beta version of the platform is currently being used by Columbia University students, faculty, and staff to test and improve platform functionality.
+
+### In a matter of seconds, data scientists can launch a model into this infrastructure and end-users the world over will be able to engage their machine learning models.
+
+* ***Launch machine learning models into scalable production ready prediction REST APIs using a single Python function.***
+
+* ***Details about each model, how to use the model's API, and the model's author(s) are deployed simultaneously into a searchable website at modelshare.org.***
+
+* ***Deployed models receive an individual Model Playground listing information about all deployed models. Each of these pages includes a fully functional prediction dashboard that allows end-users to input text, tabular, or image data and receive live predictions.***
+
+* ***Moreover, users can build on model playgrounds by 1) creating ML model competitions, 2) uploading Jupyter notebooks to share code, 3) sharing model architectures and 4) sharing data... with all shared artifacts automatically creating a data science user portfolio.***
+
+# Use aimodelshare Python library to deploy your model, create a new ML competition, and more.
+* [Tutorials for deploying models](https://www.modelshare.org/search/deploy?search=ALL&problemdomain=ALL&gettingstartedguide=TRUE&pythonlibrariesused=ALL&tags=ALL&pageNum=1).
+
+# Find model playground web-dashboards to generate predictions now.
+* [View deployed models and generate predictions at modelshare.org](https://www.modelshare.org)
+
+# Installation
+
+## Install using PyPi
+
+```
+pip install aimodelshare
+```
+
+## Install on Anaconda
+
+
+#### Conda/Mamba Install ( For Mac and Linux Users Only , Windows Users should use pip method ) :
+
+Make sure you have conda version >=4.9
+
+You can check your conda version with:
+
+```
+conda --version
+```
+
+To update conda use:
+
+```
+conda update conda
+```
+
+Installing `aimodelshare` from the `conda-forge` channel can be achieved by adding `conda-forge` to your channels with:
+
+```
+conda config --add channels conda-forge
+conda config --set channel_priority strict
+```
+
+Once the `conda-forge` channel has been enabled, `aimodelshare` can be installed with `conda`:
+
+```
+conda install aimodelshare
+```
+
+or with `mamba`:
+
+```
+mamba install aimodelshare
+```
+
+# Moral Compass: Dynamic Metric Support for AI Ethics Challenges
+
+The Moral Compass system now supports tracking multiple performance metrics for fairness-focused AI challenges. Track accuracy, demographic parity, equal opportunity, and other fairness metrics simultaneously.
+
+## Quick Start with Multi-Metric Tracking
+
+```python
+from aimodelshare.moral_compass import ChallengeManager
+
+# Create a challenge manager
+manager = ChallengeManager(
+    table_id="fairness-challenge-2024",
+    username="your_username"
+)
+
+# Track multiple metrics
+manager.set_metric("accuracy", 0.85, primary=True)
+manager.set_metric("demographic_parity", 0.92)
+manager.set_metric("equal_opportunity", 0.88)
+
+# Track progress
+manager.set_progress(tasks_completed=3, total_tasks=5)
+
+# Sync to leaderboard
+result = manager.sync()
+print(f"Moral compass score: {result['moralCompassScore']:.4f}")
+```
+
+## Moral Compass Score Formula
+
+```
+moralCompassScore = primaryMetricValue × ((tasksCompleted + questionsCorrect) / (totalTasks + totalQuestions))
+```
+
+This combines:
+- **Performance**: Your primary metric value (e.g., fairness score)
+- **Progress**: Your completion rate across tasks and questions
+
+## Features
+
+- **Multiple Metrics**: Track accuracy, fairness, robustness, and custom metrics
+- **Primary Metric Selection**: Choose which metric drives leaderboard ranking
+- **Progress Tracking**: Monitor task and question completion
+- **Automatic Scoring**: Server-side computation of moral compass scores
+- **Leaderboard Sorting**: Automatic ranking by moral compass score
+- **Backward Compatible**: Existing users without metrics continue to work
+
+## Example: Justice & Equity Challenge
+
+See [Justice & Equity Challenge Example](docs/justice_equity_challenge_example.md) for detailed examples including:
+- Multi-metric fairness tracking
+- Progressive challenge completion
+- Leaderboard queries
+- Custom fairness criteria
+
+## API Methods
+
+### ChallengeManager
+
+```python
+from aimodelshare.moral_compass import ChallengeManager
+
+manager = ChallengeManager(table_id="my-table", username="user1")
+
+# Set metrics
+manager.set_metric("accuracy", 0.90, primary=True)
+manager.set_metric("fairness", 0.95)
+
+# Set progress
+manager.set_progress(tasks_completed=4, total_tasks=5)
+
+# Preview score locally
+score = manager.get_local_score()
+
+# Sync to server
+result = manager.sync()
+```
+
+### API Client
+
+```python
+from aimodelshare.moral_compass import MoralcompassApiClient
+
+client = MoralcompassApiClient()
+
+# Update moral compass with metrics
+result = client.update_moral_compass(
+    table_id="my-table",
+    username="user1",
+    metrics={"accuracy": 0.90, "fairness": 0.95},
+    primary_metric="fairness",
+    tasks_completed=4,
+    total_tasks=5
+)
+```
+
+## Documentation
+
+- [Full API Documentation](aimodelshare/moral_compass/README.md)
+- [Justice & Equity Challenge Examples](docs/justice_equity_challenge_example.md)
+- [Integration Tests](tests/test_moral_compass_client_minimal.py)
+
+## Moral Compass API URL Configuration
+
+The Moral Compass API client requires a base URL to connect to the REST API. The URL is resolved in the following order:
+
+### For CI/CD Environments
+
+In GitHub Actions workflows, the `MORAL_COMPASS_API_BASE_URL` environment variable is automatically exported from Terraform outputs:
+
+```yaml
+- name: Initialize Terraform and get API URL
+  working-directory: infra
+  run: |
+    terraform init
+    terraform workspace select dev || terraform workspace new dev
+    API_URL=$(terraform output -raw api_base_url)
+    echo "MORAL_COMPASS_API_BASE_URL=$API_URL" >> $GITHUB_ENV
+```
+
+### For Local Development
+
+When developing locally, the API client attempts to resolve the URL in this order:
+
+1. **Environment variable** - Set `MORAL_COMPASS_API_BASE_URL` or `AIMODELSHARE_API_BASE_URL`:
+   ```bash
+   export MORAL_COMPASS_API_BASE_URL="https://api.example.com/v1"
+   ```
+
+2. **Cached Terraform outputs** - The client looks for `infra/terraform_outputs.json`
+
+3. **Terraform command** - As a fallback, executes `terraform output -raw api_base_url` in the `infra/` directory
+
+### Graceful Test Skipping
+
+Integration tests that require the Moral Compass API will skip gracefully if the URL cannot be resolved, rather than failing. This allows the test suite to run in environments where the infrastructure is not available (e.g., forks without access to AWS resources).
+
+# Resource Cleanup
+
+During testing, aimodelshare creates AWS resources including API Gateway REST APIs (playgrounds) and IAM users. To manage and clean up these resources:
+
+## Cleanup Script
+
+Use the interactive cleanup script to identify and delete test resources:
+
+```bash
+# Preview resources without deleting (safe)
+python scripts/cleanup_test_resources.py --dry-run
+
+# Interactive cleanup
+python scripts/cleanup_test_resources.py
+
+# Cleanup in a specific region
+python scripts/cleanup_test_resources.py --region us-west-2
+```
+
+The script will:
+- List all API Gateway REST APIs (playgrounds) in the region
+- List IAM users created by the test framework (prefix: `temporaryaccessAImodelshare`)
+- Show associated resources (policies, access keys)
+- Allow you to select which resources to delete
+- Safely delete selected resources with proper cleanup order
+
+## GitHub Action
+
+You can also trigger the cleanup workflow from the GitHub Actions tab:
+
+1. Go to **Actions** → **Cleanup Test Resources**
+2. Click **Run workflow**
+3. Select **dry-run** mode to preview resources
+4. Review the output and run locally to delete resources
+
+For complete documentation, see [CLEANUP_RESOURCES.md](CLEANUP_RESOURCES.md).
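The `moralCompassScore` formula quoted in the new METADATA above is easy to sanity-check locally. Here is a small sketch of the arithmetic, assuming unset question counts contribute zero; the server-side computation described in the README remains authoritative.

```python
def moral_compass_score(primary_metric_value, tasks_completed, total_tasks,
                        questions_correct=0, total_questions=0):
    # moralCompassScore = primaryMetricValue * ((tasksCompleted + questionsCorrect)
    #                                           / (totalTasks + totalQuestions))
    progress = (tasks_completed + questions_correct) / (total_tasks + total_questions)
    return primary_metric_value * progress

# Same inputs as the Quick Start: accuracy 0.85 as primary metric, 3 of 5 tasks done.
print(moral_compass_score(0.85, tasks_completed=3, total_tasks=5))  # 0.51
```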
{aimodelshare-0.1.12.dist-info → aimodelshare-0.1.64.dist-info}/RECORD
CHANGED
@@ -1,25 +1,26 @@
 aimodelshare/README.md,sha256=_OMdUIeIYZnpFlKdafM1KNWaANO2nWdx0QpLE_ZC-Qs,2014
-aimodelshare/__init__.py,sha256=
-aimodelshare/aimsonnx.py,sha256=
-aimodelshare/api.py,sha256=
-aimodelshare/
+aimodelshare/__init__.py,sha256=csP3KFDIvloTtRqqGh7Jg1eo0Q6-V63VQbgYR2zzWZs,3228
+aimodelshare/aimsonnx.py,sha256=NCjRd535kTfJ6zSEa2o9QpC-TqHPIiDNuS0yHcAQzx8,77178
+aimodelshare/api.py,sha256=3AuTS88M-6zXye3eCjjaAnHpdvLjHHdYMyPZJm9O0Cc,35107
+aimodelshare/auth.py,sha256=7FatqYMDF3x2u9GRuNm-2lvMMLKO1AHtWRxzu36ZVqE,4774
+aimodelshare/aws.py,sha256=GSFw1Flc-hIyAk3a_0CtlKTOlhUBSeMF0tcRIweOHbI,15234
 aimodelshare/aws_client.py,sha256=Ce19iwf69BwpuyyJlVN8z1da3c5jf93svsTgx1OWhaA,6784
-aimodelshare/base_image.py,sha256=
+aimodelshare/base_image.py,sha256=itaQmX_q5GmgQrL3VNCBJpDGhl4PGA-nLTCbuyNDCCc,4825
 aimodelshare/bucketpolicy.py,sha256=KLyl-BLBiFdTYzCK7tJV8NBJHBKWRlF3_msSTGwgaQQ,3055
-aimodelshare/containerisation.py,sha256=
+aimodelshare/containerisation.py,sha256=SaiO92wcdCwi8_C31AXNvaCdmZLnOB-7KTyP68-TQpM,8758
 aimodelshare/containerization.py,sha256=Sa9GWxmz1qoDZ3lUQjFa1ctQUSs666I7-Yf0YU3We1U,29609
 aimodelshare/custom_eval_metrics.py,sha256=NghFslmLDyvIkZ27yZhFIItLbzHnNb0bJ2ZO7cqkucw,3170
 aimodelshare/deploy_custom_lambda.py,sha256=HFxxIYI2JrZwPrjqKgFkj6KaCeRBOn6tf9e2fqBUl2U,11045
 aimodelshare/exceptions.py,sha256=gfrwQ7LHNyjgUNHM4X_LNZ7JhKwZv9qWN3DhBaB-f-k,318
-aimodelshare/generatemodelapi.py,sha256=
+aimodelshare/generatemodelapi.py,sha256=lfJQs93gu_WYrUPmbXrb0Whe3cABLtvA5cXpqq2gs8A,59701
 aimodelshare/leaderboard.py,sha256=xtKJcNCsZjy2IoK1fUTAFyM_I-eLCMS1WJRfwgsT5AA,5216
-aimodelshare/model.py,sha256=
-aimodelshare/modeluser.py,sha256=
-aimodelshare/playground.py,sha256=
+aimodelshare/model.py,sha256=_W7479wJxzsguDBKhmaNZyAtuSHiNI_fF6EZAOtbL8w,62311
+aimodelshare/modeluser.py,sha256=uZJjwaT7zHBEcfttl6JYxvDdabKl36YdgvSjRa-gF8E,7512
+aimodelshare/playground.py,sha256=jOMls-mv_A8W8AOM8ZCpSci63UauciMxPH5VwHclLN0,89273
 aimodelshare/postprocessormodules.py,sha256=L87fM2mywlInOrgaMETi-7zdHBGbIMRcrXKttQthyQ4,4992
-aimodelshare/preprocessormodules.py,sha256=
+aimodelshare/preprocessormodules.py,sha256=48HIur55nytD0FdhW1u1wWSAiaIW4uof0cJP1Yoq0T4,13183
 aimodelshare/readme.md,sha256=_OMdUIeIYZnpFlKdafM1KNWaANO2nWdx0QpLE_ZC-Qs,2014
-aimodelshare/reproducibility.py,sha256=
+aimodelshare/reproducibility.py,sha256=5uN_2deZeFWyupR5uXnhu2RUQefXTSt9W0bsLJ86VPc,6227
 aimodelshare/tools.py,sha256=e9nRv_1H06nIum6BW2gyI0EF3GGkQ7-gPrppEPiq5C0,3109
 aimodelshare/utils.py,sha256=8vZ6hx-CGliVxXe_ed_viV_ZPGQVi4SSMRFfD71N1vs,1336
 aimodelshare/color_mappings/color_mapping_keras.csv,sha256=dOJjZ9TGE7EbCPg6rW_r4Ysv45bskH77fXakfDmGKuM,2728
@@ -31,7 +32,7 @@ aimodelshare/containerization_templates/lambda_function.txt,sha256=nEFoPDXemNcQZ
 aimodelshare/custom_approach/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 aimodelshare/custom_approach/lambda_function.py,sha256=d1HZlgviHZq4mNBKx4q-RCunDK8P8i9DKZcfv6Nmgzc,479
 aimodelshare/data_sharing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-aimodelshare/data_sharing/download_data.py,sha256=
+aimodelshare/data_sharing/download_data.py,sha256=xJ6ylVO_oAiS72ue5iy2eOFol5Bnc7ZI8-OW0TC9sIw,25317
 aimodelshare/data_sharing/share_data.py,sha256=dMOP0-PTSpviOeHi3Nvj-uiq5PlIfk_SN5nN92j4PnI,13964
 aimodelshare/data_sharing/utils.py,sha256=865lN8-oGFi_U_zRaNnGB8Bd0sC8dN_iI5krZOSt_Ts,236
 aimodelshare/data_sharing/data_sharing_templates/Dockerfile.txt,sha256=27wmp7b0rXqJQsumhPxCvGHmUcDiiVgrC6i7DmY7KQA,77
@@ -107,10 +108,21 @@ aimodelshare/main/8.txt,sha256=MfcEQe9Gv6RSmWL3kd7oYkRkdDdkN4bPxEG43QVs7ms,4513
 aimodelshare/main/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 aimodelshare/main/authorization.txt,sha256=lBWFZ1pyNuYFSEEWQbfEAZFDspcVE1guzlfpES7HNxk,10942
 aimodelshare/main/eval_classification.txt,sha256=gCBU71rbXRlkBwefVN3WhwVJX9fXh6bwOCa7ofLMdnA,3081
-aimodelshare/main/eval_lambda.txt,sha256=
+aimodelshare/main/eval_lambda.txt,sha256=r3GqJodO5QG6jeK4xWUzLrXM9K7XLXeUJouhz6efQbA,62831
 aimodelshare/main/eval_regression.txt,sha256=iQeE9mbOkg-BDF9TnoQmglo86jBJitJQCvaf1eELzrs,3111
 aimodelshare/main/lambda_function.txt,sha256=-XkuD2YUOWNryNT7rBPjlts588UAeE949TUqeVGCRlQ,150
 aimodelshare/main/nst.txt,sha256=8kTsR18kDEcaQbv6091XDq1tRiqqFxdqfCteslR_udk,4941
+aimodelshare/moral_compass/README.md,sha256=2wTCI0s43Tm9WXRIJjtj6h-FFHsdHhzx-Aimy_UupBY,11888
+aimodelshare/moral_compass/__init__.py,sha256=JG0-WFfZ5T5uylkZVLk9LseyeKGoBZQYjOmZOlgP8aA,973
+aimodelshare/moral_compass/_version.py,sha256=GhviEK9nfN1o9XunS8V4jXqgMmcB-Vpp2VCYWplLzqE,80
+aimodelshare/moral_compass/api_client.py,sha256=3WVWBuuEJ9x0R6raUu6TKZPv2swgVxCDwKXKg8Esq24,22393
+aimodelshare/moral_compass/challenge.py,sha256=p--uqP30tPQnVcOPs4LEJFaXlqTRL9Zb7SVkEvggl2U,12971
+aimodelshare/moral_compass/config.py,sha256=8HsoTreAAdXaWOdg30B1IJXwIGMBNEz7hqgNZpAFUhI,6119
+aimodelshare/moral_compass/apps/__init__.py,sha256=sgWuLHjAMxoAbUc-_itEy6kyyb3WBxfGbMgLl2ESW2Y,934
+aimodelshare/moral_compass/apps/ai_consequences.py,sha256=OsGtXpinvC3lhTy7LfK7hoZAAEehlf5OZB3of8KkVe4,15176
+aimodelshare/moral_compass/apps/judge.py,sha256=ddITvOwlDLc2T26jrZw229LpaeFHNG4cw8TPBnMkif0,12246
+aimodelshare/moral_compass/apps/tutorial.py,sha256=zNaQJM04VqUSaFBofR8rvb6tws3vPwqhztt5KYvi8ck,8042
+aimodelshare/moral_compass/apps/what_is_ai.py,sha256=Hvy6qbhg_pRAlUcEnLJjuv-DRezgsf369oz5pLdRx3Y,20244
 aimodelshare/placeholders/model.onnx,sha256=i04ndsRw5VBTOpIH-LHqTjAPHcJZNzyWSSz1zSmukBw,3464
 aimodelshare/placeholders/preprocessor.zip,sha256=463dahdrgzYzFY338r_be7xptPm_Z1kNgu52SMsFVAU,2930
 aimodelshare/pyspark/1.txt,sha256=FQYyw5s8bnrDFIl6kzqXZ6qGuh-N68SN4mRL561vjto,6529
@@ -140,11 +152,10 @@ aimodelshare/sam/codepipeline_policies.txt,sha256=267HMXMnbP7qRASkmFZYSx-2HmKf5o
 aimodelshare/sam/codepipeline_trust_relationship.txt,sha256=yfPYvZlN3fnaIHs7I3ENMMveigIE89mufV9pvR8EQH8,245
 aimodelshare/sam/spark-class.txt,sha256=chyJBxDzCzlUKXzVQYTzuJ2PXCTwg8_gd1yfnI-xbRw,217
 aimodelshare/sam/template.txt,sha256=JKSvEOZNaaLalHSx7r9psJg_6LLCb0XLAYi1-jYPu3M,1195
-
-
-
-aimodelshare-0.1.
-aimodelshare-0.1.
-aimodelshare-0.1.
-aimodelshare-0.1.
-aimodelshare-0.1.12.dist-info/RECORD,,
+aimodelshare/utils/__init__.py,sha256=6ieChHjYDsn_gSyeOiLeWW5hWkUfZUucEzSFyBN7xck,1973
+aimodelshare/utils/optional_deps.py,sha256=t0ZcPlaAKEQqBpD-GDbFGg9a-qp2fsqonTVM0dLWNV4,1257
+aimodelshare-0.1.64.dist-info/licenses/LICENSE,sha256=XdPthYienQee9LH1duXNGtsj6GUTXPvtf_1MpC8WhL4,115
+aimodelshare-0.1.64.dist-info/METADATA,sha256=058noEGD1gz6g0NCLUjSGs-1_k0Ic20g-5QR84yBiEs,10077
+aimodelshare-0.1.64.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+aimodelshare-0.1.64.dist-info/top_level.txt,sha256=d-0DAtZDZsvfauQzUjXHJRKVYfaqMWZXz3WGmmIzE5w,13
+aimodelshare-0.1.64.dist-info/RECORD,,