aimodelshare 0.1.32__py3-none-any.whl → 0.1.62__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of aimodelshare might be problematic.

Files changed (38)
  1. aimodelshare/__init__.py +94 -14
  2. aimodelshare/aimsonnx.py +312 -95
  3. aimodelshare/api.py +13 -12
  4. aimodelshare/auth.py +163 -0
  5. aimodelshare/aws.py +4 -4
  6. aimodelshare/base_image.py +1 -1
  7. aimodelshare/containerisation.py +1 -1
  8. aimodelshare/data_sharing/download_data.py +142 -87
  9. aimodelshare/generatemodelapi.py +7 -6
  10. aimodelshare/main/authorization.txt +275 -275
  11. aimodelshare/main/eval_lambda.txt +81 -13
  12. aimodelshare/model.py +493 -197
  13. aimodelshare/modeluser.py +89 -1
  14. aimodelshare/moral_compass/README.md +408 -0
  15. aimodelshare/moral_compass/__init__.py +37 -0
  16. aimodelshare/moral_compass/_version.py +3 -0
  17. aimodelshare/moral_compass/api_client.py +601 -0
  18. aimodelshare/moral_compass/apps/__init__.py +17 -0
  19. aimodelshare/moral_compass/apps/tutorial.py +198 -0
  20. aimodelshare/moral_compass/challenge.py +365 -0
  21. aimodelshare/moral_compass/config.py +187 -0
  22. aimodelshare/playground.py +26 -14
  23. aimodelshare/preprocessormodules.py +60 -6
  24. aimodelshare/pyspark/authorization.txt +258 -258
  25. aimodelshare/pyspark/eval_lambda.txt +1 -1
  26. aimodelshare/reproducibility.py +20 -5
  27. aimodelshare/utils/__init__.py +78 -0
  28. aimodelshare/utils/optional_deps.py +38 -0
  29. aimodelshare-0.1.62.dist-info/METADATA +298 -0
  30. {aimodelshare-0.1.32.dist-info → aimodelshare-0.1.62.dist-info}/RECORD +33 -25
  31. {aimodelshare-0.1.32.dist-info → aimodelshare-0.1.62.dist-info}/WHEEL +1 -1
  32. aimodelshare-0.1.62.dist-info/licenses/LICENSE +5 -0
  33. {aimodelshare-0.1.32.dist-info → aimodelshare-0.1.62.dist-info}/top_level.txt +0 -1
  34. aimodelshare-0.1.32.dist-info/METADATA +0 -78
  35. aimodelshare-0.1.32.dist-info/licenses/LICENSE +0 -22
  36. tests/__init__.py +0 -0
  37. tests/test_aimsonnx.py +0 -135
  38. tests/test_playground.py +0 -721
aimodelshare/aimsonnx.py CHANGED
@@ -1,29 +1,32 @@
 # data wrangling
 import pandas as pd
-import numpy as np
+import numpy as np
+
+# Import optional dependency checker
+from aimodelshare.utils.optional_deps import check_optional

 # ml frameworks
 try:
     import sklearn
     from sklearn.model_selection import GridSearchCV, RandomizedSearchCV
 except:
-    print("Warning: Please install sklearn to enable sklearn features")
+    check_optional("sklearn", "Scikit-learn")

 try:
     import torch
 except:
-    print("Warning: Please install pytorch to enable pytorch features")
+    check_optional("torch", "PyTorch")

 try:
     import xgboost
 except:
-    print("Warning: Please install xgboost to enable xgboost features")
+    check_optional("xgboost", "XGBoost")

 try:
     import tensorflow as tf
     import keras
 except:
-    print("Warning: Please install tensorflow/keras to enable tensorflow/keras features")
+    check_optional("tensorflow", "TensorFlow/Keras")

 try:
     import pyspark
@@ -32,14 +35,17 @@ try:
     from pyspark.ml.tuning import CrossValidatorModel, TrainValidationSplitModel
     from onnxmltools import convert_sparkml
 except:
-    print("Warning: Please install pyspark to enable pyspark features")
+    check_optional("pyspark", "PySpark")


 # onnx modules
 import onnx
 import skl2onnx
 from skl2onnx import convert_sklearn
-import tf2onnx
+# tf2onnx import is lazy-loaded to avoid requiring TensorFlow for non-TF workflows
+_TF2ONNX_AVAILABLE = None
+_tf2onnx_module = None
+_tensorflow_module = None
 try:
     from torch.onnx import export
 except:
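Reviewer note: the repeated except-branch prints are consolidated into `check_optional` from the new `aimodelshare/utils/optional_deps.py` (added in this release but not shown in this diff). A minimal sketch of what such a checker plausibly looks like, inferred only from its call sites; the real implementation may differ:

```python
import importlib.util

def check_optional(module_name: str, display_name: str) -> bool:
    """Warn when an optional dependency is missing; report availability."""
    available = importlib.util.find_spec(module_name) is not None
    if not available:
        print(f"Warning: install {display_name} ({module_name}) to enable its features")
    return available

# Mirrors the call sites in the hunks above:
check_optional("sklearn", "Scikit-learn")
check_optional("torch", "PyTorch")
```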
@@ -71,18 +77,59 @@ import wget
 from copy import copy
 import psutil
 from pympler import asizeof
-from IPython.core.display import display, HTML, SVG
+from IPython.display import display, HTML, SVG
 import absl.logging
 import networkx as nx
 import warnings
 from pathlib import Path
 import time
 import signal
-from scikeras.wrappers import KerasClassifier, KerasRegressor
+
+# scikeras imports keras which requires TensorFlow - lazy load it
+try:
+    from scikeras.wrappers import KerasClassifier, KerasRegressor
+    _SCIKERAS_AVAILABLE = True
+except ImportError:
+    _SCIKERAS_AVAILABLE = False
+    KerasClassifier = None
+    KerasRegressor = None


 absl.logging.set_verbosity(absl.logging.ERROR)

+def _check_tf2onnx_available():
+    """Check if tf2onnx and TensorFlow are available, and load them if needed.
+
+    Returns:
+        tuple: (tf2onnx_module, tensorflow_module) on success
+
+    Raises:
+        RuntimeError: If TensorFlow or tf2onnx are not installed
+    """
+    global _TF2ONNX_AVAILABLE, _tf2onnx_module, _tensorflow_module
+
+    if _TF2ONNX_AVAILABLE is None:
+        try:
+            import tf2onnx as tf2onnx_temp
+            import tensorflow as tf_temp
+            _tf2onnx_module = tf2onnx_temp
+            _tensorflow_module = tf_temp
+            _TF2ONNX_AVAILABLE = True
+        except ImportError as e:
+            _TF2ONNX_AVAILABLE = False
+            raise RuntimeError(
+                "TensorFlow and tf2onnx are required for Keras model conversion to ONNX. "
+                "Please install them with: pip install tensorflow tf2onnx"
+            ) from e
+
+    if not _TF2ONNX_AVAILABLE:
+        raise RuntimeError(
+            "TensorFlow and tf2onnx are required for Keras model conversion to ONNX. "
+            "Please install them with: pip install tensorflow tf2onnx"
+        )
+
+    return _tf2onnx_module, _tensorflow_module
+
 def _extract_onnx_metadata(onnx_model, framework):
     '''Extracts model metadata from ONNX file.'''

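The memoized loader raises a `RuntimeError` with an install hint instead of printing a warning, so callers fail fast. A hedged sketch of how a caller can use it without importing TensorFlow eagerly (`convert_keras_if_possible` is illustrative; `tf2onnx.convert.from_keras` is tf2onnx's documented entry point):

```python
from aimodelshare.aimsonnx import _check_tf2onnx_available  # helper from the hunk above

def convert_keras_if_possible(model):
    """Convert a Keras model to ONNX, or return None with a clear message."""
    try:
        tf2onnx, tf = _check_tf2onnx_available()
    except RuntimeError as err:
        print(f"Skipping Keras->ONNX conversion: {err}")
        return None
    model_proto, _ = tf2onnx.convert.from_keras(model)
    return model_proto
```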
@@ -92,11 +139,14 @@ def _extract_onnx_metadata(onnx_model, framework):
     # initialize metadata dict
     metadata_onnx = {}

-    # get input shape
-    metadata_onnx["input_shape"] = graph.input[0].type.tensor_type.shape.dim[1].dim_value
-
-    # get output shape
-    metadata_onnx["output_shape"] = graph.output[0].type.tensor_type.shape.dim[1].dim_value
+    def _get_shape(dims):
+        return [d.dim_value if d.HasField("dim_value") else None for d in dims]
+
+    input_dims = graph.input[0].type.tensor_type.shape.dim
+    output_dims = graph.output[0].type.tensor_type.shape.dim
+
+    metadata_onnx["input_shape"] = _get_shape(input_dims)
+    metadata_onnx["output_shape"] = _get_shape(output_dims)

     # get layers and activations NEW
     # match layers and nodes and initalizers in sinle object
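The removed code read only `dim[1].dim_value`, which silently returns 0 for symbolic dimensions (e.g. a dynamic batch axis) and raises `IndexError` on rank-1 tensors. The new `_get_shape` records every axis and maps symbolic axes to `None`. A self-contained illustration:

```python
from onnx import TensorProto, helper

# Value info with a symbolic batch axis and a fixed feature axis
vi = helper.make_tensor_value_info("X", TensorProto.FLOAT, ["batch", 4])

def _get_shape(dims):
    # dim_value is only populated for concrete axes; symbolic axes use dim_param
    return [d.dim_value if d.HasField("dim_value") else None for d in dims]

print(_get_shape(vi.type.tensor_type.shape.dim))  # -> [None, 4]
```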
@@ -262,28 +312,8 @@ def _sklearn_to_onnx(model, initial_types=None, transfer_learning=None,

     onx = convert_sklearn(model, initial_types=initial_types,target_opset={'': 15, 'ai.onnx.ml': 2})

-    ## Dynamically set model ir_version to ensure sklearn opsets work properly
-    from onnx.helper import VERSION_TABLE
-    import onnx
-    import numpy as np
-
-    indexlocationlist=[]
-    for i in VERSION_TABLE:
-        indexlocationlist.append(str(i).find(str(onnx.__version__)))
-
-
-    arr = np.array(indexlocationlist)
-
-    def condition(x): return x > -1
-
-    bool_arr = condition(arr)
-
-    output = np.where(bool_arr)[0]
-
-    ir_version=VERSION_TABLE[output[0]][1]
-
-    #add to model object before saving
-    onx.ir_version = ir_version
+    ## set model ir_version to ensure sklearn opsets work properly
+    onx.ir_version = 8

     # generate metadata dict
     metadata = {}
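The removed block located the running ONNX release in `VERSION_TABLE` via substring search over stringified tuples, which needed NumPy and could match the wrong row; the release now pins `ir_version = 8` (introduced with ONNX 1.12). If a dynamic lookup were ever wanted again, a simpler sketch is below; it assumes `VERSION_TABLE` rows start with `(release_string, ir_version, ...)`, which holds for current onnx releases:

```python
import onnx
from onnx.helper import VERSION_TABLE  # rows like ("1.12.0", 8, ...)

def ir_version_for(release: str, default: int = 8) -> int:
    """Exact-match lookup of the IR version for an onnx release string."""
    for row in VERSION_TABLE:
        if row[0] == release:
            return row[1]
    return default  # fall back to the value pinned in the diff

print(ir_version_for(onnx.__version__))
```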
@@ -552,8 +582,9 @@ def _keras_to_onnx(model, transfer_learning=None,
                    deep_learning=None, task_type=None, epochs=None):
     '''Converts a Keras model to ONNX and extracts metadata.'''

-    import tf2onnx
-    import tensorflow as tf
+    # Check and load tf2onnx and TensorFlow lazily (only when needed)
+    tf2onnx, tf = _check_tf2onnx_available()
+
     import numpy as np
     import onnx
     import pickle
@@ -647,20 +678,52 @@ def _keras_to_onnx(model, transfer_learning=None,
     metadata['model_weights'] = pickle.dumps(model.get_weights())

     # Extract architecture
-
+    if not model.built:  # add shape outputs if model not built
+        try:
+            model(tf.random.uniform([1] + list(input_shape[1:])))
+        except Exception:
+            pass  # fallback, don't crash conversion
+
     keras_layers = keras_unpack(model)
+
+
+    from tensorflow.python.framework import tensor_shape  # <- place this at the top of your file
+
     layers = []
     layers_n_params = []
     layers_shapes = []
     activations = []
-
+
     for layer in keras_layers:
+        # layer name
         layers.append(layer.__class__.__name__)
-        layers_n_params.append(layer.count_params())
-        layers_shapes.append(getattr(layer, 'output_shape', None))
+
+        # parameter count
+        try:
+            layers_n_params.append(layer.count_params())
+        except:
+            layers_n_params.append(0)
+
+        # output shape (sanitized for JSON)
+        shape = getattr(layer, 'output_shape', None)
+
+        if isinstance(shape, tensor_shape.TensorShape):
+            shape = shape.as_list()
+        elif shape is not None:
+            try:
+                shape = list(shape)
+            except:
+                shape = str(shape)
+        else:
+            shape = None
+
+        layers_shapes.append(shape)
+
+        # activation
         if hasattr(layer, 'activation'):
             act = getattr(layer.activation, '__name__', None)
-            if act: activations.append(act)
+            if act:
+                activations.append(act)

     optimizer = getattr(model.optimizer, '__class__', None)
     loss = getattr(model.loss, '__class__', None)
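Note the `# <- place this at the top of your file` editor note that shipped in the diff. Functionally, the `tensor_shape` import exists so `TensorShape` objects can be converted to plain lists before the metadata is JSON-serialized; a quick demonstration of why (assumes TensorFlow is installed):

```python
import json
from tensorflow.python.framework import tensor_shape

shape = tensor_shape.TensorShape([None, 28, 28, 1])
# TensorShape itself is not JSON-serializable; its as_list() form is
print(json.dumps(shape.as_list()))  # -> [null, 28, 28, 1]
```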
@@ -899,7 +962,7 @@ def model_to_onnx(model, framework=None, model_input=None, initial_types=None,
         from pyspark.ml.tuning import CrossValidatorModel, TrainValidationSplitModel
         from onnxmltools import convert_sparkml
     except:
-        print("Warning: Please install pyspark to enable pyspark features")
+        check_optional("pyspark", "PySpark")
     onnx = _pyspark_to_onnx(model, initial_types=initial_types,
                             transfer_learning=transfer_learning,
                             deep_learning=deep_learning,
@@ -954,23 +1017,39 @@ def model_to_onnx_timed(model_filepath, force_onnx=False, timeout=60, model_inpu

     except:
         print("Timeout: Model to ONNX conversion is taking longer than expected. This can be the case for big models.")
-        response = ''
-        while response not in {"1", "2"}:
-            response = input("Do you want to keep trying (1) or submit predictions only (2)? ")
-
-        if response == "1":
-            try:
-                import torch
-                if isinstance(model_filepath, torch.nn.Module):
+
+        # Detect CI/testing environment for non-interactive fallback
+        is_non_interactive = (
+            os.environ.get("PYTEST_CURRENT_TEST") is not None or
+            os.environ.get("AIMS_NON_INTERACTIVE") == "1"
+        )
+
+        if is_non_interactive:
+            # Auto-fallback to predictions-only in CI/testing environment
+            print("Non-interactive environment detected. Falling back to predictions-only submission.")
+            model_filepath = None
+        else:
+            # Interactive prompt for manual runs
+            response = ''
+            while response not in {"1", "2"}:
+                response = input("Do you want to keep trying (1) or submit predictions only (2)? ")
+
+            if response == "1":
+                try:
+                    import torch
+                    if isinstance(model_filepath, torch.nn.Module):
+                        onnx_model = model_to_onnx(model_filepath, model_input=model_input)
+                    else:
+                        onnx_model = model_to_onnx(model_filepath)
+                except Exception as e:
+                    # Final fallback - if torch-specific handling failed, try generic conversion
+                    # This handles cases where torch module detection fails but conversion might still work
+                    warnings.warn(f"PyTorch-specific ONNX conversion failed ({e}), attempting generic conversion")
                     onnx_model = model_to_onnx(model_filepath, model_input=model_input)
-                else:
-                    onnx_model = model_to_onnx(model_filepath)
-            except:
-                onnx_model = model_to_onnx(model_filepath)
-            model_filepath = onnx_model
+                model_filepath = onnx_model

-        elif response == "2":
-            model_filepath = None
+            elif response == "2":
+                model_filepath = None

     finally:
         print()
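Keying the fallback off `PYTEST_CURRENT_TEST` (set by pytest for the duration of each test) and an explicit `AIMS_NON_INTERACTIVE` opt-out stops `input()` from hanging CI jobs. A standalone sketch of the same check, plus a TTY guard the diff does not include (marked in the comment):

```python
import os
import sys

def is_non_interactive() -> bool:
    """True when prompting the user would block (pytest, CI, no TTY)."""
    return (
        os.environ.get("PYTEST_CURRENT_TEST") is not None
        or os.environ.get("AIMS_NON_INTERACTIVE") == "1"
        or not sys.stdin.isatty()  # extra guard, not part of the diff
    )

if is_non_interactive():
    print("Falling back to predictions-only submission.")
```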
@@ -989,6 +1068,12 @@ def _get_metadata(onnx_model):
     #assert(isinstance(onnx_model, onnx.onnx_ml_pb2.ModelProto)), \
     #"Please pass a onnx model object."

+    # Handle None input gracefully - always return a dict
+    if onnx_model is None:
+        if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+            print("[DEBUG] _get_metadata: onnx_model is None, returning empty dict")
+        return {}
+
     try:
         onnx_meta = onnx_model.metadata_props

@@ -999,36 +1084,121 @@ def _get_metadata(onnx_model):

         onnx_meta_dict = ast.literal_eval(onnx_meta_dict['model_metadata'])

+        # Handle case where metadata is stored as a list instead of dict
+        if isinstance(onnx_meta_dict, list):
+            if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+                print(f"[DEBUG] _get_metadata: metadata is a list of length {len(onnx_meta_dict)}")
+            if len(onnx_meta_dict) > 0 and isinstance(onnx_meta_dict[0], dict):
+                onnx_meta_dict = onnx_meta_dict[0]
+                if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+                    print("[DEBUG] _get_metadata: Extracted first dict from list")
+            else:
+                # Return empty dict if list doesn't contain valid dicts
+                if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+                    print("[DEBUG] _get_metadata: List does not contain valid dicts, returning empty dict")
+                return {}
+
+        # Ensure we have a dict at this point
+        if not isinstance(onnx_meta_dict, dict):
+            if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+                print(f"[DEBUG] _get_metadata: Unexpected metadata type {type(onnx_meta_dict)}, returning empty dict")
+            return {}
+
         #if onnx_meta_dict['model_config'] != None and \
         #onnx_meta_dict['ml_framework'] != 'pytorch':
         #    onnx_meta_dict['model_config'] = ast.literal_eval(onnx_meta_dict['model_config'])

-        if onnx_meta_dict['model_architecture'] != None:
-            onnx_meta_dict['model_architecture'] = ast.literal_eval(onnx_meta_dict['model_architecture'])
+        # Attempt to parse nested fields only if they are string representations of dicts
+        if 'model_architecture' in onnx_meta_dict and onnx_meta_dict['model_architecture'] != None:
+            try:
+                if isinstance(onnx_meta_dict['model_architecture'], str):
+                    onnx_meta_dict['model_architecture'] = ast.literal_eval(onnx_meta_dict['model_architecture'])
+            except (ValueError, SyntaxError):
+                # Keep as-is if parsing fails
+                pass
+
+        if 'model_config' in onnx_meta_dict and onnx_meta_dict['model_config'] != None:
+            try:
+                if isinstance(onnx_meta_dict['model_config'], str):
+                    onnx_meta_dict['model_config'] = ast.literal_eval(onnx_meta_dict['model_config'])
+            except (ValueError, SyntaxError):
+                # Keep as-is if parsing fails
+                pass

-        if onnx_meta_dict['metadata_onnx'] != None:
-            onnx_meta_dict['metadata_onnx'] = ast.literal_eval(onnx_meta_dict['metadata_onnx'])
+        if 'metadata_onnx' in onnx_meta_dict and onnx_meta_dict['metadata_onnx'] != None:
+            try:
+                if isinstance(onnx_meta_dict['metadata_onnx'], str):
+                    onnx_meta_dict['metadata_onnx'] = ast.literal_eval(onnx_meta_dict['metadata_onnx'])
+            except (ValueError, SyntaxError):
+                # Keep as-is if parsing fails
+                pass

         # onnx_meta_dict['model_image'] = onnx_to_image(onnx_model)

     except Exception as e:

-        print(e)
+        if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+            print(f"[DEBUG] _get_metadata: Exception during metadata extraction: {e}")

-        onnx_meta_dict = ast.literal_eval(onnx_meta_dict)
+        try:
+            onnx_meta_dict = ast.literal_eval(onnx_meta_dict)
+            # Handle list case in exception path as well
+            if isinstance(onnx_meta_dict, list) and len(onnx_meta_dict) > 0 and isinstance(onnx_meta_dict[0], dict):
+                onnx_meta_dict = onnx_meta_dict[0]
+            elif not isinstance(onnx_meta_dict, dict):
+                onnx_meta_dict = {}
+        except:
+            onnx_meta_dict = {}
+
+    # Final safety check: ensure we always return a dict
+    if not isinstance(onnx_meta_dict, dict):
+        if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+            print(f"[DEBUG] _get_metadata: Final check failed, returning empty dict instead of {type(onnx_meta_dict)}")
+        return {}

     return onnx_meta_dict



 def _get_leaderboard_data(onnx_model, eval_metrics=None):
+    '''Extract leaderboard data from ONNX model or return defaults.
+
+    This function performs single-pass normalization and safely handles:
+    - None onnx_model (returns defaults)
+    - Invalid metadata structures
+    - Missing keys in metadata
+    '''

+    # Start with eval_metrics if provided, otherwise empty dict
     if eval_metrics is not None:
-        metadata = eval_metrics
+        metadata = dict(eval_metrics) if isinstance(eval_metrics, dict) else {}
     else:
-        metadata = dict()
+        metadata = {}
+
+    # Handle None onnx_model gracefully
+    if onnx_model is None:
+        if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+            print("[DEBUG] _get_leaderboard_data: onnx_model is None, using default metadata")
+        # Return metadata with safe defaults injected
+        metadata['ml_framework'] = metadata.get('ml_framework', None)
+        metadata['transfer_learning'] = metadata.get('transfer_learning', None)
+        metadata['deep_learning'] = metadata.get('deep_learning', None)
+        metadata['model_type'] = metadata.get('model_type', None)
+        metadata['depth'] = metadata.get('depth', 0)
+        metadata['num_params'] = metadata.get('num_params', 0)
+        return metadata

+    # Get metadata from ONNX - _get_metadata now always returns a dict
     metadata_raw = _get_metadata(onnx_model)
+
+    if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+        print(f"[DEBUG] _get_leaderboard_data: metadata_raw type={type(metadata_raw)}, keys={list(metadata_raw.keys()) if isinstance(metadata_raw, dict) else 'N/A'}")
+
+    # Single-pass normalization: ensure metadata_raw is a dict
+    if not isinstance(metadata_raw, dict):
+        if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+            print(f"[DEBUG] _get_leaderboard_data: metadata_raw is not a dict (type={type(metadata_raw)}), using empty dict")
+        metadata_raw = {}

     # get list of current layer types
     layer_list_keras, activation_list_keras = _get_layer_names()
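Each nested field now goes through the same guarded pattern: parse only strings, catch `ValueError`/`SyntaxError`, and keep the original value on failure. The pattern in isolation (the helper name is illustrative; the diff inlines this logic at each call site):

```python
import ast

def parse_if_literal(value):
    """Parse Python-literal strings; leave everything else untouched."""
    if not isinstance(value, str):
        return value
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError):
        return value  # keep as-is on failure, matching the diff

print(parse_if_literal("{'layers_number': 3}"))  # -> {'layers_number': 3}
print(parse_if_literal("not a literal"))         # -> 'not a literal' (unchanged)
```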
@@ -1037,46 +1207,55 @@ def _get_leaderboard_data(onnx_model, eval_metrics=None):
     layer_list = list(set(layer_list_keras + layer_list_pytorch))
     activation_list = list(set(activation_list_keras + activation_list_pytorch))

-    # get general model info
-    metadata['ml_framework'] = metadata_raw['ml_framework']
-    metadata['transfer_learning'] = metadata_raw['transfer_learning']
-    metadata['deep_learning'] = metadata_raw['deep_learning']
-    metadata['model_type'] = metadata_raw['model_type']
+    # get general model info - use .get() for safety
+    metadata['ml_framework'] = metadata_raw.get('ml_framework')
+    metadata['transfer_learning'] = metadata_raw.get('transfer_learning')
+    metadata['deep_learning'] = metadata_raw.get('deep_learning')
+    metadata['model_type'] = metadata_raw.get('model_type')


     # get neural network metrics
-    if metadata_raw['ml_framework'] in ['keras', 'pytorch'] or metadata_raw['model_type'] in ['MLPClassifier', 'MLPRegressor']:
-        metadata['depth'] = metadata_raw['model_architecture']['layers_number']
-        metadata['num_params'] = sum(metadata_raw['model_architecture']['layers_n_params'])
+    # Add isinstance check for model_architecture to prevent TypeError
+    if (metadata_raw.get('ml_framework') in ['keras', 'pytorch'] or
+            metadata_raw.get('model_type') in ['MLPClassifier', 'MLPRegressor']) and \
+            isinstance(metadata_raw.get('model_architecture'), dict):
+
+        metadata['depth'] = metadata_raw['model_architecture'].get('layers_number', 0)
+        metadata['num_params'] = sum(metadata_raw['model_architecture'].get('layers_n_params', []))

         for i in layer_list:
-            if i in metadata_raw['model_architecture']['layers_summary']:
-                metadata[i.lower()+'_layers'] = metadata_raw['model_architecture']['layers_summary'][i]
+            layers_summary = metadata_raw['model_architecture'].get('layers_summary', {})
+            if i in layers_summary:
+                metadata[i.lower()+'_layers'] = layers_summary[i]
             elif i.lower()+'_layers' not in metadata.keys():
                 metadata[i.lower()+'_layers'] = 0

         for i in activation_list:
-            if i in metadata_raw['model_architecture']['activations_summary']:
+            activations_summary = metadata_raw['model_architecture'].get('activations_summary', {})
+            if i in activations_summary:
                 if i.lower()+'_act' in metadata:
-                    metadata[i.lower()+'_act'] += metadata_raw['model_architecture']['activations_summary'][i]
+                    metadata[i.lower()+'_act'] += activations_summary[i]
                 else:
-                    metadata[i.lower()+'_act'] = metadata_raw['model_architecture']['activations_summary'][i]
+                    metadata[i.lower()+'_act'] = activations_summary[i]
             else:
                 if i.lower()+'_act' not in metadata:
                     metadata[i.lower()+'_act'] = 0

-        metadata['loss'] = metadata_raw['model_architecture']['loss']
-        metadata['optimizer'] = metadata_raw['model_architecture']["optimizer"]
-        metadata['model_config'] = metadata_raw['model_config']
-        metadata['epochs'] = metadata_raw['epochs']
-        metadata['memory_size'] = metadata_raw['memory_size']
+        metadata['loss'] = metadata_raw['model_architecture'].get('loss')
+        metadata['optimizer'] = metadata_raw['model_architecture'].get('optimizer')
+        metadata['model_config'] = metadata_raw.get('model_config')
+        metadata['epochs'] = metadata_raw.get('epochs')
+        metadata['memory_size'] = metadata_raw.get('memory_size')

     # get sklearn & pyspark model metrics
-    elif metadata_raw['ml_framework'] in ['sklearn', 'xgboost', 'pyspark']:
+    elif metadata_raw.get('ml_framework') in ['sklearn', 'xgboost', 'pyspark']:
         metadata['depth'] = 0

         try:
-            metadata['num_params'] = sum(metadata_raw['model_architecture']['layers_n_params'])
+            if isinstance(metadata_raw.get('model_architecture'), dict):
+                metadata['num_params'] = sum(metadata_raw['model_architecture'].get('layers_n_params', []))
+            else:
+                metadata['num_params'] = 0
         except:
             metadata['num_params'] = 0

@@ -1089,21 +1268,36 @@ def _get_leaderboard_data(onnx_model, eval_metrics=None):
             metadata['loss'] = None

         try:
-            metadata['optimizer'] = metadata_raw['model_architecture']['optimizer']
+            if isinstance(metadata_raw.get('model_architecture'), dict):
+                metadata['optimizer'] = metadata_raw['model_architecture'].get('optimizer')
+            else:
+                metadata['optimizer'] = None
         except:
             metadata['optimizer'] = None

         try:
-            metadata['model_config'] = metadata_raw['model_config']
+            metadata['model_config'] = metadata_raw.get('model_config')
         except:
             metadata['model_config'] = None

+    # Default handling for unknown frameworks
+    else:
+        if os.environ.get("AIMODELSHARE_DEBUG_METADATA"):
+            print(f"[DEBUG] _get_leaderboard_data: Unknown framework '{metadata_raw.get('ml_framework')}', using defaults")
+        metadata.setdefault('depth', 0)
+        metadata.setdefault('num_params', 0)
+        for i in layer_list:
+            metadata.setdefault(i.lower()+'_layers', 0)
+        for i in activation_list:
+            metadata.setdefault(i.lower()+'_act', 0)
+
     return metadata



 def _model_summary(meta_dict, from_onnx=False):
     '''Creates model summary table from model metadata dict.'''
+    import io

     assert(isinstance(meta_dict, dict)), \
         "Please pass valid metadata dict."
@@ -1112,9 +1306,9 @@ def _model_summary(meta_dict, from_onnx=False):
         "Please make sure model architecture data is included."

     if from_onnx == True:
-        model_summary = pd.read_json(meta_dict['metadata_onnx']["model_summary"])
+        model_summary = pd.read_json(io.StringIO(meta_dict['metadata_onnx']["model_summary"]))
     else:
-        model_summary = pd.read_json(meta_dict["model_summary"])
+        model_summary = pd.read_json(io.StringIO(meta_dict["model_summary"]))

     return model_summary

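Passing a raw JSON string to `pd.read_json` has been deprecated since pandas 2.1 (it emits a `FutureWarning`; file paths or buffers will eventually be required), and wrapping the string in `io.StringIO` is the documented migration. Minimal demonstration:

```python
import io
import pandas as pd

model_summary_json = '{"layer": {"0": "Dense", "1": "Dropout"}, "params": {"0": 128, "1": 0}}'

# Deprecated: pd.read_json(model_summary_json)
model_summary = pd.read_json(io.StringIO(model_summary_json))
print(model_summary)
```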
@@ -1517,7 +1711,8 @@ def _get_sklearn_modules():

     sklearn_modules = ['ensemble', 'gaussian_process', 'isotonic',
                        'linear_model', 'mixture', 'multiclass', 'naive_bayes',
-                       'neighbors', 'neural_network', 'svm', 'tree']
+                       'neighbors', 'neural_network', 'svm', 'tree',
+                       'discriminant_analysis', 'calibration']

     models_modules_dict = {}

@@ -1533,9 +1728,31 @@ def _get_sklearn_modules():

 def model_from_string(model_type):
     models_modules_dict = _get_sklearn_modules()
-    module = models_modules_dict[model_type]
-    model_class = getattr(importlib.import_module(module), model_type)
-    return model_class
+    try:
+        module = models_modules_dict[model_type]
+        model_class = getattr(importlib.import_module(module), model_type)
+        return model_class
+    except KeyError:
+        # Return a placeholder class if estimator not found
+        import warnings
+        warnings.warn(f"Model type '{model_type}' not found in sklearn modules. Returning placeholder class.")
+
+        # Create a minimal placeholder class that can be instantiated
+        class PlaceholderModel:
+            def __init__(self, **kwargs):
+                self._model_type = model_type
+                self._params = kwargs
+
+            def get_params(self, deep=True):
+                return self._params
+
+            def __str__(self):
+                return f"PlaceholderModel({self._model_type})"
+
+            def __repr__(self):
+                return f"PlaceholderModel({self._model_type})"
+
+        return PlaceholderModel

 def _get_pyspark_modules():
     try:
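With the `KeyError` branch, `model_from_string` degrades gracefully when reproducing a model whose estimator is outside the scanned sklearn modules. Expected behavior, sketched under the assumption that the patched function above is importable from `aimodelshare.aimsonnx`:

```python
from aimodelshare.aimsonnx import model_from_string

# Known estimator: resolves to the real sklearn class
LogisticRegression = model_from_string("LogisticRegression")

# Unknown estimator: warns and returns an instantiable placeholder
Mystery = model_from_string("SomeUnknownEstimator")
clf = Mystery(alpha=0.1)
print(clf, clf.get_params())  # PlaceholderModel(SomeUnknownEstimator) {'alpha': 0.1}
```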
aimodelshare/api.py CHANGED
@@ -57,7 +57,7 @@ class create_prediction_api_class():
         self.pyspark_support = pyspark_support
         self.region = os.environ.get("AWS_REGION")
         self.bucket_name = os.environ.get("BUCKET_NAME")
-        self.python_runtime = 'python3.10'
+        self.python_runtime = os.environ.get("PYTHON_RUNTIME", "python3.12")
         #####

         self.model_type = self.model_type.lower()
@@ -87,23 +87,24 @@ class create_prediction_api_class():
             "video": 90,
             "custom": 90
         }
-
+        # UPDATED: New eval layer ARNs (python3.12)
         self.eval_layer_map = {
-            "us-east-1": "arn:aws:lambda:us-east-1:517169013426:layer:eval_layer_test:13",
-            "us-east-2": "arn:aws:lambda:us-east-2:517169013426:layer:eval_layer_test:7",
-            "us-west-1": "arn:aws:lambda:us-west-1:517169013426:layer:eval_layer_test:2",
-            "us-west-2": "arn:aws:lambda:us-west-2:517169013426:layer:eval_layer_test:2",
-            "eu-west-1": "arn:aws:lambda:eu-west-1:517169013426:layer:eval_layer_test:2",
-            "eu-west-2": "arn:aws:lambda:eu-west-2:517169013426:layer:eval_layer_test:2",
-            "eu-west-3": "arn:aws:lambda:eu-west-3:517169013426:layer:eval_layer_test:2"
+            "us-east-1": "arn:aws:lambda:us-east-1:585666012274:layer:eval-layer-python3-12:4",
+            "us-east-2": "arn:aws:lambda:us-east-2:517169013426:layer:eval_layer_test:5",
+            "us-west-1": "arn:aws:lambda:us-west-1:517169013426:layer:eval_layer_test:1",
+            "us-west-2": "arn:aws:lambda:us-west-2:517169013426:layer:eval_layer_test:1",
+            "eu-west-1": "arn:aws:lambda:eu-west-1:585666012274:layer:eval-layer-python3-12:1",
+            "eu-west-2": "arn:aws:lambda:eu-west-2:517169013426:layer:eval_layer_test:1",
+            "eu-west-3": "arn:aws:lambda:eu-west-3:517169013426:layer:eval_layer_test:1"
         }

+        # UPDATED: New auth layer ARNs (python3.12)
         self.auth_layer_map = {
-            "us-east-1": "arn:aws:lambda:us-east-1:517169013426:layer:aimsauth_layer:2",
+            "us-east-1": "arn:aws:lambda:us-east-1:585666012274:layer:aimsauth-layer-python3-12:6",
             "us-east-2": "arn:aws:lambda:us-east-2:517169013426:layer:aimsauth_layer:9",
             "us-west-1": "arn:aws:lambda:us-west-1:517169013426:layer:aimsauth_layer:1",
             "us-west-2": "arn:aws:lambda:us-west-2:517169013426:layer:aimsauth_layer:1",
-            "eu-west-1": "arn:aws:lambda:eu-west-1:517169013426:layer:aimsauth_layer:1",
+            "eu-west-1": "arn:aws:lambda:eu-west-1:585666012274:layer:aimsauth-layer-python3-12:6",
             "eu-west-2": "arn:aws:lambda:eu-west-2:517169013426:layer:aimsauth_layer:1",
             "eu-west-3": "arn:aws:lambda:eu-west-3:517169013426:layer:aimsauth_layer:1"
         }
@@ -293,7 +294,7 @@ class create_prediction_api_class():
         )

         eval_code_source = {'S3Bucket': self.bucket_name, 'S3Key': self.unique_model_id + "/" + "archiveeval.zip"}
-        eval_layers = [self.eval_layer]
+        eval_layers = [self.eval_layer, self.auth_layer]
         create_lambda_function(lambdaevalfxnname, self.python_runtime, role_arn, handler, eval_code_source, 90, 2048, eval_layers)

         auth_code_source = {'S3Bucket': self.bucket_name, 'S3Key': self.unique_model_id + "/" + "archiveauth.zip"}
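Attaching the auth layer to the eval Lambda lets the evaluation function import the shared auth code added in this release. The in-repo `create_lambda_function` wrapper is not shown in this diff; a hedged boto3 sketch of the call it presumably wraps (all names and values are placeholders except the boto3 API itself):

```python
import boto3

def create_lambda_function(name, runtime, role_arn, handler, code_source,
                           timeout, memory, layers):
    """Sketch of a thin wrapper over Lambda's create_function; not the repo's code."""
    client = boto3.client("lambda")
    return client.create_function(
        FunctionName=name,
        Runtime=runtime,          # e.g. "python3.12" via PYTHON_RUNTIME
        Role=role_arn,
        Handler=handler,
        Code=code_source,         # {'S3Bucket': ..., 'S3Key': ...}
        Timeout=timeout,
        MemorySize=memory,
        Layers=layers,            # now [eval_layer, auth_layer]
    )
```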