morphml 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of morphml might be problematic. Click here for more details.

Files changed (158)
  1. morphml/__init__.py +14 -0
  2. morphml/api/__init__.py +26 -0
  3. morphml/api/app.py +326 -0
  4. morphml/api/auth.py +193 -0
  5. morphml/api/client.py +338 -0
  6. morphml/api/models.py +132 -0
  7. morphml/api/rate_limit.py +192 -0
  8. morphml/benchmarking/__init__.py +36 -0
  9. morphml/benchmarking/comparison.py +430 -0
  10. morphml/benchmarks/__init__.py +56 -0
  11. morphml/benchmarks/comparator.py +409 -0
  12. morphml/benchmarks/datasets.py +280 -0
  13. morphml/benchmarks/metrics.py +199 -0
  14. morphml/benchmarks/openml_suite.py +201 -0
  15. morphml/benchmarks/problems.py +289 -0
  16. morphml/benchmarks/suite.py +318 -0
  17. morphml/cli/__init__.py +5 -0
  18. morphml/cli/commands/experiment.py +329 -0
  19. morphml/cli/main.py +457 -0
  20. morphml/cli/quickstart.py +312 -0
  21. morphml/config.py +278 -0
  22. morphml/constraints/__init__.py +19 -0
  23. morphml/constraints/handler.py +205 -0
  24. morphml/constraints/predicates.py +285 -0
  25. morphml/core/__init__.py +3 -0
  26. morphml/core/crossover.py +449 -0
  27. morphml/core/dsl/README.md +359 -0
  28. morphml/core/dsl/__init__.py +72 -0
  29. morphml/core/dsl/ast_nodes.py +364 -0
  30. morphml/core/dsl/compiler.py +318 -0
  31. morphml/core/dsl/layers.py +368 -0
  32. morphml/core/dsl/lexer.py +336 -0
  33. morphml/core/dsl/parser.py +455 -0
  34. morphml/core/dsl/search_space.py +386 -0
  35. morphml/core/dsl/syntax.py +199 -0
  36. morphml/core/dsl/type_system.py +361 -0
  37. morphml/core/dsl/validator.py +386 -0
  38. morphml/core/graph/__init__.py +40 -0
  39. morphml/core/graph/edge.py +124 -0
  40. morphml/core/graph/graph.py +507 -0
  41. morphml/core/graph/mutations.py +409 -0
  42. morphml/core/graph/node.py +196 -0
  43. morphml/core/graph/serialization.py +361 -0
  44. morphml/core/graph/visualization.py +431 -0
  45. morphml/core/objectives/__init__.py +20 -0
  46. morphml/core/search/__init__.py +33 -0
  47. morphml/core/search/individual.py +252 -0
  48. morphml/core/search/parameters.py +453 -0
  49. morphml/core/search/population.py +375 -0
  50. morphml/core/search/search_engine.py +340 -0
  51. morphml/distributed/__init__.py +76 -0
  52. morphml/distributed/fault_tolerance.py +497 -0
  53. morphml/distributed/health_monitor.py +348 -0
  54. morphml/distributed/master.py +709 -0
  55. morphml/distributed/proto/README.md +224 -0
  56. morphml/distributed/proto/__init__.py +74 -0
  57. morphml/distributed/proto/worker.proto +170 -0
  58. morphml/distributed/proto/worker_pb2.py +79 -0
  59. morphml/distributed/proto/worker_pb2_grpc.py +423 -0
  60. morphml/distributed/resource_manager.py +416 -0
  61. morphml/distributed/scheduler.py +567 -0
  62. morphml/distributed/storage/__init__.py +33 -0
  63. morphml/distributed/storage/artifacts.py +381 -0
  64. morphml/distributed/storage/cache.py +366 -0
  65. morphml/distributed/storage/checkpointing.py +329 -0
  66. morphml/distributed/storage/database.py +459 -0
  67. morphml/distributed/worker.py +549 -0
  68. morphml/evaluation/__init__.py +5 -0
  69. morphml/evaluation/heuristic.py +237 -0
  70. morphml/exceptions.py +55 -0
  71. morphml/execution/__init__.py +5 -0
  72. morphml/execution/local_executor.py +350 -0
  73. morphml/integrations/__init__.py +28 -0
  74. morphml/integrations/jax_adapter.py +206 -0
  75. morphml/integrations/pytorch_adapter.py +530 -0
  76. morphml/integrations/sklearn_adapter.py +206 -0
  77. morphml/integrations/tensorflow_adapter.py +230 -0
  78. morphml/logging_config.py +93 -0
  79. morphml/meta_learning/__init__.py +66 -0
  80. morphml/meta_learning/architecture_similarity.py +277 -0
  81. morphml/meta_learning/experiment_database.py +240 -0
  82. morphml/meta_learning/knowledge_base/__init__.py +19 -0
  83. morphml/meta_learning/knowledge_base/embedder.py +179 -0
  84. morphml/meta_learning/knowledge_base/knowledge_base.py +313 -0
  85. morphml/meta_learning/knowledge_base/meta_features.py +265 -0
  86. morphml/meta_learning/knowledge_base/vector_store.py +271 -0
  87. morphml/meta_learning/predictors/__init__.py +27 -0
  88. morphml/meta_learning/predictors/ensemble.py +221 -0
  89. morphml/meta_learning/predictors/gnn_predictor.py +552 -0
  90. morphml/meta_learning/predictors/learning_curve.py +231 -0
  91. morphml/meta_learning/predictors/proxy_metrics.py +261 -0
  92. morphml/meta_learning/strategy_evolution/__init__.py +27 -0
  93. morphml/meta_learning/strategy_evolution/adaptive_optimizer.py +226 -0
  94. morphml/meta_learning/strategy_evolution/bandit.py +276 -0
  95. morphml/meta_learning/strategy_evolution/portfolio.py +230 -0
  96. morphml/meta_learning/transfer.py +581 -0
  97. morphml/meta_learning/warm_start.py +286 -0
  98. morphml/optimizers/__init__.py +74 -0
  99. morphml/optimizers/adaptive_operators.py +399 -0
  100. morphml/optimizers/bayesian/__init__.py +52 -0
  101. morphml/optimizers/bayesian/acquisition.py +387 -0
  102. morphml/optimizers/bayesian/base.py +319 -0
  103. morphml/optimizers/bayesian/gaussian_process.py +635 -0
  104. morphml/optimizers/bayesian/smac.py +534 -0
  105. morphml/optimizers/bayesian/tpe.py +411 -0
  106. morphml/optimizers/differential_evolution.py +220 -0
  107. morphml/optimizers/evolutionary/__init__.py +61 -0
  108. morphml/optimizers/evolutionary/cma_es.py +416 -0
  109. morphml/optimizers/evolutionary/differential_evolution.py +556 -0
  110. morphml/optimizers/evolutionary/encoding.py +426 -0
  111. morphml/optimizers/evolutionary/particle_swarm.py +449 -0
  112. morphml/optimizers/genetic_algorithm.py +486 -0
  113. morphml/optimizers/gradient_based/__init__.py +22 -0
  114. morphml/optimizers/gradient_based/darts.py +550 -0
  115. morphml/optimizers/gradient_based/enas.py +585 -0
  116. morphml/optimizers/gradient_based/operations.py +474 -0
  117. morphml/optimizers/gradient_based/utils.py +601 -0
  118. morphml/optimizers/hill_climbing.py +169 -0
  119. morphml/optimizers/multi_objective/__init__.py +56 -0
  120. morphml/optimizers/multi_objective/indicators.py +504 -0
  121. morphml/optimizers/multi_objective/nsga2.py +647 -0
  122. morphml/optimizers/multi_objective/visualization.py +427 -0
  123. morphml/optimizers/nsga2.py +308 -0
  124. morphml/optimizers/random_search.py +172 -0
  125. morphml/optimizers/simulated_annealing.py +181 -0
  126. morphml/plugins/__init__.py +35 -0
  127. morphml/plugins/custom_evaluator_example.py +81 -0
  128. morphml/plugins/custom_optimizer_example.py +63 -0
  129. morphml/plugins/plugin_system.py +454 -0
  130. morphml/reports/__init__.py +30 -0
  131. morphml/reports/generator.py +362 -0
  132. morphml/tracking/__init__.py +7 -0
  133. morphml/tracking/experiment.py +309 -0
  134. morphml/tracking/logger.py +301 -0
  135. morphml/tracking/reporter.py +357 -0
  136. morphml/utils/__init__.py +6 -0
  137. morphml/utils/checkpoint.py +189 -0
  138. morphml/utils/comparison.py +390 -0
  139. morphml/utils/export.py +407 -0
  140. morphml/utils/progress.py +392 -0
  141. morphml/utils/validation.py +392 -0
  142. morphml/version.py +7 -0
  143. morphml/visualization/__init__.py +50 -0
  144. morphml/visualization/analytics.py +423 -0
  145. morphml/visualization/architecture_diagrams.py +353 -0
  146. morphml/visualization/architecture_plot.py +223 -0
  147. morphml/visualization/convergence_plot.py +174 -0
  148. morphml/visualization/crossover_viz.py +386 -0
  149. morphml/visualization/graph_viz.py +338 -0
  150. morphml/visualization/pareto_plot.py +149 -0
  151. morphml/visualization/plotly_dashboards.py +422 -0
  152. morphml/visualization/population.py +309 -0
  153. morphml/visualization/progress.py +260 -0
  154. morphml-1.0.0.dist-info/METADATA +434 -0
  155. morphml-1.0.0.dist-info/RECORD +158 -0
  156. morphml-1.0.0.dist-info/WHEEL +4 -0
  157. morphml-1.0.0.dist-info/entry_points.txt +3 -0
  158. morphml-1.0.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,206 @@
1
+ """Scikit-learn adapter for MorphML.
2
+
3
+ Converts ModelGraph to scikit-learn Pipeline for classical ML.
4
+
5
+ Example:
6
+ >>> from morphml.integrations import SklearnAdapter
7
+ >>> adapter = SklearnAdapter()
8
+ >>> pipeline = adapter.build_pipeline(graph)
9
+ >>> pipeline.fit(X_train, y_train)
10
+ >>> predictions = pipeline.predict(X_test)
11
+ """
12
+
13
+ from typing import Any, Dict, Optional
14
+
15
+ try:
16
+ from sklearn.decomposition import PCA
17
+ from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
18
+ from sklearn.linear_model import LogisticRegression
19
+ from sklearn.neighbors import KNeighborsClassifier
20
+ from sklearn.pipeline import Pipeline
21
+ from sklearn.preprocessing import StandardScaler
22
+ from sklearn.svm import SVC
23
+
24
+ SKLEARN_AVAILABLE = True
25
+ except ImportError:
26
+ SKLEARN_AVAILABLE = False
27
+ Pipeline = None
28
+
29
+ from morphml.core.graph import GraphNode, ModelGraph
30
+ from morphml.logging_config import get_logger
31
+
32
+ logger = get_logger(__name__)
33
+
34
+
35
class SklearnAdapter:
    """
    Convert ModelGraph to scikit-learn Pipeline.

    Supports classical ML algorithms and preprocessing steps.

    Example:
        >>> adapter = SklearnAdapter()
        >>> pipeline = adapter.build_pipeline(graph)
        >>> pipeline.fit(X_train, y_train)
        >>> score = pipeline.score(X_test, y_test)
    """

    def __init__(self):
        """Initialize Scikit-learn adapter."""
        if not SKLEARN_AVAILABLE:
            raise ImportError(
                "Scikit-learn is required for SklearnAdapter. "
                "Install with: pip install scikit-learn"
            )
        logger.info("Initialized SklearnAdapter")

    def build_pipeline(
        self, graph: ModelGraph, config: Optional[Dict[str, Any]] = None
    ) -> Pipeline:
        """
        Build scikit-learn pipeline from graph.

        Nodes are visited in topological order; nodes with no sklearn
        equivalent (e.g. ``input``) contribute no step.

        Args:
            graph: ModelGraph to convert
            config: Optional configuration (currently unused)

        Returns:
            sklearn Pipeline instance

        Example:
            >>> pipeline = adapter.build_pipeline(graph)
        """
        candidates = (self._create_step(node) for node in graph.topological_sort())
        steps = [step for step in candidates if step is not None]

        if not steps:
            # Fall back to a sensible default pipeline.
            steps = [("scaler", StandardScaler()), ("classifier", RandomForestClassifier())]

        pipeline = Pipeline(steps)
        logger.info(f"Created sklearn pipeline with {len(steps)} steps")
        return pipeline

    def _create_step(self, node: GraphNode) -> Optional[tuple]:
        """
        Create pipeline step from node.

        Args:
            node: GraphNode to convert

        Returns:
            Tuple of (name, estimator), or None for ``input`` nodes and
            unknown operations.
        """
        op = node.operation
        get = node.params.get

        if op == "input":
            return None

        # Lazy factories: only the estimator for the matching op is built.
        factories = {
            "scaler": lambda: (f"scaler_{node.id}", StandardScaler()),
            "pca": lambda: (f"pca_{node.id}", PCA(n_components=get("n_components", 50))),
            "random_forest": lambda: (
                f"rf_{node.id}",
                RandomForestClassifier(
                    n_estimators=get("n_estimators", 100),
                    max_depth=get("max_depth", None),
                    min_samples_split=get("min_samples_split", 2),
                    random_state=get("random_state", 42),
                ),
            ),
            "gradient_boosting": lambda: (
                f"gb_{node.id}",
                GradientBoostingClassifier(
                    n_estimators=get("n_estimators", 100),
                    learning_rate=get("learning_rate", 0.1),
                    max_depth=get("max_depth", 3),
                    random_state=get("random_state", 42),
                ),
            ),
            "logistic_regression": lambda: (
                f"lr_{node.id}",
                LogisticRegression(
                    C=get("C", 1.0),
                    max_iter=get("max_iter", 100),
                    random_state=get("random_state", 42),
                ),
            ),
            "svm": lambda: (
                f"svm_{node.id}",
                SVC(
                    C=get("C", 1.0),
                    kernel=get("kernel", "rbf"),
                    gamma=get("gamma", "scale"),
                    random_state=get("random_state", 42),
                ),
            ),
            "knn": lambda: (
                f"knn_{node.id}",
                KNeighborsClassifier(
                    n_neighbors=get("n_neighbors", 5),
                    weights=get("weights", "uniform"),
                ),
            ),
        }

        factory = factories.get(op)
        if factory is None:
            logger.warning(f"Unknown operation for sklearn: {op}")
            return None
        return factory()

    def create_search_space_for_sklearn(self, graph: ModelGraph) -> Dict[str, Any]:
        """
        Create hyperparameter search space for sklearn pipeline.

        Parameter names follow the ``<prefix>_<node.id>__<param>`` convention
        matching the step names produced by :meth:`build_pipeline`, so the
        result can be fed directly to GridSearchCV / RandomizedSearchCV.

        Args:
            graph: ModelGraph

        Returns:
            Dictionary of hyperparameter distributions

        Example:
            >>> search_space = adapter.create_search_space_for_sklearn(graph)
            >>> # Use with GridSearchCV or RandomizedSearchCV
        """
        # Per-operation: (step-name prefix, {hyperparameter: candidate values}).
        grids = {
            "random_forest": (
                "rf",
                {"n_estimators": [50, 100, 200], "max_depth": [None, 10, 20, 30]},
            ),
            "gradient_boosting": (
                "gb",
                {"n_estimators": [50, 100, 200], "learning_rate": [0.01, 0.1, 0.2]},
            ),
            "logistic_regression": ("lr", {"C": [0.1, 1.0, 10.0]}),
            "svm": ("svm", {"C": [0.1, 1.0, 10.0], "kernel": ["linear", "rbf"]}),
            "pca": ("pca", {"n_components": [10, 20, 50, 100]}),
        }

        param_grid: Dict[str, Any] = {}
        for node in graph.topological_sort():
            entry = grids.get(node.operation)
            if entry is None:
                continue
            prefix, options = entry
            for hp_name, values in options.items():
                param_grid[f"{prefix}_{node.id}__{hp_name}"] = values
        return param_grid
@@ -0,0 +1,230 @@
1
+ """TensorFlow/Keras adapter for MorphML.
2
+
3
+ Converts ModelGraph to Keras Model using Functional API.
4
+
5
+ Example:
6
+ >>> from morphml.integrations import TensorFlowAdapter
7
+ >>> adapter = TensorFlowAdapter()
8
+ >>> model = adapter.build_model(graph)
9
+ >>> model.compile(optimizer='adam', loss='categorical_crossentropy')
10
+ >>> model.fit(x_train, y_train, validation_data=(x_val, y_val))
11
+ """
12
+
13
+ from typing import Any, Dict, Optional, Tuple
14
+
15
+ try:
16
+ import tensorflow as tf
17
+ from tensorflow import keras
18
+ from tensorflow.keras import layers
19
+
20
+ TF_AVAILABLE = True
21
+ except ImportError:
22
+ TF_AVAILABLE = False
23
+ tf = None
24
+ keras = None
25
+ layers = None
26
+
27
+ from morphml.core.graph import GraphNode, ModelGraph
28
+ from morphml.logging_config import get_logger
29
+
30
+ logger = get_logger(__name__)
31
+
32
+
33
class TensorFlowAdapter:
    """
    Convert ModelGraph to TensorFlow/Keras Model.

    Uses Keras Functional API to build models from graph structure.

    Example:
        >>> adapter = TensorFlowAdapter()
        >>> model = adapter.build_model(graph, input_shape=(32, 32, 3))
        >>> model.summary()
    """

    def __init__(self):
        """Initialize TensorFlow adapter."""
        if not TF_AVAILABLE:
            raise ImportError(
                "TensorFlow is required for TensorFlowAdapter. "
                "Install with: pip install tensorflow"
            )
        logger.info("Initialized TensorFlowAdapter")

    def build_model(
        self, graph: ModelGraph, input_shape: Optional[Tuple[int, ...]] = None
    ) -> "keras.Model":
        """
        Build Keras model from graph.

        Args:
            graph: ModelGraph to convert
            input_shape: Input shape (H, W, C) for Keras; defaults to
                (32, 32, 3) when omitted.

        Returns:
            keras.Model instance

        Example:
            >>> model = adapter.build_model(graph, input_shape=(32, 32, 3))
        """
        shape = input_shape if input_shape is not None else (32, 32, 3)
        inputs = keras.Input(shape=shape)

        # Output tensor of each processed node, keyed by node id.
        # Topological order guarantees predecessors are resolved first.
        tensor_for = {}

        for node in graph.topological_sort():
            if node.operation == "input":
                tensor_for[node.id] = inputs
                continue

            keras_layer = self._create_layer(node)

            preds = node.predecessors
            if not preds:
                # Dangling non-input node: feed it the model input.
                x = inputs
            elif len(preds) == 1:
                x = tensor_for[preds[0].id]
            else:
                # Merge fan-in along the channel dimension.
                x = layers.Concatenate(axis=-1)([tensor_for[p.id] for p in preds])

            tensor_for[node.id] = x if keras_layer is None else keras_layer(x)

        # Prefer an explicit sink node (no successors); otherwise fall back
        # to the last tensor produced.
        sinks = [n for n in graph.nodes.values() if not n.successors]
        if sinks:
            outputs = tensor_for[sinks[0].id]
        else:
            outputs = tensor_for[list(tensor_for)[-1]]

        model = keras.Model(inputs=inputs, outputs=outputs)
        logger.info(f"Created Keras model with {len(model.layers)} layers")
        return model

    def _create_layer(self, node: GraphNode):
        """
        Create Keras layer from node.

        Args:
            node: GraphNode to convert

        Returns:
            Keras layer, or None for ``input`` nodes. Unknown operations
            fall back to an identity Lambda layer.
        """
        op = node.operation
        get = node.params.get

        if op == "input":
            return None

        # Lazy factories: only the layer for the matching op is built.
        factories = {
            "conv2d": lambda: layers.Conv2D(
                filters=get("filters", 64),
                kernel_size=get("kernel_size", 3),
                strides=get("stride", 1),
                padding=get("padding", "same"),
                activation=None,
            ),
            "maxpool": lambda: layers.MaxPooling2D(
                pool_size=get("pool_size", 2), strides=get("stride", None)
            ),
            "avgpool": lambda: layers.AveragePooling2D(
                pool_size=get("pool_size", 2), strides=get("stride", None)
            ),
            "dense": lambda: layers.Dense(units=get("units", 10), activation=None),
            "relu": lambda: layers.ReLU(),
            "sigmoid": lambda: layers.Activation("sigmoid"),
            "tanh": lambda: layers.Activation("tanh"),
            "softmax": lambda: layers.Softmax(),
            "batchnorm": lambda: layers.BatchNormalization(),
            "dropout": lambda: layers.Dropout(rate=get("rate", 0.5)),
            "flatten": lambda: layers.Flatten(),
        }

        make = factories.get(op)
        if make is None:
            logger.warning(f"Unknown operation: {op}, using Lambda identity")
            return layers.Lambda(lambda x: x)
        return make()

    def compile_model(
        self, model: "keras.Model", config: Optional[Dict[str, Any]] = None
    ) -> "keras.Model":
        """
        Compile Keras model with optimizer and loss.

        Args:
            model: Keras model
            config: Compilation configuration. Recognized keys:
                ``optimizer`` ("adam"/"sgd"/"rmsprop", or any identifier
                Keras itself accepts), ``learning_rate``, ``momentum``
                (SGD only), ``loss``, ``metrics``.

        Returns:
            Compiled model

        Example:
            >>> model = adapter.compile_model(model, {
            ...     'optimizer': 'adam',
            ...     'learning_rate': 1e-3,
            ...     'loss': 'categorical_crossentropy'
            ... })
        """
        cfg = config if config is not None else {}

        optimizer_name = cfg.get("optimizer", "adam")
        lr = cfg.get("learning_rate", 1e-3)

        optimizer_factories = {
            "adam": lambda: keras.optimizers.Adam(learning_rate=lr),
            "sgd": lambda: keras.optimizers.SGD(
                learning_rate=lr, momentum=cfg.get("momentum", 0.9)
            ),
            "rmsprop": lambda: keras.optimizers.RMSprop(learning_rate=lr),
        }
        # Unrecognized names are passed through for Keras to resolve.
        factory = optimizer_factories.get(optimizer_name)
        optimizer = factory() if factory is not None else optimizer_name

        model.compile(
            optimizer=optimizer,
            loss=cfg.get("loss", "categorical_crossentropy"),
            metrics=cfg.get("metrics", ["accuracy"]),
        )
        logger.info(f"Compiled model with {optimizer_name} optimizer")
        return model
@@ -0,0 +1,93 @@
1
+ """Logging configuration for MorphML."""
2
+
3
+ import logging
4
+ from pathlib import Path
5
+ from typing import Optional
6
+
7
+ from rich.console import Console
8
+ from rich.logging import RichHandler
9
+
10
+
11
def setup_logging(
    level: str = "INFO",
    log_file: Optional[str] = None,
    console: bool = True,
    format_string: Optional[str] = None,
) -> logging.Logger:
    """
    Set up logging for MorphML.

    Args:
        level: Log level name (DEBUG, INFO, WARNING, ERROR, CRITICAL),
            case-insensitive.
        log_file: Optional file path for logging. Parent directories are
            created if missing; the file always records at DEBUG level.
        console: Enable console logging with Rich.
        format_string: Custom format string for the file handler (the Rich
            console handler does its own formatting).

    Returns:
        Configured ``morphml`` logger instance.

    Raises:
        ValueError: If ``level`` is not a recognized log level name.

    Example:
        >>> logger = setup_logging(level="DEBUG", log_file="morphml.log")
        >>> logger.info("Starting experiment")
    """
    # Resolve the level name once. A plain getattr on an arbitrary string
    # would surface as a confusing AttributeError, so validate explicitly.
    numeric_level = getattr(logging, level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError(f"Invalid log level: {level!r}")

    # Get root logger for morphml
    logger = logging.getLogger("morphml")
    logger.setLevel(numeric_level)

    # Remove handlers from any previous setup_logging call so log lines
    # are not duplicated.
    logger.handlers.clear()

    # Console handler with Rich
    if console:
        console_handler = RichHandler(
            rich_tracebacks=True,
            tracebacks_show_locals=True,
            console=Console(stderr=True),
        )
        console_handler.setLevel(numeric_level)
        logger.addHandler(console_handler)

    # File handler
    if log_file:
        # Create log directory if it doesn't exist
        log_path = Path(log_file)
        log_path.parent.mkdir(parents=True, exist_ok=True)

        file_handler = logging.FileHandler(log_file)
        file_handler.setLevel(logging.DEBUG)  # Always DEBUG to file

        # Use custom format for file
        if format_string is None:
            format_string = (
                "%(asctime)s - %(name)s - %(levelname)s - "
                "%(filename)s:%(lineno)d - %(message)s"
            )

        file_handler.setFormatter(logging.Formatter(format_string))
        logger.addHandler(file_handler)

    # Prevent propagation to root logger
    logger.propagate = False

    return logger
73
+
74
+
75
def get_logger(name: str) -> logging.Logger:
    """
    Return a child logger under the ``morphml`` namespace.

    Args:
        name: Logger name (usually __name__)

    Returns:
        Logger instance named ``morphml.<name>``

    Example:
        >>> logger = get_logger(__name__)
        >>> logger.info("Module initialized")
    """
    return logging.getLogger("morphml.{}".format(name))
90
+
91
+
92
# Create default logger — module-level fallback so `from morphml.logging_config
# import logger` works even before setup_logging() has been called.
logger = logging.getLogger("morphml")
@@ -0,0 +1,66 @@
1
+ """Meta-learning and transfer learning for MorphML.
2
+
3
+ Enables intelligent search through:
4
+ - Warm-starting from past experiments
5
+ - Performance prediction
6
+ - Transfer learning across tasks
7
+ - Knowledge base management
8
+ - Strategy evolution
9
+
10
+ Author: Eshan Roy <eshanized@proton.me>
11
+ Organization: TONMOY INFRASTRUCTURE & VISION
12
+ """
13
+
14
+ from morphml.meta_learning.architecture_similarity import ArchitectureSimilarity
15
+ from morphml.meta_learning.experiment_database import ExperimentDatabase, TaskMetadata
16
+ from morphml.meta_learning.knowledge_base import (
17
+ ArchitectureEmbedder,
18
+ KnowledgeBase,
19
+ MetaFeatureExtractor,
20
+ VectorStore,
21
+ )
22
+ from morphml.meta_learning.predictors import (
23
+ EnsemblePredictor,
24
+ LearningCurvePredictor,
25
+ ProxyMetricPredictor,
26
+ )
27
+ from morphml.meta_learning.strategy_evolution import (
28
+ AdaptiveOptimizer,
29
+ PortfolioOptimizer,
30
+ ThompsonSamplingSelector,
31
+ UCBSelector,
32
+ )
33
+ from morphml.meta_learning.transfer import (
34
+ ArchitectureTransfer,
35
+ FineTuningStrategy,
36
+ MultiTaskNAS,
37
+ )
38
+ from morphml.meta_learning.warm_start import WarmStarter
39
+
40
+ __all__ = [
41
+ # Warm-starting
42
+ "WarmStarter",
43
+ "TaskMetadata",
44
+ "ExperimentDatabase",
45
+ "ArchitectureSimilarity",
46
+ # Performance prediction
47
+ "ProxyMetricPredictor",
48
+ "LearningCurvePredictor",
49
+ "EnsemblePredictor",
50
+ # Knowledge base
51
+ "KnowledgeBase",
52
+ "ArchitectureEmbedder",
53
+ "MetaFeatureExtractor",
54
+ "VectorStore",
55
+ # Strategy evolution
56
+ "UCBSelector",
57
+ "ThompsonSamplingSelector",
58
+ "AdaptiveOptimizer",
59
+ "PortfolioOptimizer",
60
+ # Transfer learning
61
+ "ArchitectureTransfer",
62
+ "FineTuningStrategy",
63
+ "MultiTaskNAS",
64
+ ]
65
+
66
+ __version__ = "0.1.0"