morphml 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of morphml has been flagged as possibly problematic. See the release advisory for more details.
- morphml/__init__.py +14 -0
- morphml/api/__init__.py +26 -0
- morphml/api/app.py +326 -0
- morphml/api/auth.py +193 -0
- morphml/api/client.py +338 -0
- morphml/api/models.py +132 -0
- morphml/api/rate_limit.py +192 -0
- morphml/benchmarking/__init__.py +36 -0
- morphml/benchmarking/comparison.py +430 -0
- morphml/benchmarks/__init__.py +56 -0
- morphml/benchmarks/comparator.py +409 -0
- morphml/benchmarks/datasets.py +280 -0
- morphml/benchmarks/metrics.py +199 -0
- morphml/benchmarks/openml_suite.py +201 -0
- morphml/benchmarks/problems.py +289 -0
- morphml/benchmarks/suite.py +318 -0
- morphml/cli/__init__.py +5 -0
- morphml/cli/commands/experiment.py +329 -0
- morphml/cli/main.py +457 -0
- morphml/cli/quickstart.py +312 -0
- morphml/config.py +278 -0
- morphml/constraints/__init__.py +19 -0
- morphml/constraints/handler.py +205 -0
- morphml/constraints/predicates.py +285 -0
- morphml/core/__init__.py +3 -0
- morphml/core/crossover.py +449 -0
- morphml/core/dsl/README.md +359 -0
- morphml/core/dsl/__init__.py +72 -0
- morphml/core/dsl/ast_nodes.py +364 -0
- morphml/core/dsl/compiler.py +318 -0
- morphml/core/dsl/layers.py +368 -0
- morphml/core/dsl/lexer.py +336 -0
- morphml/core/dsl/parser.py +455 -0
- morphml/core/dsl/search_space.py +386 -0
- morphml/core/dsl/syntax.py +199 -0
- morphml/core/dsl/type_system.py +361 -0
- morphml/core/dsl/validator.py +386 -0
- morphml/core/graph/__init__.py +40 -0
- morphml/core/graph/edge.py +124 -0
- morphml/core/graph/graph.py +507 -0
- morphml/core/graph/mutations.py +409 -0
- morphml/core/graph/node.py +196 -0
- morphml/core/graph/serialization.py +361 -0
- morphml/core/graph/visualization.py +431 -0
- morphml/core/objectives/__init__.py +20 -0
- morphml/core/search/__init__.py +33 -0
- morphml/core/search/individual.py +252 -0
- morphml/core/search/parameters.py +453 -0
- morphml/core/search/population.py +375 -0
- morphml/core/search/search_engine.py +340 -0
- morphml/distributed/__init__.py +76 -0
- morphml/distributed/fault_tolerance.py +497 -0
- morphml/distributed/health_monitor.py +348 -0
- morphml/distributed/master.py +709 -0
- morphml/distributed/proto/README.md +224 -0
- morphml/distributed/proto/__init__.py +74 -0
- morphml/distributed/proto/worker.proto +170 -0
- morphml/distributed/proto/worker_pb2.py +79 -0
- morphml/distributed/proto/worker_pb2_grpc.py +423 -0
- morphml/distributed/resource_manager.py +416 -0
- morphml/distributed/scheduler.py +567 -0
- morphml/distributed/storage/__init__.py +33 -0
- morphml/distributed/storage/artifacts.py +381 -0
- morphml/distributed/storage/cache.py +366 -0
- morphml/distributed/storage/checkpointing.py +329 -0
- morphml/distributed/storage/database.py +459 -0
- morphml/distributed/worker.py +549 -0
- morphml/evaluation/__init__.py +5 -0
- morphml/evaluation/heuristic.py +237 -0
- morphml/exceptions.py +55 -0
- morphml/execution/__init__.py +5 -0
- morphml/execution/local_executor.py +350 -0
- morphml/integrations/__init__.py +28 -0
- morphml/integrations/jax_adapter.py +206 -0
- morphml/integrations/pytorch_adapter.py +530 -0
- morphml/integrations/sklearn_adapter.py +206 -0
- morphml/integrations/tensorflow_adapter.py +230 -0
- morphml/logging_config.py +93 -0
- morphml/meta_learning/__init__.py +66 -0
- morphml/meta_learning/architecture_similarity.py +277 -0
- morphml/meta_learning/experiment_database.py +240 -0
- morphml/meta_learning/knowledge_base/__init__.py +19 -0
- morphml/meta_learning/knowledge_base/embedder.py +179 -0
- morphml/meta_learning/knowledge_base/knowledge_base.py +313 -0
- morphml/meta_learning/knowledge_base/meta_features.py +265 -0
- morphml/meta_learning/knowledge_base/vector_store.py +271 -0
- morphml/meta_learning/predictors/__init__.py +27 -0
- morphml/meta_learning/predictors/ensemble.py +221 -0
- morphml/meta_learning/predictors/gnn_predictor.py +552 -0
- morphml/meta_learning/predictors/learning_curve.py +231 -0
- morphml/meta_learning/predictors/proxy_metrics.py +261 -0
- morphml/meta_learning/strategy_evolution/__init__.py +27 -0
- morphml/meta_learning/strategy_evolution/adaptive_optimizer.py +226 -0
- morphml/meta_learning/strategy_evolution/bandit.py +276 -0
- morphml/meta_learning/strategy_evolution/portfolio.py +230 -0
- morphml/meta_learning/transfer.py +581 -0
- morphml/meta_learning/warm_start.py +286 -0
- morphml/optimizers/__init__.py +74 -0
- morphml/optimizers/adaptive_operators.py +399 -0
- morphml/optimizers/bayesian/__init__.py +52 -0
- morphml/optimizers/bayesian/acquisition.py +387 -0
- morphml/optimizers/bayesian/base.py +319 -0
- morphml/optimizers/bayesian/gaussian_process.py +635 -0
- morphml/optimizers/bayesian/smac.py +534 -0
- morphml/optimizers/bayesian/tpe.py +411 -0
- morphml/optimizers/differential_evolution.py +220 -0
- morphml/optimizers/evolutionary/__init__.py +61 -0
- morphml/optimizers/evolutionary/cma_es.py +416 -0
- morphml/optimizers/evolutionary/differential_evolution.py +556 -0
- morphml/optimizers/evolutionary/encoding.py +426 -0
- morphml/optimizers/evolutionary/particle_swarm.py +449 -0
- morphml/optimizers/genetic_algorithm.py +486 -0
- morphml/optimizers/gradient_based/__init__.py +22 -0
- morphml/optimizers/gradient_based/darts.py +550 -0
- morphml/optimizers/gradient_based/enas.py +585 -0
- morphml/optimizers/gradient_based/operations.py +474 -0
- morphml/optimizers/gradient_based/utils.py +601 -0
- morphml/optimizers/hill_climbing.py +169 -0
- morphml/optimizers/multi_objective/__init__.py +56 -0
- morphml/optimizers/multi_objective/indicators.py +504 -0
- morphml/optimizers/multi_objective/nsga2.py +647 -0
- morphml/optimizers/multi_objective/visualization.py +427 -0
- morphml/optimizers/nsga2.py +308 -0
- morphml/optimizers/random_search.py +172 -0
- morphml/optimizers/simulated_annealing.py +181 -0
- morphml/plugins/__init__.py +35 -0
- morphml/plugins/custom_evaluator_example.py +81 -0
- morphml/plugins/custom_optimizer_example.py +63 -0
- morphml/plugins/plugin_system.py +454 -0
- morphml/reports/__init__.py +30 -0
- morphml/reports/generator.py +362 -0
- morphml/tracking/__init__.py +7 -0
- morphml/tracking/experiment.py +309 -0
- morphml/tracking/logger.py +301 -0
- morphml/tracking/reporter.py +357 -0
- morphml/utils/__init__.py +6 -0
- morphml/utils/checkpoint.py +189 -0
- morphml/utils/comparison.py +390 -0
- morphml/utils/export.py +407 -0
- morphml/utils/progress.py +392 -0
- morphml/utils/validation.py +392 -0
- morphml/version.py +7 -0
- morphml/visualization/__init__.py +50 -0
- morphml/visualization/analytics.py +423 -0
- morphml/visualization/architecture_diagrams.py +353 -0
- morphml/visualization/architecture_plot.py +223 -0
- morphml/visualization/convergence_plot.py +174 -0
- morphml/visualization/crossover_viz.py +386 -0
- morphml/visualization/graph_viz.py +338 -0
- morphml/visualization/pareto_plot.py +149 -0
- morphml/visualization/plotly_dashboards.py +422 -0
- morphml/visualization/population.py +309 -0
- morphml/visualization/progress.py +260 -0
- morphml-1.0.0.dist-info/METADATA +434 -0
- morphml-1.0.0.dist-info/RECORD +158 -0
- morphml-1.0.0.dist-info/WHEEL +4 -0
- morphml-1.0.0.dist-info/entry_points.txt +3 -0
- morphml-1.0.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,455 @@
|
|
|
1
|
+
"""Recursive descent parser for MorphML DSL.
|
|
2
|
+
|
|
3
|
+
Parses token stream into Abstract Syntax Tree (AST).
|
|
4
|
+
|
|
5
|
+
Author: Eshan Roy <eshanized@proton.me>
|
|
6
|
+
Organization: TONMOY INFRASTRUCTURE & VISION
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from typing import Any, Dict, List, Optional
|
|
10
|
+
|
|
11
|
+
from morphml.core.dsl.ast_nodes import (
|
|
12
|
+
ConstraintNode,
|
|
13
|
+
EvolutionNode,
|
|
14
|
+
ExperimentNode,
|
|
15
|
+
LayerNode,
|
|
16
|
+
ParamNode,
|
|
17
|
+
SearchSpaceNode,
|
|
18
|
+
)
|
|
19
|
+
from morphml.core.dsl.lexer import Token
|
|
20
|
+
from morphml.core.dsl.syntax import TokenType
|
|
21
|
+
from morphml.exceptions import DSLError
|
|
22
|
+
from morphml.logging_config import get_logger
|
|
23
|
+
|
|
24
|
+
logger = get_logger(__name__)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Parser:
    """
    Recursive descent parser for MorphML DSL.

    Converts a stream of tokens into an Abstract Syntax Tree (AST)
    that can be compiled into executable search space definitions.

    Grammar:
        experiment := search_space_def [evolution_def] [constraint_list]
        search_space_def := "SearchSpace" "(" param_list ")"
        layer_def := "Layer" "." layer_type "(" param_list ")"
        evolution_def := "Evolution" "(" param_list ")"

    Example:
        >>> tokens = lexer.tokenize()
        >>> parser = Parser(tokens)
        >>> ast = parser.parse()
    """

    def __init__(self, tokens: List[Token]):
        """
        Initialize parser with token stream.

        Args:
            tokens: List of tokens from lexer
        """
        self.tokens = tokens
        self.position = 0
        # None marks an empty token stream (and, after consuming all
        # tokens, a past-the-end position).
        self.current_token: Optional[Token] = tokens[0] if tokens else None

    def parse(self) -> ExperimentNode:
        """
        Parse complete experiment definition.

        Returns:
            ExperimentNode representing the entire experiment

        Raises:
            DSLError: If parsing fails
        """
        logger.debug("Starting parse")

        # The search space is mandatory and must come first.
        search_space = self.parse_search_space()

        # Optional evolution configuration.
        evolution = None
        if self._match(TokenType.EVOLUTION):
            evolution = self.parse_evolution()

        # Zero or more constraint definitions.
        constraints = []
        while self._match(TokenType.CONSTRAINT):
            constraints.append(self.parse_constraint())

        # Nothing may follow the experiment definition.
        self._expect(TokenType.EOF)

        logger.debug(f"Parse complete: {len(search_space.layers)} layers")

        return ExperimentNode(
            search_space=search_space, evolution=evolution, constraints=constraints
        )

    def parse_search_space(self) -> SearchSpaceNode:
        """
        Parse search space definition.

        Grammar: "SearchSpace" "(" param_list ")"

        Returns:
            SearchSpaceNode
        """
        self._expect(TokenType.SEARCHSPACE)
        self._expect(TokenType.LPAREN)

        layers: List[LayerNode] = []
        global_params: Dict[str, ParamNode] = {}
        name: Optional[str] = None

        # Parse keyword arguments; "layers" and "name" are special-cased,
        # anything else becomes a global parameter.
        while not self._match(TokenType.RPAREN):
            param_name_token = self._expect(TokenType.IDENTIFIER)
            param_name = param_name_token.value
            self._expect(TokenType.ASSIGN)

            if param_name == "layers":
                layers = self._parse_layer_list()
            elif param_name == "name":
                name_token = self._expect(TokenType.STRING)
                name = name_token.value
            else:
                # Global parameter; scalars are normalized to one-element lists.
                param_value = self._parse_value_expr()
                global_params[param_name] = ParamNode(
                    name=param_name,
                    values=param_value if isinstance(param_value, list) else [param_value],
                )

            # Optional comma between keyword arguments
            if self._match(TokenType.COMMA):
                self._advance()

        self._expect(TokenType.RPAREN)

        return SearchSpaceNode(layers=layers, global_params=global_params, name=name)

    def _parse_layer_list(self) -> List[LayerNode]:
        """
        Parse list of layers.

        Grammar: "[" layer_def ("," layer_def)* "]"

        Returns:
            List of LayerNode
        """
        self._expect(TokenType.LBRACKET)
        layers = []

        while not self._match(TokenType.RBRACKET):
            layers.append(self.parse_layer())

            # A layer must be followed by ',' or the closing ']'
            if self._match(TokenType.COMMA):
                self._advance()
            elif not self._match(TokenType.RBRACKET):
                self._error("Expected ',' or ']' in layer list")

        self._expect(TokenType.RBRACKET)
        return layers

    def parse_layer(self) -> LayerNode:
        """
        Parse layer definition.

        Grammar: "Layer" "." layer_type "(" param_list ")"

        Returns:
            LayerNode
        """
        self._expect(TokenType.LAYER)
        self._expect(TokenType.DOT)

        layer_type_token = self._expect(TokenType.IDENTIFIER)
        layer_type = layer_type_token.value

        self._expect(TokenType.LPAREN)

        # Parse parameters
        params = self._parse_param_list()

        self._expect(TokenType.RPAREN)

        return LayerNode(layer_type=layer_type, params=params)

    def _parse_param_list(self) -> Dict[str, ParamNode]:
        """
        Parse parameter list.

        Grammar: param ("," param)*
                 param := IDENTIFIER "=" value_expr

        Returns:
            Dictionary mapping parameter names to ParamNode
        """
        params: Dict[str, ParamNode] = {}

        while self._match(TokenType.IDENTIFIER):
            name_token = self._advance()
            name = name_token.value

            self._expect(TokenType.ASSIGN)

            value_expr = self._parse_value_expr()

            # Scalars are wrapped in a one-element list so every ParamNode
            # carries a list of candidate values.
            if isinstance(value_expr, list):
                params[name] = ParamNode(name=name, values=value_expr)
            else:
                params[name] = ParamNode(name=name, values=[value_expr])

            # A comma continues the list; anything else ends it.
            if self._match(TokenType.COMMA):
                self._advance()
            else:
                break

        return params

    def _parse_value_expr(self) -> Any:
        """
        Parse value expression (single value or list).

        Grammar: value | "[" value ("," value)* "]"

        Returns:
            Single value or list of values
        """
        if self._match(TokenType.LBRACKET):
            return self._parse_value_list()
        else:
            return self._parse_value()

    def _parse_value_list(self) -> List[Any]:
        """
        Parse list of values.

        Grammar: "[" value ("," value)* "]"

        Returns:
            List of values
        """
        self._expect(TokenType.LBRACKET)
        values = []

        while not self._match(TokenType.RBRACKET):
            values.append(self._parse_value())

            # A value must be followed by ',' or the closing ']'
            if self._match(TokenType.COMMA):
                self._advance()
            elif not self._match(TokenType.RBRACKET):
                self._error("Expected ',' or ']' in value list")

        self._expect(TokenType.RBRACKET)
        return values

    def _parse_value(self) -> Any:
        """
        Parse a single value.

        Grammar: NUMBER | STRING | BOOLEAN | IDENTIFIER

        Returns:
            Value (int, float, str, bool)

        Raises:
            DSLError: If the current token is not a value token
        """
        if self._match(
            TokenType.NUMBER, TokenType.STRING, TokenType.BOOLEAN, TokenType.IDENTIFIER
        ):
            return self._advance().value

        # Bug fix: report a proper DSLError even at end of input. Previously
        # this accessed self.current_token.type.name unguarded and raised
        # AttributeError when current_token was None (same guard as _expect).
        got = self.current_token.type.name if self.current_token else "EOF"
        self._error(f"Expected value (number, string, boolean, or identifier), got {got}")

    def parse_evolution(self) -> EvolutionNode:
        """
        Parse evolution configuration.

        Grammar: "Evolution" "(" param_list ")"

        Returns:
            EvolutionNode

        Raises:
            DSLError: If the 'strategy' parameter is missing
        """
        self._expect(TokenType.EVOLUTION)
        self._expect(TokenType.LPAREN)

        params: Dict[str, Any] = {}
        strategy: Optional[str] = None

        # Parse parameters; "strategy" is extracted, the rest are passed through.
        while self._match(TokenType.IDENTIFIER):
            name_token = self._advance()
            name = name_token.value

            self._expect(TokenType.ASSIGN)

            value = self._parse_value_expr()

            if name == "strategy":
                # A list value collapses to its first element.
                strategy = value if isinstance(value, str) else value[0]
            else:
                params[name] = value

            # Optional comma between parameters
            if self._match(TokenType.COMMA):
                self._advance()

        self._expect(TokenType.RPAREN)

        if not strategy:
            self._error("Evolution must specify 'strategy' parameter")

        return EvolutionNode(strategy=strategy, params=params)

    def parse_constraint(self) -> ConstraintNode:
        """
        Parse constraint definition.

        Grammar: "Constraint" "(" param_list ")"

        Returns:
            ConstraintNode

        Raises:
            DSLError: If the 'type' parameter is missing
        """
        self._expect(TokenType.CONSTRAINT)
        self._expect(TokenType.LPAREN)

        params: Dict[str, Any] = {}
        constraint_type: Optional[str] = None

        # Parse parameters; "type" is extracted, the rest are passed through.
        while self._match(TokenType.IDENTIFIER):
            name_token = self._advance()
            name = name_token.value

            self._expect(TokenType.ASSIGN)

            value = self._parse_value_expr()

            if name == "type":
                # A list value collapses to its first element.
                constraint_type = value if isinstance(value, str) else value[0]
            else:
                params[name] = value

            # Optional comma between parameters
            if self._match(TokenType.COMMA):
                self._advance()

        self._expect(TokenType.RPAREN)

        if not constraint_type:
            self._error("Constraint must specify 'type' parameter")

        return ConstraintNode(constraint_type=constraint_type, params=params)

    # Helper methods for token manipulation

    def _current_token(self) -> Token:
        """Get current token."""
        return self.current_token

    def _peek_token(self, offset: int = 1) -> Optional[Token]:
        """Look ahead at token without consuming; None past end of stream."""
        pos = self.position + offset
        if pos < len(self.tokens):
            return self.tokens[pos]
        return None

    def _advance(self) -> Token:
        """Move to next token and return current."""
        token = self.current_token
        self.position += 1
        if self.position < len(self.tokens):
            self.current_token = self.tokens[self.position]
        else:
            self.current_token = None
        return token

    def _match(self, *token_types: TokenType) -> bool:
        """Check if current token matches any of the given types."""
        if self.current_token is None:
            return False
        return self.current_token.type in token_types

    def _expect(self, token_type: TokenType) -> Token:
        """
        Expect specific token type, raise error if not found.

        Args:
            token_type: Expected token type

        Returns:
            Current token

        Raises:
            DSLError: If current token doesn't match expected type
        """
        if not self._match(token_type):
            self._error(
                f"Expected {token_type.name}, got {self.current_token.type.name if self.current_token else 'EOF'}"
            )

        return self._advance()

    def _error(self, message: str) -> None:
        """
        Raise DSLError with current position information.

        Args:
            message: Error message

        Raises:
            DSLError: With formatted error message (this method always raises)
        """
        if self.current_token:
            raise DSLError(
                f"Parse error: {message}\n"
                f" at line {self.current_token.line}, column {self.current_token.column}\n"
                f" near token: {self.current_token}",
                line=self.current_token.line,
                column=self.current_token.column,
            )
        else:
            raise DSLError(f"Parse error: {message}\n at end of input")
|
|
430
|
+
|
|
431
|
+
|
|
432
|
+
def parse_dsl(source: str) -> ExperimentNode:
    """
    Lex and parse MorphML DSL source in a single call.

    Args:
        source: DSL source code

    Returns:
        ExperimentNode AST

    Example:
        >>> source = '''
        ... SearchSpace(
        ...     layers=[Layer.conv2d(filters=[32, 64])]
        ... )
        ... '''
        >>> ast = parse_dsl(source)
    """
    # Deferred import, kept at function scope as in the original module
    # (presumably to avoid an import cycle with the lexer — confirm).
    from morphml.core.dsl.lexer import Lexer

    token_stream = Lexer(source).tokenize()
    return Parser(token_stream).parse()
|