nlp2mcp 0.7.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. nlp2mcp-0.7.0/PKG-INFO +670 -0
  2. nlp2mcp-0.7.0/README.md +630 -0
  3. nlp2mcp-0.7.0/nlp2mcp.egg-info/PKG-INFO +670 -0
  4. nlp2mcp-0.7.0/nlp2mcp.egg-info/SOURCES.txt +69 -0
  5. nlp2mcp-0.7.0/nlp2mcp.egg-info/dependency_links.txt +1 -0
  6. nlp2mcp-0.7.0/nlp2mcp.egg-info/entry_points.txt +2 -0
  7. nlp2mcp-0.7.0/nlp2mcp.egg-info/requires.txt +19 -0
  8. nlp2mcp-0.7.0/nlp2mcp.egg-info/top_level.txt +1 -0
  9. nlp2mcp-0.7.0/pyproject.toml +195 -0
  10. nlp2mcp-0.7.0/setup.cfg +4 -0
  11. nlp2mcp-0.7.0/setup.py +7 -0
  12. nlp2mcp-0.7.0/src/__init__.py +3 -0
  13. nlp2mcp-0.7.0/src/ad/__init__.py +49 -0
  14. nlp2mcp-0.7.0/src/ad/ad_core.py +451 -0
  15. nlp2mcp-0.7.0/src/ad/api.py +201 -0
  16. nlp2mcp-0.7.0/src/ad/constraint_jacobian.py +620 -0
  17. nlp2mcp-0.7.0/src/ad/derivative_rules.py +1237 -0
  18. nlp2mcp-0.7.0/src/ad/evaluator.py +310 -0
  19. nlp2mcp-0.7.0/src/ad/gradient.py +295 -0
  20. nlp2mcp-0.7.0/src/ad/index_mapping.py +433 -0
  21. nlp2mcp-0.7.0/src/ad/jacobian.py +278 -0
  22. nlp2mcp-0.7.0/src/ad/minmax_flattener.py +613 -0
  23. nlp2mcp-0.7.0/src/ad/sparsity.py +229 -0
  24. nlp2mcp-0.7.0/src/ad/term_collection.py +493 -0
  25. nlp2mcp-0.7.0/src/ad/validation.py +329 -0
  26. nlp2mcp-0.7.0/src/cli.py +400 -0
  27. nlp2mcp-0.7.0/src/config.py +40 -0
  28. nlp2mcp-0.7.0/src/config_loader.py +66 -0
  29. nlp2mcp-0.7.0/src/diagnostics/__init__.py +9 -0
  30. nlp2mcp-0.7.0/src/diagnostics/convexity/__init__.py +20 -0
  31. nlp2mcp-0.7.0/src/diagnostics/convexity/pattern_matcher.py +225 -0
  32. nlp2mcp-0.7.0/src/diagnostics/convexity/patterns.py +407 -0
  33. nlp2mcp-0.7.0/src/diagnostics/matrix_market.py +243 -0
  34. nlp2mcp-0.7.0/src/diagnostics/statistics.py +161 -0
  35. nlp2mcp-0.7.0/src/emit/__init__.py +26 -0
  36. nlp2mcp-0.7.0/src/emit/emit_gams.py +184 -0
  37. nlp2mcp-0.7.0/src/emit/equations.py +154 -0
  38. nlp2mcp-0.7.0/src/emit/expr_to_gams.py +251 -0
  39. nlp2mcp-0.7.0/src/emit/model.py +190 -0
  40. nlp2mcp-0.7.0/src/emit/original_symbols.py +150 -0
  41. nlp2mcp-0.7.0/src/emit/templates.py +268 -0
  42. nlp2mcp-0.7.0/src/gams/__init__.py +0 -0
  43. nlp2mcp-0.7.0/src/gams/gams_grammar.lark +246 -0
  44. nlp2mcp-0.7.0/src/ir/__init__.py +19 -0
  45. nlp2mcp-0.7.0/src/ir/ast.py +127 -0
  46. nlp2mcp-0.7.0/src/ir/condition_eval.py +213 -0
  47. nlp2mcp-0.7.0/src/ir/minmax_detection.py +393 -0
  48. nlp2mcp-0.7.0/src/ir/model_ir.py +71 -0
  49. nlp2mcp-0.7.0/src/ir/normalize.py +238 -0
  50. nlp2mcp-0.7.0/src/ir/parser.py +1476 -0
  51. nlp2mcp-0.7.0/src/ir/preprocessor.py +482 -0
  52. nlp2mcp-0.7.0/src/ir/symbols.py +96 -0
  53. nlp2mcp-0.7.0/src/kkt/__init__.py +34 -0
  54. nlp2mcp-0.7.0/src/kkt/assemble.py +299 -0
  55. nlp2mcp-0.7.0/src/kkt/complementarity.py +187 -0
  56. nlp2mcp-0.7.0/src/kkt/kkt_system.py +133 -0
  57. nlp2mcp-0.7.0/src/kkt/naming.py +162 -0
  58. nlp2mcp-0.7.0/src/kkt/objective.py +147 -0
  59. nlp2mcp-0.7.0/src/kkt/partition.py +185 -0
  60. nlp2mcp-0.7.0/src/kkt/reformulation.py +902 -0
  61. nlp2mcp-0.7.0/src/kkt/scaling.py +193 -0
  62. nlp2mcp-0.7.0/src/kkt/stationarity.py +488 -0
  63. nlp2mcp-0.7.0/src/logging_config.py +128 -0
  64. nlp2mcp-0.7.0/src/utils/__init__.py +24 -0
  65. nlp2mcp-0.7.0/src/utils/error_codes.py +143 -0
  66. nlp2mcp-0.7.0/src/utils/error_formatter.py +330 -0
  67. nlp2mcp-0.7.0/src/utils/errors.py +214 -0
  68. nlp2mcp-0.7.0/src/validation/__init__.py +26 -0
  69. nlp2mcp-0.7.0/src/validation/gams_check.py +212 -0
  70. nlp2mcp-0.7.0/src/validation/model.py +242 -0
  71. nlp2mcp-0.7.0/src/validation/numerical.py +247 -0
nlp2mcp-0.7.0/PKG-INFO ADDED
@@ -0,0 +1,670 @@
1
+ Metadata-Version: 2.4
2
+ Name: nlp2mcp
3
+ Version: 0.7.0
4
+ Summary: Convert GAMS NLP models to MCP via KKT conditions
5
+ Author-email: Jeffrey Horn <jeffreydhorn@gmail.com>
6
+ License-Expression: MIT
7
+ Keywords: optimization,nlp,mcp,gams,kkt
8
+ Classifier: Development Status :: 4 - Beta
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Intended Audience :: Science/Research
11
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
12
+ Classifier: Topic :: Software Development :: Code Generators
13
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3 :: Only
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Operating System :: OS Independent
20
+ Classifier: Environment :: Console
21
+ Classifier: Natural Language :: English
22
+ Classifier: Typing :: Typed
23
+ Requires-Python: >=3.11
24
+ Description-Content-Type: text/markdown
25
+ Requires-Dist: lark>=1.1.9
26
+ Requires-Dist: numpy>=1.24.0
27
+ Requires-Dist: click>=8.0.0
28
+ Requires-Dist: tomli>=2.0.0; python_version < "3.11"
29
+ Provides-Extra: dev
30
+ Requires-Dist: pytest>=7.4.4; extra == "dev"
31
+ Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
32
+ Requires-Dist: pytest-xdist>=3.8.0; extra == "dev"
33
+ Requires-Dist: black>=23.0.0; extra == "dev"
34
+ Requires-Dist: ruff>=0.1.0; extra == "dev"
35
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
36
+ Provides-Extra: docs
37
+ Requires-Dist: sphinx>=7.0.0; extra == "docs"
38
+ Requires-Dist: sphinx-rtd-theme>=1.3.0; extra == "docs"
39
+ Requires-Dist: sphinx-autodoc-typehints>=1.25.0; extra == "docs"
40
+
41
+ # NLP2MCP: Convert GAMS NLP to MCP via KKT Conditions
42
+
43
+ ![CI](https://github.com/jeffreyhorn/nlp2mcp/workflows/CI/badge.svg)
44
+ ![Lint](https://github.com/jeffreyhorn/nlp2mcp/workflows/Lint/badge.svg)
45
+ [![PyPI version](https://img.shields.io/pypi/v/nlp2mcp.svg)](https://pypi.org/project/nlp2mcp/)
46
+ [![Python Support](https://img.shields.io/pypi/pyversions/nlp2mcp.svg)](https://pypi.org/project/nlp2mcp/)
47
+
48
+ A Python tool that transforms Nonlinear Programming (NLP) models written in GAMS into equivalent Mixed Complementarity Problems (MCP) by generating the Karush-Kuhn-Tucker (KKT) conditions.
49
+
50
+ ## Overview
51
+
52
+ This project automates the process of converting a GAMS NLP model into its KKT-based MCP formulation, which is useful for:
53
+
54
+ - **Mathematical research**: Analyzing stationarity conditions of nonlinear programs
55
+ - **Solver development**: Testing MCP solvers on problems derived from NLPs
56
+ - **Educational purposes**: Understanding the relationship between NLP and MCP formulations
57
+ - **Advanced modeling**: Working with equilibrium problems and complementarity conditions
58
+
59
+ ## Background
60
+
61
+ The standard recipe for NLP → MCP transformation is:
62
+
63
+ 1. Start with a nonlinear program (NLP)
64
+ 2. Write down its KKT (Karush-Kuhn-Tucker) conditions
65
+ 3. Encode those KKT conditions as a Mixed Complementarity Problem (MCP):
66
+ - Equations for stationarity and equality constraints
67
+ - Complementarity pairs for inequalities and bounds
68
+
69
+ For more details, see [docs/concepts/IDEA.md](docs/concepts/IDEA.md) and [docs/concepts/NLP2MCP_HIGH_LEVEL.md](docs/concepts/NLP2MCP_HIGH_LEVEL.md).
70
+
71
+ ## Features
72
+
73
+ Epic 1 (Sprints 1-5) is complete, delivering core NLP to MCP transformation capabilities. For detailed sprint summaries, see [docs/planning/EPIC_1/SUMMARY.md](docs/planning/EPIC_1/SUMMARY.md).
74
+
75
+ **Sprint 6 Progress** (Epic 2 - Sprint 6: Convexity Heuristics, Bug Fixes, GAMSLib, UX):
76
+ - [x] Day 0: Pre-Sprint Research & Setup
77
+ - [x] Day 1: Nested Min/Max Research
78
+ - [x] Day 2: Nested Min/Max Implementation
79
+ - [x] Day 3: Convexity Heuristics - Core Patterns
80
+ - [x] Day 4: Convexity Heuristics - CLI Integration
81
+ - [x] Day 5: GAMSLib Integration - Model Ingestion
82
+ - [x] Day 6: GAMSLib Integration - Conversion Dashboard
83
+ - [x] Day 7: UX Improvements - Error Message Integration
84
+ - [x] Day 8: UX Improvements - Documentation & Polish
85
+ - [x] Day 9: Testing & Quality Assurance
86
+ - [x] Day 10: Release Preparation & Sprint Review
87
+
88
+ For the detailed Sprint 6 plan, see [docs/planning/EPIC_2/SPRINT_6/PLAN.md](docs/planning/EPIC_2/SPRINT_6/PLAN.md).
89
+
90
+ **Sprint 7 Progress** (Epic 2 - Sprint 7: Parser Enhancements & GAMSLib Expansion):
91
+ - [x] Day 0: Pre-Sprint Setup & Kickoff
92
+ - [x] Day 1: Preprocessor Directives (Part 1)
93
+ - [x] Day 2: Preprocessor Directives (Part 2) + Set Range Syntax (Part 1)
94
+ - [x] Day 3: Set Range Syntax (Part 2)
95
+ - [x] Day 4: Parser Integration & Testing + Quick Wins
96
+ - [x] Day 5: GAMSLib Retest & Checkpoint 1 (Fixtures created, parse rate 20% ModelIR / 50% grammar)
97
+ - [x] Day 6: Test Performance (Part 1) - pytest-xdist (1277 tests pass in parallel, ~2min runtime)
98
+ - [x] Day 7: Test Performance (Part 2) & Checkpoint 2 (Fast: 29s, Full: 111s, 8 slow tests marked)
99
+ - [x] Day 8: Convexity UX + Multi-Dim Fixtures
100
+ - [x] Day 9: CI Automation + Statement Fixtures & Checkpoint 3 (Regression CI + 34 total fixtures)
101
+ - [x] Day 10: Sprint Review, Release & Checkpoint 4
102
+
103
+ For the detailed Sprint 7 plan, see [docs/planning/EPIC_2/SPRINT_7/PLAN.md](docs/planning/EPIC_2/SPRINT_7/PLAN.md).
104
+
105
+ ## Installation
106
+
107
+ ### Requirements
108
+
109
+ - Python 3.11 or higher
110
+ - pip 21.3 or higher
111
+
112
+ ### Quick Start
113
+
114
+ Install from PyPI:
115
+
116
+ ```bash
117
+ pip install nlp2mcp
118
+ ```
119
+
120
+ Verify installation:
121
+
122
+ ```bash
123
+ nlp2mcp --help
124
+ ```
125
+
126
+ ### From Source (Development)
127
+
128
+ For contributing or development:
129
+
130
+ ```bash
131
+ # Clone the repository
132
+ git clone https://github.com/jeffreyhorn/nlp2mcp.git
133
+ cd nlp2mcp
134
+
135
+ # Create a virtual environment
136
+ python -m venv .venv
137
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
138
+
139
+ # Install with development dependencies
140
+ make install-dev
141
+
142
+ # Or manually:
143
+ pip install -e .
144
+ pip install -r requirements.txt
145
+ ```
146
+
147
+ ### Beta/Pre-release Versions
148
+
149
+ To test beta releases:
150
+
151
+ ```bash
152
+ # Install specific version
153
+ pip install nlp2mcp==0.5.0b0
154
+
155
+ # Or install from TestPyPI
156
+ pip install --index-url https://test.pypi.org/simple/ \
157
+ --extra-index-url https://pypi.org/simple/ \
158
+ nlp2mcp
159
+
160
+ # Or install directly from GitHub
161
+ pip install git+https://github.com/jeffreyhorn/nlp2mcp.git
162
+ ```
163
+
164
+ ## Usage
165
+
166
+ ### Command Line Interface
167
+
168
+ Note: the package exposes a console script `nlp2mcp` (defined in `pyproject.toml` as
169
+ `[project.scripts] nlp2mcp = "src.cli:main"`). After installing the package (for example
170
+ with `pip install -e .` or `pip install nlp2mcp`), the `nlp2mcp` command will be available on your PATH
171
+ and will invoke `src.cli:main`.
172
+
173
+ If you prefer not to install the package, you can run the CLI directly from the repository with:
174
+
175
+ ```bash
176
+ python -m src.cli examples/simple_nlp.gms -o output_mcp.gms
177
+ ```
178
+
179
+
180
+ ```bash
181
+ # Convert NLP to MCP
182
+ nlp2mcp input.gms -o output_mcp.gms
183
+
184
+ # Print to stdout
185
+ nlp2mcp input.gms
186
+
187
+ # Verbose output (show pipeline stages)
188
+ nlp2mcp input.gms -o output.gms -v
189
+
190
+ # Very verbose (show detailed statistics)
191
+ nlp2mcp input.gms -o output.gms -vv
192
+
193
+ # Quiet mode (errors only)
194
+ nlp2mcp input.gms -o output.gms --quiet
195
+
196
+ # Show model statistics
197
+ nlp2mcp input.gms -o output.gms --stats
198
+
199
+ # Export Jacobian structure for analysis
200
+ nlp2mcp input.gms -o output.gms --dump-jacobian jacobian.mtx
201
+
202
+ # Apply Curtis-Reid scaling for ill-conditioned systems
203
+ nlp2mcp input.gms -o output.gms --scale auto
204
+
205
+ # Enable smooth abs() approximation
206
+ nlp2mcp input.gms -o output.gms --smooth-abs
207
+
208
+ # Customize model name
209
+ nlp2mcp input.gms -o output.gms --model-name my_mcp_model
210
+
211
+ # Disable explanatory comments
212
+ nlp2mcp input.gms -o output.gms --no-comments
213
+
214
+ # Show excluded duplicate bounds
215
+ nlp2mcp input.gms -o output.gms --show-excluded
216
+ ```
217
+
218
+ **CLI Options:**
219
+ - `-o, --output FILE`: Output file path (default: stdout)
220
+ - `-v, --verbose`: Increase verbosity (stackable: -v, -vv, -vvv)
221
+ - `-q, --quiet`: Suppress non-error output
222
+ - `--model-name NAME`: Custom GAMS model name (default: mcp_model)
223
+ - `--show-excluded / --no-show-excluded`: Show duplicate bounds excluded (default: no)
224
+ - `--no-comments`: Disable explanatory comments in output
225
+ - `--stats`: Print model statistics (equations, variables, nonzeros)
226
+ - `--dump-jacobian FILE`: Export Jacobian structure to Matrix Market format
227
+ - `--scale {none,auto,byvar}`: Apply scaling (default: none)
228
+ - `--simplification {none,basic,advanced}`: Expression simplification mode (default: advanced)
229
+ - `--smooth-abs`: Enable smooth abs() approximation via sqrt(x²+ε)
230
+ - `--smooth-abs-epsilon FLOAT`: Epsilon for abs smoothing (default: 1e-6)
231
+ - `--help`: Show help message
232
+
233
+ ### Expression Simplification
234
+
235
+ nlp2mcp automatically simplifies derivative expressions to produce more compact and efficient MCP formulations. The simplification mode can be controlled via the `--simplification` flag or configuration file.
236
+
237
+ #### Simplification Modes
238
+
239
+ **Advanced (default)** - `--simplification advanced`
240
+ - Applies all basic simplifications plus algebraic term collection
241
+
242
+ *Additive term collection:*
243
+ - **Constant collection**: `1 + x + 1 → x + 2`
244
+ - **Like-term collection**: `x + y + x + y → 2*x + 2*y`
245
+ - **Coefficient collection**: `2*x + 3*x → 5*x`
246
+ - **Term cancellation**: `x - x → 0`, `x + y - x → y`
247
+ - **Complex bases**: `x*y + 2*x*y → 3*x*y`
248
+
249
+ *Multiplicative term collection:*
250
+ - **Variable collection**: `x * x → x^2`, `x * x * x → x^3`
251
+ - **Power multiplication**: `x^2 * x^3 → x^5`
252
+ - **Mixed multiplication**: `x^2 * x → x^3`, `x * x^2 → x^3`
253
+
254
+ *Other algebraic rules:*
255
+ - **Multiplicative cancellation**: `2*x / 2 → x`, `2*x / (1+1) → x`
256
+ - **Power division**: `x^5 / x^2 → x^3`, `x / x^2 → 1/x`
257
+ - **Nested powers**: `(x^2)^3 → x^6`
258
+
259
+ Recommended for most use cases - produces cleanest output
260
+
261
+ **Basic** - `--simplification basic`
262
+ - Applies only fundamental simplification rules:
263
+ - Constant folding: `2 + 3 → 5`, `4 * 5 → 20`
264
+ - Zero elimination: `x + 0 → x`, `0 * x → 0`
265
+ - Identity elimination: `x * 1 → x`, `x / 1 → x`, `x^1 → x`
266
+ - Algebraic identities: `x - x → 0`, `x / x → 1`
267
+ - Use when you want minimal transformation of expressions
268
+
269
+ **None** - `--simplification none`
270
+ - No simplification applied
271
+ - Derivative expressions remain in raw differentiated form
272
+ - Useful for debugging or understanding the differentiation process
273
+ - May produce very large expressions
274
+
275
+ #### Examples
276
+
277
+ ```bash
278
+ # Default: advanced simplification
279
+ nlp2mcp model.gms -o output.gms
280
+
281
+ # Explicitly use advanced
282
+ nlp2mcp model.gms -o output.gms --simplification advanced
283
+
284
+ # Use basic simplification only
285
+ nlp2mcp model.gms -o output.gms --simplification basic
286
+
287
+ # Disable simplification
288
+ nlp2mcp model.gms -o output.gms --simplification none
289
+ ```
290
+
291
+ #### Configuration File
292
+
293
+ You can set the default simplification mode in `pyproject.toml`:
294
+
295
+ ```toml
296
+ [tool.nlp2mcp]
297
+ simplification = "advanced" # or "basic" or "none"
298
+ scale = "none"
299
+ smooth_abs = false
300
+ ```
301
+
302
+ #### When to Use Each Mode
303
+
304
+ - **Advanced** (default): Best for production use - produces cleanest, most readable output
305
+ - **Basic**: When you need predictable transformations without aggressive optimization
306
+ - **None**: For debugging, education, or when you need to see raw derivative expressions
307
+
308
+ ### Complete Example
309
+
310
+ **Input** (`examples/scalar_nlp.gms`):
311
+ ```gams
312
+ Variables x, obj;
313
+ Scalars a /2.0/;
314
+ Equations objective, stationarity;
315
+
316
+ objective.. obj =E= x;
317
+ stationarity.. x + a =E= 0;
318
+
319
+ Model mymodel /all/;
320
+ Solve mymodel using NLP minimizing obj;
321
+ ```
322
+
323
+ **Run nlp2mcp**:
324
+ ```bash
325
+ nlp2mcp examples/scalar_nlp.gms -o output_mcp.gms
326
+ ```
327
+
328
+ **Output** (`output_mcp.gms`):
329
+ ```gams
330
+ * Generated by nlp2mcp
331
+ * KKT System with stationarity, complementarity, and multipliers
332
+
333
+ Scalars
334
+ a /2.0/
335
+ ;
336
+
337
+ Variables
338
+ x
339
+ obj
340
+ nu_objective
341
+ nu_stationarity
342
+ ;
343
+
344
+ Equations
345
+ stat_x
346
+ objective
347
+ stationarity
348
+ ;
349
+
350
+ stat_x.. 1 + nu_stationarity =E= 0;
351
+ objective.. obj =E= x;
352
+ stationarity.. x + a =E= 0;
353
+
354
+ Model mcp_model /
355
+ stat_x.x,
356
+ objective.obj,
357
+ stationarity.nu_stationarity
358
+ /;
359
+
360
+ Solve mcp_model using MCP;
361
+ ```
362
+
363
+ ### Python API
364
+
365
+ After an editable install (`pip install -e .`) the package imports use the package name. Example usage:
366
+
367
+ ```python
368
+ from nlp2mcp.ir.parser import parse_model_file
369
+ from nlp2mcp.ir.normalize import normalize_model
370
+ from nlp2mcp.ad.gradient import compute_objective_gradient
371
+ from nlp2mcp.ad.constraint_jacobian import compute_constraint_jacobian
372
+ from nlp2mcp.kkt.assemble import assemble_kkt_system
373
+ from nlp2mcp.emit.emit_gams import emit_gams_mcp
374
+
375
+ # Full pipeline
376
+ model = parse_model_file("examples/simple_nlp.gms")
377
+ normalize_model(model)
378
+ gradient = compute_objective_gradient(model)
379
+ J_eq, J_ineq = compute_constraint_jacobian(model)
380
+ kkt = assemble_kkt_system(model, gradient, J_eq, J_ineq)
381
+ gams_code = emit_gams_mcp(kkt, model_name="mcp_model", add_comments=True)
382
+
383
+ print(gams_code)
384
+ ```
385
+
386
+ Note: if you prefer running from the repository without installing, either set `PYTHONPATH=.`, or run modules directly (for example `python -m src.cli ...`), but the recommended workflow for development is an editable install so imports use `nlp2mcp.*`.
387
+
388
+ ## Project Structure
389
+
390
+ ```
391
+ nlp2mcp/
392
+ ├── src/
393
+ │ ├── ad/ # Symbolic differentiation engine
394
+ │ │ ├── api.py # High-level API
395
+ │ │ ├── derivative_rules.py # Core differentiation rules
396
+ │ │ ├── simplify.py # Expression simplification
397
+ │ │ ├── evaluator.py # AST evaluation
398
+ │ │ ├── gradient.py # Gradient computation
399
+ │ │ ├── jacobian.py # Jacobian computation
400
+ │ │ ├── index_mapping.py # Index mapping utilities
401
+ │ │ └── validation.py # Finite-difference validation
402
+ │ ├── emit/ # Code generation for GAMS MCP
403
+ │ ├── gams/ # GAMS grammar and parsing utilities
404
+ │ ├── ir/ # Intermediate representation
405
+ │ │ ├── ast.py # Expression AST nodes
406
+ │ │ ├── model_ir.py # Model IR data structures
407
+ │ │ ├── normalize.py # Constraint normalization
408
+ │ │ ├── parser.py # GAMS parser
409
+ │ │ └── symbols.py # Symbol table definitions
410
+ │ ├── kkt/ # KKT system assembly
411
+ │ └── utils/ # Utility functions
412
+ ├── tests/
413
+ │ ├── ad/ # Differentiation tests
414
+ │ ├── gams/ # Parser tests
415
+ │ └── ir/ # IR and normalization tests
416
+ ├── examples/ # Example GAMS models
417
+ ├── docs/ # Additional documentation
418
+ │ ├── ad/ # Automatic differentiation docs
419
+ │ ├── architecture/ # System architecture
420
+ │ ├── emit/ # GAMS emission docs
421
+ │ ├── kkt/ # KKT assembly docs
422
+ │ └── planning/ # Sprint plans and retrospectives
423
+ ├── pyproject.toml # Project configuration
424
+ ├── Makefile # Development commands
425
+ └── README.md # This file
426
+ ```
427
+
428
+ ## Development
429
+
430
+ ### Available Make Commands
431
+
432
+ ```bash
433
+ make help # Show all available commands
434
+ make install # Install the package
435
+ make install-dev # Install with dev dependencies
436
+ make lint # Run linters (ruff, mypy)
437
+ make format # Format code (black, ruff)
438
+ make test # Run tests
439
+ make clean # Remove build artifacts
440
+ ```
441
+
442
+ ### Running Tests
443
+
444
+ The test suite is organized into four layers for fast feedback.
445
+
446
+ 📊 **[View Test Pyramid Visualization](docs/testing/TEST_PYRAMID.md)** - See test coverage breakdown by module and type.
447
+
448
+ ```bash
449
+ # Run fast unit tests only (~10 seconds)
450
+ ./scripts/test_fast.sh
451
+ # Or: pytest tests/unit/ -v
452
+
453
+ # Run unit + integration tests (~30 seconds)
454
+ ./scripts/test_integration.sh
455
+ # Or: pytest tests/unit/ tests/integration/ -v
456
+
457
+ # Run complete test suite (~60 seconds)
458
+ ./scripts/test_all.sh
459
+ # Or: pytest tests/ -v
460
+
461
+ # Run specific test category
462
+ pytest -m unit # Only unit tests
463
+ pytest -m integration # Only integration tests
464
+ pytest -m e2e # Only end-to-end tests
465
+ pytest -m validation # Only validation tests
466
+
467
+ # Run specific test file
468
+ pytest tests/unit/ad/test_arithmetic.py -v
469
+
470
+ # Run with coverage
471
+ pytest --cov=src tests/
472
+
473
+ # Run tests in parallel (faster, ~2 minutes for full suite)
474
+ pytest -n 4 # Use 4 workers
475
+ pytest -n auto # Auto-detect CPU count
476
+ ```
477
+
478
+ **Parallel Testing:** The test suite supports parallel execution using `pytest-xdist`. Running with `-n 4` reduces test time from ~3-4 minutes to ~2 minutes. All tests are isolated and safe for parallel execution.
479
+
480
+ ## Test Organization
481
+
482
+ The test suite is split into unit, integration, e2e, and validation layers. You can run the different subsets with the scripts in `./scripts/` or via pytest directly. Below are the counts collected locally on Nov 5, 2025 (run in this repository with `python3 -m pytest --collect-only`):
483
+
484
+ - Total collected tests: **1281**
485
+ - Marker breakdown (may overlap if tests carry multiple markers):
486
+ - unit: **434**
487
+ - integration: **223**
488
+ - e2e: **45**
489
+ - validation: **66**
490
+
491
+ Note: marker-based counts can overlap and the total may include tests without markers or additional collected items (fixtures, doctests, etc.). To reproduce these numbers locally run:
492
+
493
+ ```bash
494
+ # Total collected tests
495
+ python3 -m pytest --collect-only -q | wc -l
496
+
497
+ # Per-marker counts
498
+ python3 -m pytest -m unit --collect-only -q | wc -l
499
+ python3 -m pytest -m integration --collect-only -q | wc -l
500
+ python3 -m pytest -m e2e --collect-only -q | wc -l
501
+ python3 -m pytest -m validation --collect-only -q | wc -l
502
+ ```
503
+
504
+ Typical layout:
505
+
506
+ ```
507
+ tests/
508
+ ├── unit/
509
+ ├── integration/
510
+ ├── e2e/
511
+ └── validation/
512
+ ```
513
+
514
+ Test pyramid guidance: prefer fast unit tests during development, run integration/e2e tests for cross-module confidence, and run the full test suite (including validation tests) before releases.
515
+
516
+ ### Code Style
517
+
518
+ This project uses:
519
+ - **Black** for code formatting (line length: 100)
520
+ - **Ruff** for linting and import sorting
521
+ - **MyPy** for type checking
522
+
523
+ Format your code before committing:
524
+
525
+ ```bash
526
+ make format
527
+ make lint
528
+ ```
529
+
530
+ ## Examples
531
+
532
+ The `examples/` directory contains sample GAMS NLP models:
533
+
534
+ - `simple_nlp.gms` - Basic indexed NLP with objective and constraints
535
+ - `scalar_nlp.gms` - Simple scalar optimization problem
536
+ - `indexed_balance.gms` - Model with indexed balance equations
537
+ - `bounds_nlp.gms` - Demonstrates variable bounds handling
538
+ - `nonlinear_mix.gms` - Mixed nonlinear functions
539
+
540
+ ## Supported GAMS Subset
541
+
542
+ ### Declarations
543
+ - ✅ `Sets` with explicit members
544
+ - ✅ `Aliases`
545
+ - ✅ `Parameters` (scalar and indexed)
546
+ - ✅ `Scalars`
547
+ - ✅ `Variables` (scalar and indexed)
548
+ - ✅ `Equations` (scalar and indexed)
549
+ - ✅ `Table` data blocks
550
+
551
+ ### Preprocessing
552
+ - ✅ `$include` directive (nested, relative paths)
553
+
554
+ ### Comments
555
+ - ✅ GAMS inline comments (`* comment`)
556
+ - ✅ C-style line comments (`// comment`)
557
+ - ✅ Block comments (`$ontext ... $offtext`)
558
+
559
+ **Note:** Input file comments are stripped during parsing and do not appear in the output. However, the emitter can add explanatory comments to the output (controlled by `--no-comments` flag).
560
+
561
+ ### Expressions
562
+ - ✅ Arithmetic: `+`, `-`, `*`, `/`, `^`
563
+ - ✅ Functions: `exp`, `log`, `sqrt`, `sin`, `cos`, `tan`
564
+ - ✅ Aggregation: `sum(i, expr)`
565
+ - ✅ Comparisons: `=`, `<>`, `<`, `>`, `<=`, `>=`
566
+ - ✅ Logic: `and`, `or`
567
+ - ✅ `min()` and `max()` (reformulated to complementarity)
568
+ - ✅ `abs()` (smooth approximation with `--smooth-abs`)
569
+
570
+ ### Equations
571
+ - ✅ Relations: `=e=` (equality), `=l=` (≤), `=g=` (≥)
572
+ - ✅ Variable bounds: `.lo`, `.up`, `.fx`
573
+
574
+ ### Model
575
+ - ✅ `Model` declaration with equation lists or `/all/`
576
+ - ✅ `Solve` statement with `using NLP` and objective
577
+
578
+ ### Advanced Features
579
+ - ✅ **Scaling**: Curtis-Reid and byvar scaling (`--scale auto|byvar`)
580
+ - ✅ **Diagnostics**: Model statistics (`--stats`), Jacobian export (`--dump-jacobian`)
581
+ - ✅ **Configuration**: `pyproject.toml` support for default options
582
+ - ✅ **Logging**: Structured logging with verbosity control (`--verbose`, `--quiet`)
583
+
584
+ ### Not Yet Supported
585
+ - ❌ Control flow (`Loop`, `If`, `While`)
586
+ - ❌ Other `$` directives (`$if`, `$set`, etc.)
587
+ - ❌ External/user-defined functions
588
+ - ❌ Other non-differentiable functions (floor, ceil, sign, etc.)
589
+
590
+ ## Documentation
591
+
592
+ ### Concepts & Planning
593
+ - [docs/concepts/IDEA.md](docs/concepts/IDEA.md) - Original concept: How KKT conditions transform NLP to MCP
594
+ - [docs/concepts/NLP2MCP_HIGH_LEVEL.md](docs/concepts/NLP2MCP_HIGH_LEVEL.md) - Feasibility study and implementation blueprint
595
+ - [docs/planning/EPIC_1/SUMMARY.md](docs/planning/EPIC_1/SUMMARY.md) - Epic 1 sprint summary and feature overview
596
+ - [docs/planning/EPIC_1/PROJECT_PLAN.md](docs/planning/EPIC_1/PROJECT_PLAN.md) - Detailed 5-sprint development plan
597
+ - [docs/planning/EPIC_1/README.md](docs/planning/EPIC_1/README.md) - Sprint summaries and retrospectives
598
+ - [docs/development/AGENTS.md](docs/development/AGENTS.md) - Agent-based development notes
599
+
600
+ ### Technical Documentation
601
+
602
+ **System Architecture:**
603
+ - [docs/architecture/SYSTEM_ARCHITECTURE.md](docs/architecture/SYSTEM_ARCHITECTURE.md) - Overall system data flow
604
+ - [docs/architecture/DATA_STRUCTURES.md](docs/architecture/DATA_STRUCTURES.md) - IR and KKT data structures
605
+
606
+ **Automatic Differentiation:**
607
+ - [docs/ad/README.md](docs/ad/README.md) - AD module overview and quick start
608
+ - [docs/ad/ARCHITECTURE.md](docs/ad/ARCHITECTURE.md) - Design decisions and rationale
609
+ - [docs/ad/DESIGN.md](docs/ad/DESIGN.md) - Detailed implementation approach
610
+ - [docs/ad/DERIVATIVE_RULES.md](docs/ad/DERIVATIVE_RULES.md) - Complete derivative rules reference
611
+
612
+ **KKT Assembly & Code Generation:**
613
+ - [docs/kkt/KKT_ASSEMBLY.md](docs/kkt/KKT_ASSEMBLY.md) - KKT system assembly (mathematical background, implementation)
614
+ - [docs/emit/GAMS_EMISSION.md](docs/emit/GAMS_EMISSION.md) - GAMS MCP code generation (syntax, patterns, examples)
615
+
616
+ ## Contributing
617
+
618
+ **Please read [CONTRIBUTING.md](CONTRIBUTING.md) before contributing!**
619
+
620
+ This project is in active development (Sprint 7 complete - parser enhancements and GAMSLib expansion). Contributions are welcome!
621
+
622
+ ### Quick Start for Contributors
623
+
624
+ 1. **Read guidelines**: [CONTRIBUTING.md](CONTRIBUTING.md) and [docs/development/AGENTS.md](docs/development/AGENTS.md)
625
+ 2. **Setup environment**:
626
+ ```bash
627
+ python3.12 -m venv .venv
628
+ source .venv/bin/activate
629
+ make install-dev
630
+ ```
631
+ 3. **Create feature branch**: `git checkout -b feature/amazing-feature`
632
+ 4. **Make changes**: Follow code style in CONTRIBUTING.md
633
+ 5. **Quality checks**:
634
+ ```bash
635
+ make format # Auto-format code
636
+ make lint # Type checking and linting
637
+ make test # All tests must pass (1281 collected tests)
638
+ ```
639
+ 6. **Submit PR**: Push branch and create Pull Request on GitHub
640
+
641
+ ### Requirements
642
+ - Python 3.12+ with modern type hints
643
+ - All tests passing
644
+ - Code formatted with Black + Ruff
645
+ - Type checked with mypy
646
+
647
+ See [CONTRIBUTING.md](CONTRIBUTING.md) for detailed guidelines.
648
+
649
+ ## License
650
+
651
+ MIT License - See LICENSE file for details
652
+
653
+ ## Acknowledgments
654
+
655
+ - Based on the mathematical framework of KKT conditions for nonlinear optimization
656
+ - Uses [Lark](https://github.com/lark-parser/lark) for parsing GAMS syntax
657
+ - Inspired by GAMS/PATH and other MCP solvers
658
+
659
+ ## Roadmap
660
+
661
+ - **v0.1.0** (Sprint 1): ✅ Parser and IR - COMPLETE
662
+ - **v0.2.0** (Sprint 2): ✅ Symbolic differentiation - COMPLETE
663
+ - **v0.3.0** (Sprint 3): ✅ KKT synthesis and MCP code generation - COMPLETE
664
+ - **v0.3.1** (Post Sprint 3): ✅ Issue #47 fix (indexed equations) - COMPLETE
665
+ - **v0.4.0** (Sprint 4): ✅ Extended features and robustness - COMPLETE
666
+ - **v0.5.0** (Sprint 5): ✅ Hardening, packaging, and documentation - COMPLETE
+ - **v0.6.0** (Sprint 6): ✅ Convexity heuristics, GAMSLib integration, and UX improvements - COMPLETE
+ - **v0.7.0** (Sprint 7): ✅ Parser enhancements and GAMSLib expansion - COMPLETE
+ - **v1.0.0**: 🔄 Production-ready release - PLANNED
667
+
668
+ ## Contact
669
+
670
+ For questions, issues, or suggestions, please open an issue on GitHub.