pyoframe 0.0.11__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyoframe/util.py CHANGED
@@ -2,15 +2,28 @@
  File containing utility functions and classes.
  """

- from typing import Any, Iterable, Optional, Union, List, Dict
-
  from dataclasses import dataclass, field
+ from functools import wraps
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Dict,
+     Iterable,
+     List,
+     Optional,
+     Sequence,
+     Type,
+     Union,
+ )

- import polars as pl
  import pandas as pd
- from functools import wraps
+ import polars as pl

- from pyoframe.constants import COEF_KEY, CONST_TERM, RESERVED_COL_KEYS, VAR_KEY
+ from pyoframe.constants import COEF_KEY, CONST_TERM, RESERVED_COL_KEYS, VAR_KEY, Config
+
+ if TYPE_CHECKING:  # pragma: no cover
+     from pyoframe.model import Variable
+     from pyoframe.model_element import ModelElementWithId


  def get_obj_repr(obj: object, _props: Iterable[str] = (), **kwargs):
@@ -47,10 +60,20 @@ def parse_inputs_as_iterable(


  def _is_iterable(input: Union[Any, Iterable[Any]]) -> bool:
-     # Inspired from the polars library
+     # Inspired from the polars library, TODO: Consider using opposite check, i.e. equals list or tuple
      return isinstance(input, Iterable) and not isinstance(
          input,
-         (str, bytes, pl.DataFrame, pl.Series, pd.DataFrame, pd.Series, pd.Index, dict),
+         (
+             str,
+             bytes,
+             pl.DataFrame,
+             pl.Series,
+             pd.DataFrame,
+             pd.Series,
+             pd.Index,
+             dict,
+             range,
+         ),
      )


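Note on the change above: `range` joins the types that are treated as a single object rather than iterated over. A minimal standalone sketch (not part of the package) restating the updated check and its effect on a few inputs:

from typing import Any, Iterable

import pandas as pd
import polars as pl


def _is_iterable(value: Any) -> bool:
    # Same check as the updated pyoframe.util._is_iterable: generic iterables pass, but
    # containers that pyoframe treats as one object (frames, series, dicts, and now ranges) do not.
    return isinstance(value, Iterable) and not isinstance(
        value,
        (str, bytes, pl.DataFrame, pl.Series, pd.DataFrame, pd.Series, pd.Index, dict, range),
    )


assert _is_iterable([1, 2, 3])        # lists are still unpacked element by element
assert not _is_iterable(range(3))     # new in 0.1.1: ranges are passed through whole
assert not _is_iterable("abc")        # unchanged: strings are not unpacked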
@@ -58,7 +81,7 @@ def concat_dimensions(
      df: pl.DataFrame,
      prefix: Optional[str] = None,
      keep_dims: bool = True,
-     ignore_columns=RESERVED_COL_KEYS,
+     ignore_columns: Sequence[str] = RESERVED_COL_KEYS,
      replace_spaces: bool = True,
      to_col: str = "concated_dim",
  ) -> pl.DataFrame:
@@ -66,12 +89,14 @@ def concat_dimensions(
      Returns a new DataFrame with the column 'concated_dim'. Reserved columns are ignored.

      Parameters:
-         df : pl.DataFrame
+         df:
              The input DataFrame.
-         prefix : str, optional
+         prefix:
              The prefix to be added to the concated dimension.
-         keep_dims : bool, optional
+         keep_dims:
              If True, the original dimensions are kept in the new DataFrame.
+         replace_spaces : bool, optional
+             If True, replaces spaces with underscores.

      Examples:
          >>> import polars as pl
@@ -160,18 +185,19 @@ def concat_dimensions(


  def cast_coef_to_string(
-     df: pl.DataFrame, column_name: str = COEF_KEY, drop_ones=True, float_precision=None
+     df: pl.DataFrame, column_name: str = COEF_KEY, drop_ones: bool = True
  ) -> pl.DataFrame:
      """
+     Converts column `column_name` of the dataframe `df` to a string. Rounds to `Config.print_float_precision` decimal places if not None.
+
      Parameters:
-         df : pl.DataFrame
+         df:
              The input DataFrame.
-         column_name : str, optional
+         column_name:
              The name of the column to be casted.
-         drop_ones : bool, optional
+         drop_ones:
              If True, 1s are replaced with an empty string for non-constant terms.
-         float_precision : int, optional
-             The number of decimal places to round the coefficients to. If None, no rounding is done (so Polars' default precision is used).
+
      Examples:
          >>> import polars as pl
          >>> df = pl.DataFrame({"x": [1.0, -2.0, 1.0, 4.0], VAR_KEY: [1, 2, 0, 4]})
@@ -193,8 +219,8 @@ def cast_coef_to_string(
          _sign=pl.when(pl.col(column_name) < 0).then(pl.lit("-")).otherwise(pl.lit("+")),
      )

-     if float_precision is not None:
-         df = df.with_columns(pl.col(column_name).round(float_precision))
+     if Config.float_to_str_precision is not None:
+         df = df.with_columns(pl.col(column_name).round(Config.float_to_str_precision))

      df = df.with_columns(
          pl.when(pl.col(column_name) == pl.col(column_name).floor())
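The rounding previously controlled by the removed `float_precision` argument now comes from the global `Config.float_to_str_precision` setting shown above. A rough standalone sketch of the same conditional-rounding pattern in Polars, with an illustrative constant standing in for the real `Config` attribute and a plain column name standing in for `COEF_KEY`:

import polars as pl

FLOAT_TO_STR_PRECISION = 5  # stand-in for Config.float_to_str_precision (assumed value)

df = pl.DataFrame({"coef": [1.0000004, -2.25, 0.333333333]})

# Mirror cast_coef_to_string: only round when a precision is configured.
if FLOAT_TO_STR_PRECISION is not None:
    df = df.with_columns(pl.col("coef").round(FLOAT_TO_STR_PRECISION))

print(df["coef"].to_list())  # [1.0, -2.25, 0.33333]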
@@ -270,3 +296,115 @@ def dataframe_to_tupled_list(
  class FuncArgs:
      args: List
      kwargs: Dict = field(default_factory=dict)
+
+
+ class Container:
+     """
+     A placeholder object that makes it easy to set and get attributes. Used in Model.attr and Model.params, for example.
+
+     Examples:
+         >>> x = {}
+         >>> params = Container(setter=lambda n, v: x.__setitem__(n, v), getter=lambda n: x[n])
+         >>> params.a = 1
+         >>> params.b = 2
+         >>> params.a
+         1
+         >>> params.b
+         2
+     """
+
+     def __init__(self, setter, getter):
+         self._setter = setter
+         self._getter = getter
+
+     def __setattr__(self, name: str, value: Any) -> None:
+         if name.startswith("_"):  # pragma: no cover
+             return super().__setattr__(name, value)
+         self._setter(name, value)
+
+     def __getattr__(self, name: str) -> Any:
+         if name.startswith("_"):  # pragma: no cover
+             return super().__getattribute__(name)
+         return self._getter(name)
+
+
+ class NamedVariableMapper:
+     """
+     Maps variables to a string representation using the object's name and dimensions.
+
+     Examples:
+         >>> import polars as pl
+         >>> m = pf.Model()
+         >>> m.foo = pf.Variable(pl.DataFrame({"t": range(4)}))
+         >>> pf.sum(m.foo)
+         <Expression size=1 dimensions={} terms=4>
+         foo[0] + foo[1] + foo[2] + foo[3]
+     """
+
+     CONST_TERM_NAME = "_ONE"
+     NAME_COL = "__name"
+
+     def __init__(self, cls: Type["ModelElementWithId"]) -> None:
+         self._ID_COL = VAR_KEY
+         self.mapping_registry = pl.DataFrame(
+             {self._ID_COL: [], self.NAME_COL: []},
+             schema={self._ID_COL: pl.UInt32, self.NAME_COL: pl.String},
+         )
+         self._extend_registry(
+             pl.DataFrame(
+                 {self._ID_COL: [CONST_TERM], self.NAME_COL: [self.CONST_TERM_NAME]},
+                 schema={self._ID_COL: pl.UInt32, self.NAME_COL: pl.String},
+             )
+         )
+
+     def add(self, element: "Variable") -> None:
+         self._extend_registry(self._element_to_map(element))
+
+     def _extend_registry(self, df: pl.DataFrame) -> None:
+         self.mapping_registry = pl.concat([self.mapping_registry, df])
+
+     def apply(
+         self,
+         df: pl.DataFrame,
+         to_col: str,
+         id_col: str,
+     ) -> pl.DataFrame:
+         return df.join(
+             self.mapping_registry,
+             how="left",
+             validate="m:1",
+             left_on=id_col,
+             right_on=self._ID_COL,
+         ).rename({self.NAME_COL: to_col})
+
+     def _element_to_map(self, element) -> pl.DataFrame:
+         element_name = element.name  # type: ignore
+         assert (
+             element_name is not None
+         ), "Element must have a name to be used in a named mapping."
+         element._assert_has_ids()
+         return concat_dimensions(
+             element.data.select(element.dimensions_unsafe + [VAR_KEY]),
+             keep_dims=False,
+             prefix=element_name,
+             to_col=self.NAME_COL,
+         )
+
+
+ def for_solvers(*solvers: str):
+     """
+     Decorator that limits the function to only be called when the solver is in the `only` list.
+     """
+
+     def decorator(func):
+         @wraps(func)
+         def wrapper(self, *args, **kwargs):
+             if self.solver_name not in solvers:
+                 raise NotImplementedError(
+                     f"Method '{func.__name__}' is not implemented for solver '{self.solver_name}'."
+                 )
+             return func(self, *args, **kwargs)
+
+         return wrapper
+
+     return decorator
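For orientation, `for_solvers` is meant to decorate methods of an object exposing a `solver_name` attribute (as pyoframe's `Model` does). A hedged usage sketch, assuming `for_solvers` has been imported from `pyoframe.util`; the class and method names below are made-up stand-ins, not part of the package:

class _FakeBackend:
    """Hypothetical object with the `solver_name` attribute the decorator relies on."""

    def __init__(self, solver_name: str):
        self.solver_name = solver_name

    @for_solvers("gurobi")
    def tune(self):  # hypothetical method restricted to Gurobi
        return "tuned"


print(_FakeBackend("gurobi").tune())  # "tuned"
try:
    _FakeBackend("highs").tune()
except NotImplementedError as e:
    print(e)  # Method 'tune' is not implemented for solver 'highs'.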
pyoframe-0.1.1.dist-info/LICENSE CHANGED
@@ -1,8 +1,6 @@
  MIT License

  Copyright 2024 Bravos Power
- Copyright 2021-2023 Fabian Hofmann
- Copyright 2015-2021 PyPSA Developers

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
pyoframe-0.1.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: pyoframe
- Version: 0.0.11
+ Version: 0.1.1
  Summary: Blazing fast linear program interface
  Author-email: Bravos Power <dev@bravospower.com>
  Project-URL: Homepage, https://bravos-power.github.io/pyoframe/
@@ -12,31 +12,37 @@ Classifier: Operating System :: OS Independent
  Classifier: Development Status :: 3 - Alpha
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Natural Language :: English
- Requires-Python: >=3.8
+ Requires-Python: >=3.9
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: polars <2,>=0.20
+ Requires-Dist: polars<2,>=0.20
  Requires-Dist: numpy
  Requires-Dist: pyarrow
  Requires-Dist: pandas
  Requires-Dist: packaging
+ Requires-Dist: pyoptinterface~=0.4
  Provides-Extra: dev
- Requires-Dist: black ; extra == 'dev'
- Requires-Dist: bumpver ; extra == 'dev'
- Requires-Dist: isort ; extra == 'dev'
- Requires-Dist: pip-tools ; extra == 'dev'
- Requires-Dist: pytest ; extra == 'dev'
- Requires-Dist: pytest-cov ; extra == 'dev'
- Requires-Dist: pre-commit ; extra == 'dev'
- Requires-Dist: gurobipy ; extra == 'dev'
+ Requires-Dist: black[jupyter]; extra == "dev"
+ Requires-Dist: bumpver; extra == "dev"
+ Requires-Dist: isort; extra == "dev"
+ Requires-Dist: pip-tools; extra == "dev"
+ Requires-Dist: pytest; extra == "dev"
+ Requires-Dist: pytest-cov; extra == "dev"
+ Requires-Dist: pre-commit; extra == "dev"
+ Requires-Dist: gurobipy; extra == "dev"
+ Requires-Dist: highsbox; extra == "dev"
+ Requires-Dist: pre-commit; extra == "dev"
  Provides-Extra: docs
- Requires-Dist: mkdocs-material ==9.* ; extra == 'docs'
- Requires-Dist: mkdocstrings[python] ; extra == 'docs'
- Requires-Dist: mkdocs-git-revision-date-localized-plugin ; extra == 'docs'
- Requires-Dist: mkdocs-git-committers-plugin-2 ; extra == 'docs'
- Requires-Dist: mkdocs-gen-files ; extra == 'docs'
- Requires-Dist: mkdocs-section-index ; extra == 'docs'
- Requires-Dist: mkdocs-literate-nav ; extra == 'docs'
+ Requires-Dist: mkdocs-material==9.*; extra == "docs"
+ Requires-Dist: mkdocstrings[python]; extra == "docs"
+ Requires-Dist: mkdocs-git-revision-date-localized-plugin; extra == "docs"
+ Requires-Dist: mkdocs-git-committers-plugin-2; extra == "docs"
+ Requires-Dist: mkdocs-gen-files; extra == "docs"
+ Requires-Dist: mkdocs-section-index; extra == "docs"
+ Requires-Dist: mkdocs-literate-nav; extra == "docs"
+ Requires-Dist: mkdocs-table-reader-plugin; extra == "docs"
+ Provides-Extra: highs
+ Requires-Dist: highsbox; extra == "highs"

  # Pyoframe: Fast and low-memory linear programming models

@@ -50,16 +56,12 @@ Requires-Dist: mkdocs-literate-nav ; extra == 'docs'

  A library to rapidly and memory-efficiently formulate large and sparse optimization models using Pandas or Polars dataframes.

- ## Contribute
+ ## **[Documentation](https://bravos-power.github.io/pyoframe/)**

- Contributions are welcome! See [`CONTRIBUTE.md`](./CONTRIBUTE.md).
+ [Read the documentation](https://bravos-power.github.io/pyoframe/) to get started or to learn how to [contribute](https://bravos-power.github.io/pyoframe/contribute/).

- ## Acknowledgments
-
- Martin Staadecker first created this library while working for [Bravos Power](https://www.bravospower.com/) The library takes inspiration from Linopy and Pyomo, two prior libraries for optimization for which we are thankful.

- ## Troubleshooting Common Errors
+ ## Acknowledgments

- ### `datatypes of join keys don't match`
+ Martin Staadecker first created this library while working for [Bravos Power](https://www.bravospower.com/). The library takes inspiration from Linopy and Pyomo, two prior libraries for optimization for which we are thankful.

- Often, this error indicates that two dataframes in your inputs representing the same dimension have different datatypes (e.g. 16bit integer and 64bit integer). This is not allowed and you should ensure for the same dimensions, datatypes are identical.
pyoframe-0.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+ pyoframe/__init__.py,sha256=nEN0OgqhevtsvxEiPbJLzwPojf3ngYAoT90M_1mc4kM,477
+ pyoframe/_arithmetic.py,sha256=LvuxI4pFYuqrqus4FxcIekUwXfdEMEVWBR-1h5hF7Ac,14764
+ pyoframe/constants.py,sha256=STQZufgBCS7QTmyQTK_8lINYNSDjXCxVFgF1mXoXen4,3769
+ pyoframe/core.py,sha256=C9T0wFDAgcsFVxwnLOYqQ2j9fwnCCS_usjlGSME_qmo,62743
+ pyoframe/model.py,sha256=d_WyLzdfroDYAtyXs3Ie_jo5c_CGxTX5qPT4vCVaiB8,11967
+ pyoframe/model_element.py,sha256=nCfe56CRWr6bwP8irUd2bmLAEGQ-7GwOQtWeqz2WxtU,5944
+ pyoframe/monkey_patch.py,sha256=9IfS14G6IPabmM9z80jzi_D4Rq0Mdx5aUCA39Yi2tgE,2044
+ pyoframe/objective.py,sha256=PBWxj30QkFlsvY6ijZ6KjyKdrJARD4to0ieF6GUqaQU,3238
+ pyoframe/util.py,sha256=Oyk8xh6FJHlb04X_cM4lN0UzdnKLXAMrKfyOf7IexiA,13480
+ pyoframe-0.1.1.dist-info/LICENSE,sha256=dkwA40ZzT-3x6eu2a6mf_o7PNSqHbdsyaFNhLxGHNQs,1065
+ pyoframe-0.1.1.dist-info/METADATA,sha256=fJyV3KirCM8UBwOGNYODi_6mRxKKU7chhvuYpWob4bE,3518
+ pyoframe-0.1.1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ pyoframe-0.1.1.dist-info/top_level.txt,sha256=10z3OOJSVLriQ0IrFLMH8CH9zByugPWolqhlHlkNjV4,9
+ pyoframe-0.1.1.dist-info/RECORD,,
pyoframe-0.1.1.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.3.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

pyoframe/io.py DELETED
@@ -1,252 +0,0 @@
- """
- Module containing all import/export functionalities.
- """
-
- import sys
- import time
- from io import TextIOWrapper
- from pathlib import Path
- from tempfile import NamedTemporaryFile
- from typing import TYPE_CHECKING, Iterable, Optional, TypeVar, Union
-
- from pyoframe.constants import CONST_TERM, VAR_KEY, ObjSense
- from pyoframe.core import Constraint, Variable
- from pyoframe.io_mappers import (
-     Base36ConstMapper,
-     Base36VarMapper,
-     IOMappers,
-     Mapper,
-     NamedMapper,
-     NamedVariableMapper,
- )
-
- if TYPE_CHECKING:  # pragma: no cover
-     from pyoframe.model import Model
-
- import polars as pl
-
- T = TypeVar("T")
-
-
- def io_progress_bar(
-     iterable: Iterable[T],
-     prefix: str = "",
-     suffix: str = "",
-     length: int = 50,
-     fill: str = "█",
-     update_every: int = 1,
- ):
-     """
-     Display progress bar for I/O operations.
-     """
-     try:
-         total = len(iterable)
-     except TypeError:
-         total = None
-
-     start_time = time.time()
-
-     def print_progress(iteration: int):
-         if total is not None:
-             percent = f"{100 * (iteration / float(total)):.1f}"
-             filled_length = int(length * iteration // total)
-             bar = fill * filled_length + "-" * (length - filled_length)
-         else:
-             percent = "N/A"
-             bar = fill * (iteration % length) + "-" * (length - (iteration % length))
-         elapsed_time = time.time() - start_time
-         if iteration > 0:
-             estimated_total_time = (
-                 elapsed_time * (total / iteration) if total else elapsed_time
-             )
-             estimated_remaining_time = estimated_total_time - elapsed_time
-             eta = time.strftime("%H:%M:%S", time.gmtime(estimated_remaining_time))
-         else:
-             eta = "Estimating..."  # pragma: no cover
-         sys.stdout.write(
-             f'\r{prefix} |{bar}| {percent}% Complete ({iteration}/{total if total else "?"}) ETA: {eta} {suffix}'
-         )
-         sys.stdout.flush()
-
-     for i, item in enumerate(iterable):
-         yield item
-         if (i + 1) % update_every == 0 or total is None or i == total - 1:
-             print_progress(i + 1)
-
-     sys.stdout.write("\n")
-     sys.stdout.flush()
-
-
- def objective_to_file(m: "Model", f: TextIOWrapper, var_map):
-     """
-     Write out the objective of a model to a lp file.
-     """
-     if m.objective is None:
-         return
-     objective_sense = "minimize" if m.sense == ObjSense.MIN else "maximize"
-     f.write(f"{objective_sense}\n\nobj:\n\n")
-     result = m.objective.to_str(
-         var_map=var_map, include_prefix=False, include_const_variable=True
-     )
-     f.write(result)
-
-
- def constraints_to_file(m: "Model", f: TextIOWrapper, var_map, const_map):
-     for constraint in create_section(
-         io_progress_bar(
-             m.constraints, prefix="Writing constraints to file", update_every=5
-         ),
-         f,
-         "s.t.",
-     ):
-         f.write(constraint.to_str(var_map=var_map, const_map=const_map) + "\n")
-
-
- def bounds_to_file(m: "Model", f, var_map):
-     """
-     Write out variables of a model to a lp file.
-     """
-     if (m.objective is not None and m.objective.has_constant) or len(m.variables) != 0:
-         f.write("\n\nbounds\n\n")
-     if m.objective is not None and m.objective.has_constant:
-         const_term_df = pl.DataFrame(
-             {VAR_KEY: [CONST_TERM]}, schema={VAR_KEY: pl.UInt32}
-         )
-         f.write(f"{var_map.apply(const_term_df).item()} = 1\n")
-
-     for variable in io_progress_bar(
-         m.variables, prefix="Writing bounds to file", update_every=1
-     ):
-         terms = []
-
-         if variable.lb != 0:
-             terms.append(pl.lit(f"{variable.lb:.12g} <= "))
-
-         terms.append(VAR_KEY)
-
-         if variable.ub != float("inf"):
-             terms.append(pl.lit(f" <= {variable.ub:.12g}"))
-
-         terms.append(pl.lit("\n"))
-
-         if len(terms) < 3:
-             continue
-
-         df = (
-             var_map.apply(variable.data, to_col=None)
-             .select(pl.concat_str(terms).str.concat(""))
-             .item()
-         )
-
-         f.write(df)
-
-
- def binaries_to_file(m: "Model", f, var_map: Mapper):
-     """
-     Write out binaries of a model to a lp file.
-     """
-     for variable in create_section(
-         io_progress_bar(
-             m.binary_variables,
-             prefix="Writing binary variables to file",
-             update_every=1,
-         ),
-         f,
-         "binary",
-     ):
-         lines = (
-             var_map.apply(variable.data, to_col=None)
-             .select(pl.col(VAR_KEY).str.concat("\n"))
-             .item()
-         )
-         f.write(lines + "\n")
-
-
- def integers_to_file(m: "Model", f, var_map: Mapper):
-     """
-     Write out integers of a model to a lp file.
-     """
-     for variable in create_section(
-         io_progress_bar(
-             m.integer_variables,
-             prefix="Writing integer variables to file",
-             update_every=5,
-         ),
-         f,
-         "general",
-     ):
-         lines = (
-             var_map.apply(variable.data, to_col=None)
-             .select(pl.col(VAR_KEY).str.concat("\n"))
-             .item()
-         )
-         f.write(lines + "\n")
-
-
- def create_section(iterable: Iterable[T], f, section_header) -> Iterable[T]:
-     wrote = False
-     for item in iterable:
-         if not wrote:
-             f.write(f"\n\n{section_header}\n\n")
-             wrote = True
-         yield item
-
-
- def get_var_map(m: "Model", use_var_names):
-     if use_var_names:
-         if m.var_map is not None:
-             return m.var_map
-         var_map = NamedVariableMapper(Variable)
-     else:
-         var_map = Base36VarMapper(Variable)
-
-     for v in m.variables:
-         var_map.add(v)
-     return var_map
-
-
- def to_file(
-     m: "Model", file_path: Optional[Union[str, Path]] = None, use_var_names=False
- ) -> Path:
-     """
-     Write out a model to a lp file.
-
-     Args:
-         m: The model to write out.
-         file_path: The path to write the model to. If None, a temporary file is created. The caller is responsible for
-             deleting the file after use.
-         use_var_names: If True, variable names are used in the lp file. Otherwise, variable
-             indices are used.
-
-     Returns:
-         The path to the lp file.
-     """
-     if file_path is None:
-         with NamedTemporaryFile(
-             prefix="pyoframe-problem-", suffix=".lp", mode="w", delete=False
-         ) as f:
-             file_path = f.name
-
-     file_path = Path(file_path)
-     assert file_path.suffix == ".lp", f"File format `{file_path.suffix}` not supported."
-
-     if file_path.exists():
-         file_path.unlink()
-
-     const_map = (
-         NamedMapper(Constraint) if use_var_names else Base36ConstMapper(Constraint)
-     )
-     for c in m.constraints:
-         const_map.add(c)
-     var_map = get_var_map(m, use_var_names)
-     m.io_mappers = IOMappers(var_map, const_map)
-
-     with open(file_path, mode="w") as f:
-         objective_to_file(m, f, var_map)
-         constraints_to_file(m, f, var_map, const_map)
-         bounds_to_file(m, f, var_map)
-         binaries_to_file(m, f, var_map)
-         integers_to_file(m, f, var_map)
-         f.write("\nend\n")
-
-     return file_path
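One detail of the removed module worth keeping in mind when reading old LP files: `create_section` wrote a section header lazily, so sections with no items were omitted from the file entirely. A standalone restatement of that pattern (using `io.StringIO` in place of the real file handle, with illustrative item names):

import io
from typing import Iterable, TypeVar

T = TypeVar("T")


def create_section(iterable: Iterable[T], f, section_header) -> Iterable[T]:
    # Same logic as the removed pyoframe.io.create_section: the header is only
    # written once the first item is yielded, so empty sections never appear.
    wrote = False
    for item in iterable:
        if not wrote:
            f.write(f"\n\n{section_header}\n\n")
            wrote = True
        yield item


buf = io.StringIO()
for name in create_section(["x1", "x2"], buf, "binary"):
    buf.write(name + "\n")
for name in create_section([], buf, "general"):  # empty iterable: no "general" header written
    buf.write(name + "\n")
print(buf.getvalue())  # contains a "binary" section but no "general" section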