pyoframe 0.2.1__py3-none-any.whl → 1.0.0a0__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
@@ -1,37 +1,49 @@
- """
- File containing utility functions and classes.
- """
+ """Contains utility functions and classes."""

+ from __future__ import annotations
+
+ import itertools
+ import sys
+ from collections.abc import Iterable
  from dataclasses import dataclass, field
  from functools import wraps
- from typing import (
-     TYPE_CHECKING,
-     Any,
-     Dict,
-     Iterable,
-     List,
-     Optional,
-     Sequence,
-     Type,
-     Union,
- )
+ from typing import TYPE_CHECKING, Any, Callable

  import pandas as pd
  import polars as pl

- from pyoframe.constants import COEF_KEY, CONST_TERM, RESERVED_COL_KEYS, VAR_KEY, Config
+ from pyoframe._constants import (
+     COEF_KEY,
+     CONST_TERM,
+     RESERVED_COL_KEYS,
+     VAR_KEY,
+     Config,
+ )

  if TYPE_CHECKING:  # pragma: no cover
-     from pyoframe.model import Variable
-     from pyoframe.model_element import ModelElementWithId
+     from pyoframe._core import SupportsMath
+     from pyoframe._model import Variable
+     from pyoframe._model_element import ModelElementWithId

+ if sys.version_info >= (3, 10):
+     pairwise = itertools.pairwise
+ else:

- def get_obj_repr(obj: object, _props: Iterable[str] = (), **kwargs):
-     """
-     Helper function to generate __repr__ strings for classes. See usage for examples.
+     def pairwise(iterable):
+         iterator = iter(iterable)
+         a = next(iterator)
+
+         for b in iterator:
+             yield a, b
+             a = b
+
+
+ def get_obj_repr(obj: object, *props: str | None, **kwargs):
+     """Generates __repr__() strings for classes.
+
+     See usage for examples.
      """
-     props = {prop: getattr(obj, prop) for prop in _props}
-     props_str = " ".join(f"{k}={v}" for k, v in props.items() if v is not None)
+     props_str = " ".join(f"'{v}'" for v in props if v is not None)
      if props_str:
          props_str += " "
      kwargs_str = " ".join(f"{k}={v}" for k, v in kwargs.items() if v is not None)
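Note: the hunk above vendors a fallback for `itertools.pairwise`, which only exists on Python 3.10+. The following standalone sketch (not part of the package) checks that the backport yields the same consecutive pairs as the standard-library version:

    import itertools
    import sys

    if sys.version_info >= (3, 10):
        pairwise = itertools.pairwise
    else:

        def pairwise(iterable):
            # Yield overlapping pairs: (x0, x1), (x1, x2), ...
            iterator = iter(iterable)
            a = next(iterator)
            for b in iterator:
                yield a, b
                a = b

    assert list(pairwise([1, 2, 3, 4])) == [(1, 2), (2, 3), (3, 4)]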
@@ -39,10 +51,10 @@ def get_obj_repr(obj: object, _props: Iterable[str] = (), **kwargs):


  def parse_inputs_as_iterable(
-     *inputs: Union[Any, Iterable[Any]],
+     *inputs: Any | Iterable[Any],
  ) -> Iterable[Any]:
-     """
-     Converts a parameter *x: Any | Iteraable[Any] to a single Iterable[Any] object.
+     """Converts a parameter *x: Any | Iterable[Any] to a single Iterable[Any] object.
+
      This is helpful to support these two ways of passing arguments:
          - foo([1, 2, 3])
          - foo(1, 2, 3)
@@ -59,7 +71,7 @@ def parse_inputs_as_iterable(
      return inputs


- def _is_iterable(input: Union[Any, Iterable[Any]]) -> bool:
+ def _is_iterable(input: Any | Iterable[Any]) -> bool:
      # Inspired from the polars library, TODO: Consider using opposite check, i.e. equals list or tuple
      return isinstance(input, Iterable) and not isinstance(
          input,
@@ -78,15 +90,11 @@ def _is_iterable(input: Union[Any, Iterable[Any]]) -> bool:


  def concat_dimensions(
-     df: pl.DataFrame,
-     prefix: Optional[str] = None,
-     keep_dims: bool = True,
-     ignore_columns: Sequence[str] = RESERVED_COL_KEYS,
-     replace_spaces: bool = True,
-     to_col: str = "concated_dim",
+     df: pl.DataFrame, prefix: str, keep_dims: bool = True, to_col: str = "concated_dim"
  ) -> pl.DataFrame:
-     """
-     Returns a new DataFrame with the column 'concated_dim'. Reserved columns are ignored.
+     """Returns a new DataFrame with the column 'concated_dim'.
+
+     Reserved columns are ignored. Spaces are replaced with underscores.

      Parameters:
          df:
@@ -94,9 +102,7 @@ def concat_dimensions(
          prefix:
              The prefix to be added to the concated dimension.
          keep_dims:
-             If True, the original dimensions are kept in the new DataFrame.
-         replace_spaces : bool, optional
-             If True, replaces spaces with underscores.
+             If `True`, the original dimensions are kept in the new DataFrame.

      Examples:
          >>> import polars as pl
@@ -106,20 +112,6 @@ def concat_dimensions(
          ...         "dim2": ["Y", "Y", "Y", "N", "N", "N"],
          ...     }
          ... )
-         >>> concat_dimensions(df)
-         shape: (6, 3)
-         ┌──────┬──────┬──────────────┐
-         │ dim1 ┆ dim2 ┆ concated_dim │
-         │ ---  ┆ ---  ┆ ---          │
-         │ i64  ┆ str  ┆ str          │
-         ╞══════╪══════╪══════════════╡
-         │ 1    ┆ Y    ┆ [1,Y]        │
-         │ 2    ┆ Y    ┆ [2,Y]        │
-         │ 3    ┆ Y    ┆ [3,Y]        │
-         │ 1    ┆ N    ┆ [1,N]        │
-         │ 2    ┆ N    ┆ [2,N]        │
-         │ 3    ┆ N    ┆ [3,N]        │
-         └──────┴──────┴──────────────┘
          >>> concat_dimensions(df, prefix="x")
          shape: (6, 3)
          ┌──────┬──────┬──────────────┐
@@ -134,7 +126,7 @@ def concat_dimensions(
          │ 2    ┆ N    ┆ x[2,N]       │
          │ 3    ┆ N    ┆ x[3,N]       │
          └──────┴──────┴──────────────┘
-         >>> concat_dimensions(df, keep_dims=False)
+         >>> concat_dimensions(df, prefix="", keep_dims=False)
          shape: (6, 1)
          ┌──────────────┐
          │ concated_dim │
@@ -148,7 +140,8 @@ def concat_dimensions(
          │ [2,N]        │
          │ [3,N]        │
          └──────────────┘
-         >>> # Properly handles cases with no dimensions and ignores reserved columns
+
+         Properly handles cases with no dimensions and ignores reserved columns
          >>> df = pl.DataFrame({VAR_KEY: [1, 2]})
          >>> concat_dimensions(df, prefix="x")
          shape: (2, 2)
@@ -163,7 +156,7 @@ def concat_dimensions(
      """
      if prefix is None:
          prefix = ""
-     dimensions = [col for col in df.columns if col not in ignore_columns]
+     dimensions = [col for col in df.columns if col not in RESERVED_COL_KEYS]
      if dimensions:
          query = pl.concat_str(
              pl.lit(prefix + "["),
@@ -173,10 +166,7 @@ def concat_dimensions(
      else:
          query = pl.lit(prefix)

-     df = df.with_columns(query.alias(to_col))
-
-     if replace_spaces:
-         df = df.with_columns(pl.col(to_col).str.replace_all(" ", "_"))
+     df = df.with_columns(query.str.replace_all(" ", "_").alias(to_col))

      if not keep_dims:
          df = df.drop(*dimensions)
@@ -185,10 +175,12 @@ def concat_dimensions(


  def cast_coef_to_string(
-     df: pl.DataFrame, column_name: str = COEF_KEY, drop_ones: bool = True
+     df: pl.DataFrame,
+     column_name: str = COEF_KEY,
+     drop_ones: bool = True,
+     always_show_sign: bool = True,
  ) -> pl.DataFrame:
-     """
-     Converts column `column_name` of the dataframe `df` to a string. Rounds to `Config.print_float_precision` decimal places if not None.
+     """Converts column `column_name` of the DataFrame `df` to a string. Round to `Config.print_float_precision` decimal places if not None.

      Parameters:
          df:
@@ -196,7 +188,9 @@ def cast_coef_to_string(
          column_name:
              The name of the column to be casted.
          drop_ones:
-             If True, 1s are replaced with an empty string for non-constant terms.
+             If `True`, 1s are replaced with an empty string for non-constant terms.
+         always_show_sign:
+             If `True`, the sign of the coefficient is always shown, i.e. 1 becomes `+1` not just `1`.

      Examples:
          >>> import polars as pl
@@ -214,22 +208,26 @@ def cast_coef_to_string(
          │ +4  ┆ 4             │
          └─────┴───────────────┘
      """
-     df = df.with_columns(
-         pl.col(column_name).abs(),
-         _sign=pl.when(pl.col(column_name) < 0).then(pl.lit("-")).otherwise(pl.lit("+")),
-     )
-
      if Config.float_to_str_precision is not None:
          df = df.with_columns(pl.col(column_name).round(Config.float_to_str_precision))

+     if always_show_sign:
+         df = df.with_columns(
+             pl.col(column_name).abs(),
+             _sign=pl.when(pl.col(column_name) < 0)
+             .then(pl.lit("-"))
+             .otherwise(pl.lit("+")),
+         )
+
      df = df.with_columns(
-         pl.when(pl.col(column_name) == pl.col(column_name).floor())
+         pl.when(pl.col(column_name) == pl.col(column_name).round())
          .then(pl.col(column_name).cast(pl.Int64).cast(pl.String))
          .otherwise(pl.col(column_name).cast(pl.String))
          .alias(column_name)
      )

      if drop_ones:
+         assert always_show_sign, "drop_ones requires always_show_sign=True"
          condition = pl.col(column_name) == str(1)
          if VAR_KEY in df.columns:
              condition = condition & (pl.col(VAR_KEY) != CONST_TERM)
@@ -239,15 +237,16 @@ def cast_coef_to_string(
              .otherwise(pl.col(column_name))
              .alias(column_name)
          )
-     else:
-         df = df.with_columns(pl.col(column_name).cast(pl.Utf8))
-     return df.with_columns(pl.concat_str("_sign", column_name).alias(column_name)).drop(
-         "_sign"
-     )
+
+     if always_show_sign:
+         df = df.with_columns(
+             pl.concat_str("_sign", column_name).alias(column_name)
+         ).drop("_sign")
+     return df


  def unwrap_single_values(func):
-     """Decorator for functions that return DataFrames. Returned dataframes with a single value will instead return the value."""
+     """Returns the DataFrame unless it is a single value in which case return the value."""

      @wraps(func)
      def wrapper(*args, **kwargs):
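For reference, the sign handling that the new `always_show_sign` flag gates can be reproduced with plain Polars. A standalone sketch (illustration only, column name assumed; not pyoframe code) mirroring the logic added above:

    import polars as pl

    df = pl.DataFrame({"__coeff": [1.0, -2.5, 4.0]})

    # Record the sign separately, then work with the absolute value.
    df = df.with_columns(
        pl.col("__coeff").abs(),
        _sign=pl.when(pl.col("__coeff") < 0).then(pl.lit("-")).otherwise(pl.lit("+")),
    )

    # Re-attach the sign to the stringified coefficient: "+1.0", "-2.5", "+4.0".
    df = df.with_columns(
        pl.concat_str("_sign", pl.col("__coeff").cast(pl.String)).alias("__coeff")
    ).drop("_sign")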
@@ -259,52 +258,20 @@ def unwrap_single_values(func):
      return wrapper


- def dataframe_to_tupled_list(
-     df: pl.DataFrame, num_max_elements: Optional[int] = None
- ) -> str:
-     """
-     Converts a dataframe into a list of tuples. Used to print a Set to the console. See examples for behaviour.
-
-     Examples:
-         >>> df = pl.DataFrame({"x": [1, 2, 3, 4, 5]})
-         >>> dataframe_to_tupled_list(df)
-         '[1, 2, 3, 4, 5]'
-         >>> dataframe_to_tupled_list(df, 3)
-         '[1, 2, 3, ...]'
-
-         >>> df = pl.DataFrame({"x": [1, 2, 3, 4, 5], "y": [2, 3, 4, 5, 6]})
-         >>> dataframe_to_tupled_list(df, 3)
-         '[(1, 2), (2, 3), (3, 4), ...]'
-     """
-     elipse = False
-     if num_max_elements is not None:
-         if len(df) > num_max_elements:
-             elipse = True
-             df = df.head(num_max_elements)
-
-     res = (row for row in df.iter_rows())
-     if len(df.columns) == 1:
-         res = (row[0] for row in res)
-
-     res = str(list(res))
-     if elipse:
-         res = res[:-1] + ", ...]"
-     return res
-
-
  @dataclass
  class FuncArgs:
-     args: List
-     kwargs: Dict = field(default_factory=dict)
+     args: list
+     kwargs: dict = field(default_factory=dict)


  class Container:
-     """
-     A placeholder object that makes it easy to set and get attributes. Used in Model.attr and Model.params, for example.
+     """A placeholder object that makes it easy to set and get attributes. Used in Model.attr and Model.params, for example.

      Examples:
          >>> x = {}
-         >>> params = Container(setter=lambda n, v: x.__setitem__(n, v), getter=lambda n: x[n])
+         >>> params = Container(
+         ...     setter=lambda n, v: x.__setitem__(n, v), getter=lambda n: x[n]
+         ... )
          >>> params.a = 1
          >>> params.b = 2
          >>> params.a
@@ -329,22 +296,21 @@ class Container:


  class NamedVariableMapper:
-     """
-     Maps variables to a string representation using the object's name and dimensions.
+     """Maps variables to a string representation using the object's name and dimensions.

      Examples:
          >>> import polars as pl
          >>> m = pf.Model()
          >>> m.foo = pf.Variable(pl.DataFrame({"t": range(4)}))
-         >>> pf.sum(m.foo)
-         <Expression size=1 dimensions={} terms=4>
-         foo[0] + foo[1] + foo[2] + foo[3]
+         >>> m.foo.sum()
+         <Expression terms=4 type=linear>
+         foo[0] + foo[1] + foo[2] + foo[3]
      """

      CONST_TERM_NAME = "_ONE"
      NAME_COL = "__name"

-     def __init__(self, cls: Type["ModelElementWithId"]) -> None:
+     def __init__(self, cls: type[ModelElementWithId]) -> None:
          self._ID_COL = VAR_KEY
          self.mapping_registry = pl.DataFrame(
              {self._ID_COL: [], self.NAME_COL: []},
@@ -357,7 +323,7 @@ class NamedVariableMapper:
              )
          )

-     def add(self, element: "Variable") -> None:
+     def add(self, element: Variable) -> None:
          self._extend_registry(self._element_to_map(element))

      def _extend_registry(self, df: pl.DataFrame) -> None:
@@ -375,16 +341,17 @@ class NamedVariableMapper:
              validate="m:1",
              left_on=id_col,
              right_on=self._ID_COL,
+             maintain_order="left" if Config.maintain_order else None,
          ).rename({self.NAME_COL: to_col})

-     def _element_to_map(self, element) -> pl.DataFrame:
+     def _element_to_map(self, element: Variable) -> pl.DataFrame:
          element_name = element.name  # type: ignore
          assert element_name is not None, (
              "Element must have a name to be used in a named mapping."
          )
          element._assert_has_ids()
          return concat_dimensions(
-             element.data.select(element.dimensions_unsafe + [VAR_KEY]),
+             element.data.select(element._dimensions_unsafe + [VAR_KEY]),
              keep_dims=False,
              prefix=element_name,
              to_col=self.NAME_COL,
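The registry join above now passes `maintain_order`, a keyword available on recent Polars releases (the dev extra later in this diff pins polars>=1.32.3) that pins the row order of a join result. A standalone sketch of what the keyword does, independent of pyoframe:

    import polars as pl

    left = pl.DataFrame({"id": [3, 1, 2]})
    right = pl.DataFrame({"id": [1, 2, 3], "name": ["a", "b", "c"]})

    # maintain_order="left" keeps the row order of `left` in the result, which is
    # what the mapping registry opts into when Config.maintain_order is enabled.
    joined = left.join(right, on="id", how="left", maintain_order="left")
    assert joined["id"].to_list() == [3, 1, 2]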
@@ -392,19 +359,32 @@


  def for_solvers(*solvers: str):
-     """
-     Decorator that limits the function to only be called when the solver is in the `only` list.
-     """
+     """Limits the decorated function to only be available when the solver is in the `solvers` list."""

      def decorator(func):
          @wraps(func)
          def wrapper(self, *args, **kwargs):
-             if self.solver_name not in solvers:
+             if self.solver.name not in solvers:
                  raise NotImplementedError(
-                     f"Method '{func.__name__}' is not implemented for solver '{self.solver_name}'."
+                     f"Method '{func.__name__}' is not implemented for solver '{self.solver}'."
                  )
              return func(self, *args, **kwargs)

          return wrapper

      return decorator
+
+
+ # TODO: rename and change to return_expr once Set is split away from SupportsMath
+ def return_new(func: Callable[..., pl.DataFrame]) -> Callable[..., SupportsMath]:
+     """Decorator that upcasts the returned DataFrame to an Expression.
+
+     Requires the first argument (self) to support self._new().
+     """
+
+     @wraps(func)
+     def wrapper(self: SupportsMath, *args, **kwargs):
+         result = func(self, *args, **kwargs)
+         return self._new(result, name=f"{self.name}.{func.__name__}(…)")
+
+     return wrapper
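The `for_solvers` guard now reads `self.solver.name` rather than the old `self.solver_name`. A hypothetical, self-contained illustration of how the guard behaves (the Fake* classes and method name below are stand-ins, not pyoframe API):

    from functools import wraps

    def for_solvers(*solvers: str):  # same logic as the decorator above
        def decorator(func):
            @wraps(func)
            def wrapper(self, *args, **kwargs):
                if self.solver.name not in solvers:
                    raise NotImplementedError(
                        f"Method '{func.__name__}' is not implemented for solver '{self.solver}'."
                    )
                return func(self, *args, **kwargs)

            return wrapper

        return decorator

    class FakeSolver:
        name = "highs"

    class FakeModel:
        solver = FakeSolver()

        @for_solvers("gurobi")
        def gurobi_only(self):
            return "ok"

    try:
        FakeModel().gurobi_only()
    except NotImplementedError as e:
        print(e)  # rejected: the active solver is "highs", not "gurobi"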
pyoframe/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '0.2.1'
- __version_tuple__ = version_tuple = (0, 2, 1)
+ __version__ = version = '1.0.0a0'
+ __version_tuple__ = version_tuple = (1, 0, 0, 'a0')

  __commit_id__ = commit_id = None
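The fourth element of the new `version_tuple` carries the `a0` pre-release tag. Under PEP 440 this affects installation, since pip ignores pre-releases unless asked for them. A quick check using the third-party `packaging` library (an assumption here, not a pyoframe dependency):

    from packaging.version import Version

    v = Version("1.0.0a0")
    assert v.is_prerelease          # pip skips it unless --pre or an explicit pin is used
    assert v.release == (1, 0, 0)
    assert v.pre == ("a", 0)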
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyoframe
- Version: 0.2.1
+ Version: 1.0.0a0
  Summary: Blazing fast linear program interface
  Author-email: Bravos Power <dev@bravospower.com>
  License-Expression: MIT
@@ -20,30 +20,37 @@ Requires-Dist: numpy
  Requires-Dist: pyarrow
  Requires-Dist: pandas
  Requires-Dist: pyoptinterface<1,>=0.4.1
- Provides-Extra: dev
- Requires-Dist: ruff; extra == "dev"
- Requires-Dist: polars>=1.30.0; extra == "dev"
- Requires-Dist: bumpver; extra == "dev"
- Requires-Dist: pip-tools; extra == "dev"
- Requires-Dist: pytest; extra == "dev"
- Requires-Dist: pytest-cov; extra == "dev"
- Requires-Dist: pre-commit; extra == "dev"
- Requires-Dist: gurobipy; extra == "dev"
- Requires-Dist: highsbox; extra == "dev"
- Requires-Dist: coverage; extra == "dev"
- Requires-Dist: ipykernel; extra == "dev"
- Requires-Dist: pytest-markdown-docs; extra == "dev"
- Requires-Dist: mkdocs-material==9.*; extra == "dev"
- Requires-Dist: mkdocstrings[python]; extra == "dev"
- Requires-Dist: mkdocs-git-revision-date-localized-plugin; extra == "dev"
- Requires-Dist: mkdocs-git-committers-plugin-2; extra == "dev"
- Requires-Dist: mkdocs-gen-files; extra == "dev"
- Requires-Dist: mkdocs-section-index; extra == "dev"
- Requires-Dist: mkdocs-literate-nav; extra == "dev"
- Requires-Dist: mkdocs-table-reader-plugin; extra == "dev"
- Requires-Dist: markdown-hide-code>=0.1.1; extra == "dev"
  Provides-Extra: highs
  Requires-Dist: highsbox; extra == "highs"
+ Provides-Extra: ipopt
+ Requires-Dist: pyoptinterface[nlp]; extra == "ipopt"
+ Requires-Dist: llvmlite<=0.44.0; extra == "ipopt"
+ Provides-Extra: dev
+ Requires-Dist: ruff==0.12.11; extra == "dev"
+ Requires-Dist: polars>=1.32.3; extra == "dev"
+ Requires-Dist: pytest==8.4.1; extra == "dev"
+ Requires-Dist: pytest-cov==6.2.1; extra == "dev"
+ Requires-Dist: sybil[pytest]==9.2.0; extra == "dev"
+ Requires-Dist: pre-commit==4.3.0; extra == "dev"
+ Requires-Dist: gurobipy==12.0.3; extra == "dev"
+ Requires-Dist: coverage==7.10.6; extra == "dev"
+ Requires-Dist: ipykernel==6.30.1; extra == "dev"
+ Requires-Dist: highsbox; extra == "dev"
+ Requires-Dist: pyoptinterface[nlp]; extra == "dev"
+ Requires-Dist: llvmlite<=0.44.0; extra == "dev"
+ Provides-Extra: docs
+ Requires-Dist: mkdocs-material~=9.6.18; extra == "docs"
+ Requires-Dist: mkdocstrings[python]~=0.30.0; extra == "docs"
+ Requires-Dist: mkdocs-git-revision-date-localized-plugin~=1.4.7; extra == "docs"
+ Requires-Dist: mkdocs-git-committers-plugin-2~=2.5.0; extra == "docs"
+ Requires-Dist: mkdocs-gen-files~=0.5.0; extra == "docs"
+ Requires-Dist: mkdocs-section-index~=0.3.10; extra == "docs"
+ Requires-Dist: mkdocs-awesome-nav~=3.1.2; extra == "docs"
+ Requires-Dist: doccmd==2025.4.8; extra == "docs"
+ Requires-Dist: mkdocs-table-reader-plugin~=3.1.0; extra == "docs"
+ Requires-Dist: markdown-katex==202406.1035; extra == "docs"
+ Requires-Dist: mike==2.1.3; extra == "docs"
+ Requires-Dist: ruff==0.12.11; extra == "docs"
  Dynamic: license-file

  # Pyoframe: Fast and low-memory linear programming models
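Once the wheel is installed, the reorganized extras (`highs`, the new `ipopt`, the now-pinned `dev`, and the new `docs`) can be inspected at runtime with the standard library. A small sketch, with the expected values taken from the METADATA above:

    from importlib.metadata import metadata, requires

    meta = metadata("pyoframe")
    print(meta["Version"])                 # 1.0.0a0
    print(meta.get_all("Provides-Extra"))  # ['highs', 'ipopt', 'dev', 'docs']

    # Requires-Dist entries, including those gated behind `extra == "..."` markers:
    for req in requires("pyoframe") or []:
        print(req)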
@@ -56,11 +63,11 @@ Dynamic: license-file
  [![Open Bugs](https://img.shields.io/github/issues-search/Bravos-Power/pyoframe?query=label%3Abug%20is%3Aopen&label=Open%20Bugs)](https://github.com/Bravos-Power/pyoframe/issues?q=is%3Aopen+is%3Aissue+label%3Abug)


- A library to rapidly and memory-efficiently formulate large and sparse optimization models using Pandas or Polars dataframes.
+ A library to rapidly and memory-efficiently formulate large and sparse optimization models using Pandas or Polars DataFrames.

  ## **[Documentation](https://bravos-power.github.io/pyoframe/)**

- [Read the documentation](https://bravos-power.github.io/pyoframe/) to get started or to learn how to [contribute](https://bravos-power.github.io/pyoframe/contribute/).
+ [Read the documentation](https://bravos-power.github.io/pyoframe/) to get started or to learn how to [contribute](https://bravos-power.github.io/pyoframe/contribute/index.md).


  ## Acknowledgments
@@ -0,0 +1,15 @@
+ pyoframe/__init__.py,sha256=Nlql3FYed7bXWumvUeMd3rjnoL4l8XC5orO4uxWrDAc,839
+ pyoframe/_arithmetic.py,sha256=3_LkyDbKZ74KxY_KF_99PENj6FgZQa_hVl5RkPyFNJU,20420
+ pyoframe/_constants.py,sha256=afwAHreaAKLIrpIEAHtuCn7q9aZuFDEgfLn0ySGZEeY,16066
+ pyoframe/_core.py,sha256=xCPU0Uw8g18cxBQbA644RY0fjDan2lJ2pLw6c0ODL6s,113495
+ pyoframe/_model.py,sha256=h2sx-JpkSv6o2eS6qcdbQCfQi9Me-zv4IGXumPziBT4,21689
+ pyoframe/_model_element.py,sha256=8dvPlRc3hVnlvd-8bxze_ol1fWRd3QiN8UBS25OgwZ4,5771
+ pyoframe/_monkey_patch.py,sha256=j206jGoP4Q2eSQrmAkI047gSxftb80Tva0UEc32cDKY,3309
+ pyoframe/_objective.py,sha256=Sadl6rhweAKSf2XpRiRCCyAPUVKszjF9s7GEd8g74zg,4375
+ pyoframe/_utils.py,sha256=48YTdB1Tlfu-A-xDWb06zA-pXoVtrZGOVMAWw0ClOWM,12554
+ pyoframe/_version.py,sha256=KQnBwkHr_bCl4qNrncHrejtChdGSaDU6A5ii5fv_e0U,712
+ pyoframe-1.0.0a0.dist-info/licenses/LICENSE,sha256=u_Spw4ynlwTMRZeCX-uacv_hBU547pBygiA6d2ONNV4,1074
+ pyoframe-1.0.0a0.dist-info/METADATA,sha256=SSJg9dGl5xKSaUXnZESRYx8W_P1wGF07rhLSQXLnz5k,4039
+ pyoframe-1.0.0a0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ pyoframe-1.0.0a0.dist-info/top_level.txt,sha256=10z3OOJSVLriQ0IrFLMH8CH9zByugPWolqhlHlkNjV4,9
+ pyoframe-1.0.0a0.dist-info/RECORD,,
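Each RECORD row has the form `path,sha256=<urlsafe base64 digest without padding>,size`. A sketch of how an entry can be checked against the wheel contents (the wheel filename is assumed from this diff's title):

    import base64
    import hashlib
    import zipfile

    def record_hash(data: bytes) -> str:
        # PEP 376/427 style: urlsafe base64 of the SHA-256 digest, '=' padding stripped.
        digest = hashlib.sha256(data).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

    with zipfile.ZipFile("pyoframe-1.0.0a0-py3-none-any.whl") as wheel:
        data = wheel.read("pyoframe/_utils.py")
        print(len(data))          # expected: 12554, the recorded size
        print(record_hash(data))  # expected: sha256=48YTdB1Tlfu-A-xDWb06zA-pXoVtrZGOVMAWw0ClOWM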
pyoframe/constants.py DELETED
@@ -1,140 +0,0 @@
- """
- File containing shared constants used across the package.
- """
-
- import typing
- from enum import Enum
- from typing import Literal, Optional
-
- import polars as pl
- import pyoptinterface as poi
-
- COEF_KEY = "__coeff"
- VAR_KEY = "__variable_id"
- QUAD_VAR_KEY = "__quadratic_variable_id"
- CONSTRAINT_KEY = "__constraint_id"
- SOLUTION_KEY = "solution"
- DUAL_KEY = "dual"
- SUPPORTED_SOLVERS = ["gurobi", "highs"]
- SUPPORTED_SOLVER_TYPES = Literal["gurobi", "highs"]
- KEY_TYPE = pl.UInt32
-
- # Variable ID for constant terms. This variable ID is reserved.
- CONST_TERM = 0
-
- RESERVED_COL_KEYS = (
-     COEF_KEY,
-     VAR_KEY,
-     QUAD_VAR_KEY,
-     CONSTRAINT_KEY,
-     SOLUTION_KEY,
-     DUAL_KEY,
- )
-
-
- class _ConfigMeta(type):
-     """Metaclass for Config that stores the default values of all configuration options."""
-
-     def __init__(cls, name, bases, dct):
-         super().__init__(name, bases, dct)
-         cls._defaults = {
-             k: v
-             for k, v in dct.items()
-             if not k.startswith("_") and type(v) != classmethod  # noqa: E721 (didn't want to mess with it since it works)
-         }
-
-
- class Config(metaclass=_ConfigMeta):
-     """
-     Configuration options that apply to the entire library.
-     """
-
-     default_solver: Optional[SUPPORTED_SOLVER_TYPES] = None
-     disable_unmatched_checks: bool = False
-     float_to_str_precision: Optional[int] = 5
-     print_uses_variable_names: bool = True
-     print_max_line_length: int = 80
-     print_max_lines: int = 15
-     print_max_set_elements: int = 50
-     "Number of elements to show when printing a set to the console (additional elements are replaced with ...)"
-
-     enable_is_duplicated_expression_safety_check: bool = False
-
-     integer_tolerance: float = 1e-8
-     """
-     For convenience, Pyoframe returns the solution of integer and binary variables as integers not floating point values.
-     To do so, Pyoframe must convert the solver-provided floating point values to integers. To avoid unexpected rounding errors,
-     Pyoframe uses this tolerance to check that the floating point result is an integer as expected. Overly tight tolerances can trigger
-     unexpected errors. Setting the tolerance to zero disables the check.
-     """
-
-     @classmethod
-     def reset_defaults(cls):
-         """
-         Resets all configuration options to their default values.
-         """
-         for key, value in cls._defaults.items():
-             setattr(cls, key, value)
-
-
- class ConstraintSense(Enum):
-     LE = "<="
-     GE = ">="
-     EQ = "="
-
-     def to_poi(self):
-         if self == ConstraintSense.LE:
-             return poi.ConstraintSense.LessEqual
-         elif self == ConstraintSense.EQ:
-             return poi.ConstraintSense.Equal
-         elif self == ConstraintSense.GE:
-             return poi.ConstraintSense.GreaterEqual
-         else:
-             raise ValueError(f"Invalid constraint type: {self}")  # pragma: no cover
-
-
- class ObjSense(Enum):
-     MIN = "min"
-     MAX = "max"
-
-     def to_poi(self):
-         if self == ObjSense.MIN:
-             return poi.ObjectiveSense.Minimize
-         elif self == ObjSense.MAX:
-             return poi.ObjectiveSense.Maximize
-         else:
-             raise ValueError(f"Invalid objective sense: {self}")  # pragma: no cover
-
-
- class VType(Enum):
-     CONTINUOUS = "continuous"
-     BINARY = "binary"
-     INTEGER = "integer"
-
-     def to_poi(self):
-         if self == VType.CONTINUOUS:
-             return poi.VariableDomain.Continuous
-         elif self == VType.BINARY:
-             return poi.VariableDomain.Binary
-         elif self == VType.INTEGER:
-             return poi.VariableDomain.Integer
-         else:
-             raise ValueError(f"Invalid variable type: {self}")  # pragma: no cover
-
-
- class UnmatchedStrategy(Enum):
-     UNSET = "not_set"
-     DROP = "drop"
-     KEEP = "keep"
-
-
- # This is a hack to get the Literal type for VType
- # See: https://stackoverflow.com/questions/67292470/type-hinting-enum-member-value-in-python
- ObjSenseValue = Literal["min", "max"]
- VTypeValue = Literal["continuous", "binary", "integer"]
- for enum, type in [(ObjSense, ObjSenseValue), (VType, VTypeValue)]:
-     assert set(typing.get_args(type)) == {vtype.value for vtype in enum}
-
-
- class PyoframeError(Exception):
-     pass
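As the utility-module hunk at the top of this diff shows, these names now live in the private module `pyoframe._constants`. A migration sketch that works against either release (whether these constants remain part of the public API is not established by this diff):

    try:
        from pyoframe._constants import Config  # 1.0.0a0: module renamed and made private
    except ModuleNotFoundError:
        from pyoframe.constants import Config  # 0.2.1: old import path

    Config.float_to_str_precision = 5  # option defined in the module shown above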