or-tools 0.13.1 → 0.14.1
This diff shows the changes between publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/README.md +34 -7
- data/ext/or-tools/constraint.cpp +30 -50
- data/ext/or-tools/ext.cpp +2 -0
- data/ext/or-tools/extconf.rb +8 -2
- data/ext/or-tools/linear.cpp +100 -12
- data/ext/or-tools/math_opt.cpp +180 -0
- data/ext/or-tools/routing.cpp +13 -27
- data/lib/or-tools.rb +29 -18
- data/lib/or_tools/comparison.rb +7 -10
- data/lib/or_tools/constant.rb +16 -13
- data/lib/or_tools/cp_model.rb +8 -8
- data/lib/or_tools/cp_solver_solution_callback.rb +3 -3
- data/lib/or_tools/expression.rb +85 -0
- data/lib/or_tools/math_opt/model.rb +54 -0
- data/lib/or_tools/math_opt/variable.rb +15 -0
- data/lib/or_tools/product.rb +32 -0
- data/lib/or_tools/solver.rb +33 -15
- data/lib/or_tools/utils.rb +107 -0
- data/lib/or_tools/variable.rb +29 -0
- data/lib/or_tools/version.rb +1 -1
- metadata +12 -15
- data/lib/or_tools/bool_var.rb +0 -9
- data/lib/or_tools/comparison_operators.rb +0 -9
- data/lib/or_tools/int_var.rb +0 -5
- data/lib/or_tools/linear_constraint.rb +0 -50
- data/lib/or_tools/linear_expr.rb +0 -85
- data/lib/or_tools/mp_variable.rb +0 -11
- data/lib/or_tools/product_cst.rb +0 -35
- data/lib/or_tools/sat_int_var.rb +0 -29
- data/lib/or_tools/sat_linear_expr.rb +0 -59
- data/lib/or_tools/sum_array.rb +0 -23
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5803be6e2ff6de980c6f1a89ab9ccda1e2ee264139229bca865103c8379dc4a2
+  data.tar.gz: 323fda85d36cc8dadb6720a23ac3e9eaa881e3618e74044c8b04773bb979c16e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 48c9eba03b16bb2ce63edf62d52f33fc88595e691543f68dd1a2090adb7212958331534a97b801c7f54acce6626461bc0e268ac312ea34c1c50045dedcfbb3b4
+  data.tar.gz: 8d91150d41201f8e47129ef3b48cfb4b63b5cb91c09c87d7ef01f87de4b82ff6d287fb2ca5e3b5a4984152bdb2e5a33f4c5b8fe9495d6a9e89ab81b31c1674ac
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,13 @@
+## 0.14.1 (2024-12-04)
+
+- Added support for parameters to `Solver`
+- Fixed error with `inspect` for `MathOpt` variables
+
+## 0.14.0 (2024-10-22)
+
+- Added experimental support for `MathOpt`
+- Unified model building across solvers
+
 ## 0.13.1 (2024-10-05)
 
 - Added binary installation for Debian 12
data/README.md
CHANGED
@@ -23,6 +23,10 @@ Higher Level Interfaces
 - [Traveling Salesperson Problem (TSP)](#traveling-salesperson-problem-tsp)
 - [Sudoku](#sudoku)
 
+MathOpt
+
+- [Basic example](#basic-example)
+
 Linear Optimization
 
 - [Solving an LP Problem](#solving-an-lp-problem)
@@ -89,15 +93,15 @@ Specify people and their availabililty
 people = [
   {
     availability: [
-      {starts_at: Time.parse("
-      {starts_at: Time.parse("
+      {starts_at: Time.parse("2025-01-01 08:00:00"), ends_at: Time.parse("2025-01-01 16:00:00")},
+      {starts_at: Time.parse("2025-01-02 08:00:00"), ends_at: Time.parse("2025-01-02 16:00:00")}
     ],
     max_hours: 40 # optional, applies to entire scheduling period
   },
   {
     availability: [
-      {starts_at: Time.parse("
-      {starts_at: Time.parse("
+      {starts_at: Time.parse("2025-01-01 08:00:00"), ends_at: Time.parse("2025-01-01 16:00:00")},
+      {starts_at: Time.parse("2025-01-03 08:00:00"), ends_at: Time.parse("2025-01-03 16:00:00")}
     ],
     max_hours: 20
   }
@@ -108,9 +112,9 @@ Specify shifts
 
 ```ruby
 shifts = [
-  {starts_at: Time.parse("
-  {starts_at: Time.parse("
-  {starts_at: Time.parse("
+  {starts_at: Time.parse("2025-01-01 08:00:00"), ends_at: Time.parse("2025-01-01 16:00:00")},
+  {starts_at: Time.parse("2025-01-02 08:00:00"), ends_at: Time.parse("2025-01-02 16:00:00")},
+  {starts_at: Time.parse("2025-01-03 08:00:00"), ends_at: Time.parse("2025-01-03 16:00:00")}
 ]
 ```
 
@@ -311,6 +315,29 @@ sudoku = ORTools::Sudoku.new(grid, x: true, anti_knight: true, magic_square: tru
 sudoku.solution
 ```
 
+## MathOpt
+
+### Basic Example
+
+[Guide](https://developers.google.com/optimization/math_opt/basic_example)
+
+```ruby
+# build the model
+model = ORTools::MathOpt::Model.new("getting_started_lp")
+x = model.add_variable(-1.0, 1.5, "x")
+y = model.add_variable(0.0, 1.0, "y")
+model.add_linear_constraint(x + y <= 1.5)
+model.maximize(x + 2 * y)
+
+# solve
+result = model.solve
+
+# inspect the solution
+puts "Objective value: #{result.objective_value}"
+puts "x: #{result.variable_values[x]}"
+puts "y: #{result.variable_values[y]}"
+```
+
 ## Linear Optimization
 
 ### Solving an LP Problem
data/ext/or-tools/constraint.cpp
CHANGED
@@ -44,63 +44,33 @@ namespace Rice::detail
   public:
     LinearExpr convert(VALUE v)
     {
-      Object x(v);
       LinearExpr expr;
 
-
-
-      } else if (x.respond_to("vars")) {
-        Array vars = x.call("vars");
-        for (const auto& v : vars) {
-          // TODO clean up
-          auto cvar = (Array) v;
-          Object var = cvar[0];
-          auto coeff = From_Ruby<int64_t>().convert(cvar[1].value());
+      Rice::Object utils = Rice::define_module("ORTools").const_get("Utils");
+      Rice::Hash coeffs = utils.call("index_expression", Object(v));
 
-
-
-
-
-
-
-
-
-      } else {
-        if (x.is_a(rb_cBoolVar)) {
-          expr = From_Ruby<BoolVar>().convert(x.value());
+      for (const auto& entry : coeffs) {
+        Object var = entry.key;
+        auto coeff = From_Ruby<int64_t>().convert(entry.value.value());
+
+        if (var.is_nil()) {
+          expr += coeff;
+        } else if (var.is_a(rb_cBoolVar)) {
+          expr += From_Ruby<BoolVar>().convert(var.value()) * coeff;
         } else {
-          expr
+          expr += From_Ruby<IntVar>().convert(var.value()) * coeff;
         }
       }
 
      return expr;
    }
  };
-
-  template<>
-  class From_Ruby<std::vector<BoolVar>>
-  {
-  public:
-    std::vector<BoolVar> convert(VALUE v)
-    {
-      auto a = Array(v);
-      std::vector<BoolVar> vec;
-      vec.reserve(a.size());
-      for (const Object v : a) {
-        if (v.is_a(rb_cSatIntVar)) {
-          vec.push_back(From_Ruby<IntVar>().convert(v.value()).ToBoolVar());
-        } else {
-          vec.push_back(From_Ruby<BoolVar>().convert(v.value()));
-        }
-      }
-      return vec;
-    }
-  };
 }
 
 void init_constraint(Rice::Module& m) {
   Rice::define_class_under<Domain>(m, "Domain")
     .define_constructor(Rice::Constructor<Domain, int64_t, int64_t>())
+    .define_singleton_function("from_values", &Domain::FromValues)
     .define_method("min", &Domain::Min)
     .define_method("max", &Domain::Max);
 
@@ -119,13 +89,23 @@ void init_constraint(Rice::Module& m) {
         return self.OnlyEnforceIf(Rice::detail::From_Ruby<IntVar>().convert(literal).ToBoolVar());
       } else if (literal.is_a(rb_cArray)) {
         // TODO support IntVarSpan
-
+        auto a = Array(literal);
+        std::vector<BoolVar> vec;
+        vec.reserve(a.size());
+        for (const Object v : a) {
+          if (v.is_a(rb_cSatIntVar)) {
+            vec.push_back(Rice::detail::From_Ruby<IntVar>().convert(v.value()).ToBoolVar());
+          } else {
+            vec.push_back(Rice::detail::From_Ruby<BoolVar>().convert(v.value()));
+          }
+        }
+        return self.OnlyEnforceIf(vec);
       } else {
         return self.OnlyEnforceIf(Rice::detail::From_Ruby<BoolVar>().convert(literal));
       }
     });
 
-  rb_cBoolVar = Rice::define_class_under<BoolVar>(m, "
+  rb_cBoolVar = Rice::define_class_under<BoolVar>(m, "SatBoolVar")
     .define_method("name", &BoolVar::Name)
     .define_method("index", &BoolVar::index)
     .define_method("not", &BoolVar::Not)
@@ -425,17 +405,17 @@ void init_constraint(Rice::Module& m) {
       Model m;
 
       if (!callback.is_nil()) {
-        //
+        // use a single worker since Ruby code cannot be run in a non-Ruby thread
        parameters.set_num_search_workers(1);
 
        m.Add(NewFeasibleSolutionObserver(
          [&callback](const CpSolverResponse& r) {
-
-
-            // TODO find a better way to do this
-            callback.call("response=", r);
-            callback.call("on_solution_callback");
+            if (!ruby_native_thread_p()) {
+              throw std::runtime_error("Non-Ruby thread");
            }
+
+            callback.call("response=", r);
+            callback.call("on_solution_callback");
          })
        );
      }
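Note on the converter change above: `From_Ruby<LinearExpr>` no longer walks Ruby expression objects in C++. It now calls `ORTools::Utils.index_expression` (added in `data/lib/or_tools/utils.rb`, whose contents are not part of this section) and only consumes the returned hash. A rough Ruby sketch of the contract the C++ loop assumes; the expression-building call is illustrative, not a documented API:

```ruby
require "or-tools"

# Hypothetical illustration of the hash consumed by From_Ruby<LinearExpr>:
# keys are variables (or nil for the constant term), values are int64
# coefficients.
model = ORTools::CpModel.new
x = model.new_int_var(0, 10, "x")
b = model.new_bool_var("b")

coeffs = ORTools::Utils.index_expression(x + b + 3)
# conceptually: {x => 1, b => 1, nil => 3}

# The C++ loop folds this into a CP-SAT LinearExpr: a nil key adds the
# constant, SatBoolVar keys use the BoolVar overload, and every other key
# is treated as an IntVar.
```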
data/ext/or-tools/ext.cpp
CHANGED
@@ -10,6 +10,7 @@ void init_assignment(Rice::Module& m);
 void init_bin_packing(Rice::Module& m);
 void init_constraint(Rice::Module& m);
 void init_linear(Rice::Module& m);
+void init_math_opt(Rice::Module& m);
 void init_network_flows(Rice::Module& m);
 void init_routing(Rice::Module& m);
 
@@ -24,6 +25,7 @@ void Init_ext()
   init_bin_packing(m);
   init_constraint(m);
   init_linear(m);
+  init_math_opt(m);
   init_network_flows(m);
   init_routing(m);
 
data/ext/or-tools/extconf.rb
CHANGED
@@ -2,8 +2,14 @@ require "mkmf-rice"
 
 $CXXFLAGS << " -std=c++17 $(optflags) -DUSE_CBC"
 
-#
-$CXXFLAGS << " -
+# show warnings
+$CXXFLAGS << " -Wall -Wextra"
+
+# hide or-tools warnings
+$CXXFLAGS << " -Wno-sign-compare -Wno-ignored-qualifiers -Wno-unused-parameter -Wno-missing-field-initializers"
+
+# hide Rice warnings
+$CXXFLAGS << " -Wno-implicit-fallthrough"
 
 inc, lib = dir_config("or-tools")
 if inc || lib
data/ext/or-tools/linear.cpp
CHANGED
@@ -2,10 +2,9 @@
 
 #include "ext.h"
 
-using operations_research::LinearExpr;
-using operations_research::LinearRange;
 using operations_research::MPConstraint;
 using operations_research::MPObjective;
+using operations_research::MPSolverParameters;
 using operations_research::MPSolver;
 using operations_research::MPVariable;
 
@@ -45,12 +44,7 @@ namespace Rice::detail
 }
 
 void init_linear(Rice::Module& m) {
-  Rice::define_class_under<
-
-  auto rb_cLinearExpr = Rice::define_class_under<LinearExpr>(m, "LinearExpr");
-  rb_cLinearExpr.define_constructor(Rice::Constructor<LinearExpr>());
-
-  Rice::define_class_under<MPVariable, LinearExpr>(m, "MPVariable")
+  Rice::define_class_under<MPVariable>(m, "MPVariable")
     .define_method("name", &MPVariable::name)
     .define_method("solution_value", &MPVariable::solution_value);
 
@@ -63,10 +57,104 @@ void init_linear(Rice::Module& m) {
     .define_method("set_coefficient", &MPObjective::SetCoefficient)
     .define_method("set_offset", &MPObjective::SetOffset)
     .define_method("set_maximization", &MPObjective::SetMaximization)
+    .define_method("best_bound", &MPObjective::BestBound)
     .define_method("set_minimization", &MPObjective::SetMinimization);
 
+  Rice::define_class_under<MPSolverParameters>(m, "MPSolverParameters")
+    .define_constructor(Rice::Constructor<MPSolverParameters>())
+    .define_method("reset", &MPSolverParameters::Reset)
+    .define_method(
+      "relative_mip_gap=",
+      [](MPSolverParameters& self, double relative_mip_gap) {
+        self.SetDoubleParam(MPSolverParameters::DoubleParam::RELATIVE_MIP_GAP, relative_mip_gap);
+      })
+    .define_method(
+      "relative_mip_gap",
+      [](MPSolverParameters& self) {
+        return self.GetDoubleParam(MPSolverParameters::DoubleParam::RELATIVE_MIP_GAP);
+      })
+    .define_method(
+      "primal_tolerance=",
+      [](MPSolverParameters& self, double primal_tolerance) {
+        self.SetDoubleParam(MPSolverParameters::DoubleParam::PRIMAL_TOLERANCE, primal_tolerance);
+      })
+    .define_method(
+      "primal_tolerance",
+      [](MPSolverParameters& self) {
+        return self.GetDoubleParam(MPSolverParameters::DoubleParam::PRIMAL_TOLERANCE);
+      })
+    .define_method(
+      "dual_tolerance=",
+      [](MPSolverParameters& self, double dual_tolerance) {
+        self.SetDoubleParam(MPSolverParameters::DoubleParam::DUAL_TOLERANCE, dual_tolerance);
+      })
+    .define_method(
+      "dual_tolerance",
+      [](MPSolverParameters& self) {
+        return self.GetDoubleParam(MPSolverParameters::DoubleParam::DUAL_TOLERANCE);
+      })
+    .define_method(
+      "presolve=",
+      [](MPSolverParameters& self, bool value) {
+        int presolve;
+        if (value) {
+          presolve = MPSolverParameters::PresolveValues::PRESOLVE_ON;
+        } else {
+          presolve = MPSolverParameters::PresolveValues::PRESOLVE_OFF;
+        }
+        self.SetIntegerParam(MPSolverParameters::IntegerParam::PRESOLVE, presolve);
+      })
+    .define_method(
+      "presolve",
+      [](MPSolverParameters& self) {
+        int presolve = self.GetIntegerParam(MPSolverParameters::IntegerParam::PRESOLVE);
+        return presolve != MPSolverParameters::PresolveValues::PRESOLVE_OFF;
+      })
+    .define_method(
+      "incrementality=",
+      [](MPSolverParameters& self, bool value) {
+        int incrementality;
+        if (value) {
+          incrementality = MPSolverParameters::IncrementalityValues::INCREMENTALITY_ON;
+        } else {
+          incrementality = MPSolverParameters::IncrementalityValues::INCREMENTALITY_OFF;
+        }
+        self.SetIntegerParam(MPSolverParameters::IntegerParam::INCREMENTALITY, incrementality);
+      })
+    .define_method(
+      "incrementality",
+      [](MPSolverParameters& self) {
+        int incrementality = self.GetIntegerParam(MPSolverParameters::IntegerParam::INCREMENTALITY);
+        return incrementality != MPSolverParameters::IncrementalityValues::INCREMENTALITY_OFF;
+      })
+    .define_method(
+      "scaling=",
+      [](MPSolverParameters& self, bool value) {
+        int scaling;
+        if (value) {
+          scaling = MPSolverParameters::ScalingValues::SCALING_ON;
+        } else {
+          scaling = MPSolverParameters::ScalingValues::SCALING_OFF;
+        }
+        self.SetIntegerParam(MPSolverParameters::IntegerParam::SCALING, scaling);
+      })
+    .define_method(
+      "scaling",
+      [](MPSolverParameters& self) {
+        int scaling = self.GetIntegerParam(MPSolverParameters::IntegerParam::SCALING);
+        return scaling != MPSolverParameters::ScalingValues::SCALING_OFF;
+      });
+
   Rice::define_class_under<MPSolver>(m, "Solver")
-    .
+    .define_singleton_function(
+      "_new",
+      [](const std::string& name, MPSolver::OptimizationProblemType problem_type) {
+        std::unique_ptr<MPSolver> solver(new MPSolver(name, problem_type));
+        if (!solver) {
+          throw std::runtime_error("Unrecognized solver type");
+        }
+        return solver;
+      })
     .define_singleton_function(
       "_create",
       [](const std::string& solver_id) {
@@ -108,9 +196,9 @@ void init_linear(Rice::Module& m) {
         return self.MakeRowConstraint(lb, ub);
       })
     .define_method(
-      "
-      [](MPSolver& self) {
-        auto status = self.Solve();
+      "_solve",
+      [](MPSolver& self, MPSolverParameters& params) {
+        auto status = self.Solve(params);
 
         if (status == MPSolver::ResultStatus::OPTIMAL) {
           return Symbol("optimal");
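The `MPSolverParameters` bindings above back the "Added support for parameters to `Solver`" entry in the 0.14.1 changelog; the Ruby-side wiring lives in `data/lib/or_tools/solver.rb`, which changed in this release but is not shown in this section. A minimal sketch of driving the bound setters; how the Ruby `Solver#solve` forwards the object to `_solve` is an assumption here:

```ruby
require "or-tools"

# Each setter below maps one-to-one to a binding added above
# (RELATIVE_MIP_GAP, PRIMAL_TOLERANCE, PRESOLVE, SCALING, ...).
params = ORTools::MPSolverParameters.new
params.relative_mip_gap = 0.01  # accept solutions within 1% of optimal
params.primal_tolerance = 1e-7
params.presolve = true          # PRESOLVE_ON
params.scaling = false          # SCALING_OFF

params.relative_mip_gap # => 0.01
params.presolve         # => true

# Assumption: the Ruby Solver#solve (data/lib/or_tools/solver.rb) passes an
# MPSolverParameters instance through to the _solve binding, which calls
# MPSolver::Solve(params) and returns a status symbol such as :optimal.
```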
data/ext/or-tools/math_opt.cpp
ADDED
@@ -0,0 +1,180 @@
+#include "absl/log/check.h"
+#include "absl/status/statusor.h"
+#include "ortools/base/init_google.h"
+#include "ortools/math_opt/cpp/math_opt.h"
+
+#include "ext.h"
+
+using operations_research::math_opt::LinearConstraint;
+using operations_research::math_opt::Model;
+using operations_research::math_opt::Solve;
+using operations_research::math_opt::SolveArguments;
+using operations_research::math_opt::SolveResult;
+using operations_research::math_opt::SolverType;
+using operations_research::math_opt::Termination;
+using operations_research::math_opt::TerminationReason;
+using operations_research::math_opt::Variable;
+
+namespace Rice::detail
+{
+  template<>
+  struct Type<SolverType>
+  {
+    static bool verify()
+    {
+      return true;
+    }
+  };
+
+  template<>
+  struct From_Ruby<SolverType>
+  {
+    static SolverType convert(VALUE x)
+    {
+      auto s = Symbol(x).str();
+      if (s == "gscip") {
+        return SolverType::kGscip;
+      } else if (s == "gurobi") {
+        return SolverType::kGurobi;
+      } else if (s == "glop") {
+        return SolverType::kGlop;
+      } else if (s == "cpsat") {
+        return SolverType::kCpSat;
+      } else if (s == "pdlp") {
+        return SolverType::kPdlp;
+      } else if (s == "glpk") {
+        return SolverType::kGlpk;
+      } else if (s == "ecos") {
+        return SolverType::kEcos;
+      } else if (s == "scs") {
+        return SolverType::kScs;
+      } else if (s == "highs") {
+        return SolverType::kHighs;
+      } else if (s == "santorini") {
+        return SolverType::kSantorini;
+      } else {
+        throw std::runtime_error("Unknown solver type: " + s);
+      }
+    }
+  };
+}
+
+void init_math_opt(Rice::Module& m) {
+  auto mathopt = Rice::define_module_under(m, "MathOpt");
+
+  Rice::define_class_under<Variable>(mathopt, "Variable")
+    .define_method("id", &Variable::id)
+    .define_method("name", &Variable::name)
+    .define_method(
+      "_eql?",
+      [](Variable& self, Variable &other) {
+        return (bool) (self == other);
+      });
+
+  Rice::define_class_under<LinearConstraint>(mathopt, "LinearConstraint");
+
+  Rice::define_class_under<Termination>(mathopt, "Termination")
+    .define_method(
+      "reason",
+      [](Termination& self) {
+        auto reason = self.reason;
+
+        if (reason == TerminationReason::kOptimal) {
+          return Rice::Symbol("optimal");
+        } else if (reason == TerminationReason::kInfeasible) {
+          return Rice::Symbol("infeasible");
+        } else if (reason == TerminationReason::kUnbounded) {
+          return Rice::Symbol("unbounded");
+        } else if (reason == TerminationReason::kInfeasibleOrUnbounded) {
+          return Rice::Symbol("infeasible_or_unbounded");
+        } else if (reason == TerminationReason::kImprecise) {
+          return Rice::Symbol("imprecise");
+        } else if (reason == TerminationReason::kFeasible) {
+          return Rice::Symbol("feasible");
+        } else if (reason == TerminationReason::kNoSolutionFound) {
+          return Rice::Symbol("no_solution_found");
+        } else if (reason == TerminationReason::kNumericalError) {
+          return Rice::Symbol("numerical_error");
+        } else if (reason == TerminationReason::kOtherError) {
+          return Rice::Symbol("other");
+        } else {
+          throw std::runtime_error("Unknown termination reason");
+        }
+      });
+
+  Rice::define_class_under<SolveResult>(mathopt, "SolveResult")
+    .define_method(
+      "termination",
+      [](SolveResult& self) {
+        return self.termination;
+      })
+    .define_method(
+      "objective_value",
+      [](SolveResult& self) {
+        return self.objective_value();
+      })
+    .define_method(
+      "variable_values",
+      [](SolveResult& self) {
+        Rice::Hash map;
+        for (auto& [k, v] : self.variable_values()) {
+          map[k] = v;
+        }
+        return map;
+      });
+
+  Rice::define_class_under<Model>(mathopt, "Model")
+    .define_constructor(Rice::Constructor<Model, std::string>())
+    .define_method("add_variable", &Model::AddContinuousVariable)
+    .define_method("add_integer_variable", &Model::AddIntegerVariable)
+    .define_method("add_binary_variable", &Model::AddBinaryVariable)
+    .define_method(
+      "_add_linear_constraint",
+      [](Model& self) {
+        return self.AddLinearConstraint();
+      })
+    .define_method(
+      "_set_upper_bound",
+      [](Model& self, LinearConstraint constraint, double upper_bound) {
+        self.set_upper_bound(constraint, upper_bound);
+      })
+    .define_method(
+      "_set_lower_bound",
+      [](Model& self, LinearConstraint constraint, double upper_bound) {
+        self.set_lower_bound(constraint, upper_bound);
+      })
+    .define_method("_set_coefficient", &Model::set_coefficient)
+    .define_method(
+      "_set_objective_coefficient",
+      [](Model& self, Variable variable, double value) {
+        self.set_objective_coefficient(variable, value);
+      })
+    .define_method("_clear_objective", &Model::clear_objective)
+    .define_method(
+      "_set_objective_offset",
+      [](Model& self, double value) {
+        self.set_objective_offset(value);
+      })
+    .define_method(
+      "_set_maximize",
+      [](Model& self) {
+        self.set_maximize();
+      })
+    .define_method(
+      "_set_minimize",
+      [](Model& self) {
+        self.set_minimize();
+      })
+    .define_method(
+      "_solve",
+      [](Model& self, SolverType solver_type) {
+        SolveArguments args;
+        auto result = Solve(self, solver_type, args);
+
+        if (!result.ok()) {
+          throw std::invalid_argument(std::string{result.status().message()});
+        }
+
+        return *result;
+      });
+}
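These bindings sit beneath the new Ruby wrappers in `data/lib/or_tools/math_opt/model.rb` and `data/lib/or_tools/math_opt/variable.rb` (listed above but not shown in this section); the README's basic example exercises them end to end. A short sketch of checking the outcome through the bound `SolveResult#termination` and `Termination#reason` methods, with the error handling written as an illustration rather than documented behavior:

```ruby
require "or-tools"

# same model as the README basic example
model = ORTools::MathOpt::Model.new("getting_started_lp")
x = model.add_variable(-1.0, 1.5, "x")
y = model.add_variable(0.0, 1.0, "y")
model.add_linear_constraint(x + y <= 1.5)
model.maximize(x + 2 * y)

result = model.solve

# Termination#reason (bound above) returns a symbol such as :optimal,
# :infeasible, :unbounded, or :no_solution_found
case result.termination.reason
when :optimal, :feasible
  puts "Objective value: #{result.objective_value}"
  puts "x: #{result.variable_values[x]}"
  puts "y: #{result.variable_values[y]}"
else
  raise "No solution found (#{result.termination.reason})"
end
```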