pycsp3-scheduling 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycsp3_scheduling/__init__.py +220 -0
- pycsp3_scheduling/constraints/__init__.py +87 -0
- pycsp3_scheduling/constraints/_pycsp3.py +701 -0
- pycsp3_scheduling/constraints/cumulative.py +227 -0
- pycsp3_scheduling/constraints/grouping.py +382 -0
- pycsp3_scheduling/constraints/precedence.py +376 -0
- pycsp3_scheduling/constraints/sequence.py +814 -0
- pycsp3_scheduling/expressions/__init__.py +80 -0
- pycsp3_scheduling/expressions/element.py +313 -0
- pycsp3_scheduling/expressions/interval_expr.py +495 -0
- pycsp3_scheduling/expressions/sequence_expr.py +865 -0
- pycsp3_scheduling/functions/__init__.py +111 -0
- pycsp3_scheduling/functions/cumul_functions.py +891 -0
- pycsp3_scheduling/functions/state_functions.py +494 -0
- pycsp3_scheduling/interop.py +356 -0
- pycsp3_scheduling/output/__init__.py +13 -0
- pycsp3_scheduling/solvers/__init__.py +14 -0
- pycsp3_scheduling/solvers/adapters/__init__.py +7 -0
- pycsp3_scheduling/variables/__init__.py +45 -0
- pycsp3_scheduling/variables/interval.py +450 -0
- pycsp3_scheduling/variables/sequence.py +244 -0
- pycsp3_scheduling/visu.py +1315 -0
- pycsp3_scheduling-0.2.1.dist-info/METADATA +234 -0
- pycsp3_scheduling-0.2.1.dist-info/RECORD +26 -0
- pycsp3_scheduling-0.2.1.dist-info/WHEEL +4 -0
- pycsp3_scheduling-0.2.1.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,701 @@
"""
Helpers for bridging IntervalVar objects to pycsp3 variables.

This module handles the translation of high-level IntervalVar scheduling objects
into low-level pycsp3 integer variables and constraints that can be exported to
XCSP3 XML format.

Key concepts:
- Each IntervalVar is decomposed into integer variables: start, length, presence
- Intensity functions (variable work rates) are discretized into table constraints
"""

from __future__ import annotations

import bisect
from typing import TYPE_CHECKING, Any

from pycsp3_scheduling.variables.interval import (
    INTERVAL_MAX,
    INTERVAL_MIN,
    IntervalVar,
    Step,
    get_registered_intervals,
)

if TYPE_CHECKING:
    from pycsp3.classes.main.variables import Variable

_start_vars: dict[IntervalVar, Any] = {}
_length_vars: dict[IntervalVar, Any] = {}
_presence_vars: dict[IntervalVar, Any] = {}
# Track which intervals have had their intensity constraints posted
_intensity_constraints_posted: set[IntervalVar] = set()


# =============================================================================
# INTENSITY FUNCTION DISCRETIZATION
# =============================================================================
#
# Intensity functions model variable work rates over time. They relate three
# quantities for an interval:
#
# - size: the amount of "work" to be done (e.g., 10 units of processing)
# - length: the elapsed time (e.g., 20 time units)
# - intensity: the work rate at each time point (0 to granularity)
#
# The fundamental equation is:
#
#     size * granularity = sum of intensity(t) for t in [start, start + length)
#
# Example: A task with size=10 and granularity=100
#   - At 100% intensity (value=100): length = 10 (10 * 100 = 10 * 100)
#   - At 50% intensity (value=50): length = 20 (10 * 100 = 20 * 50)
#
# For XCSP3, we discretize this relationship into a table constraint that
# maps (start, size) -> length, precomputing valid combinations.
# =============================================================================
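
# A small worked example of the discretization above, using assumed numbers:
# with intensity steps [(0, 100), (10, 50)] and granularity 100, a task of
# size 6 started at t=0 needs length 6 (600 = 6 * 100 work, all at rate 100),
# while a task of size 12 started at t=0 needs length 14
# (10 * 100 + 4 * 50 = 1200 = 12 * 100).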


def _intensity_at_binary(step_positions: list[int], step_values: list[int], t: int) -> int:
    """
    Evaluate the stepwise intensity function at time t using binary search.

    Optimized version that uses pre-extracted positions and values arrays
    with binary search for O(log n) lookup instead of O(n) linear scan.

    Args:
        step_positions: Sorted list of step x-coordinates.
        step_values: Corresponding intensity values at each position.
        t: The time point to evaluate.

    Returns:
        The intensity value at time t.
    """
    if not step_positions:
        return 0

    # Binary search: find rightmost position <= t
    # bisect_right gives insertion point, so index - 1 is the applicable step
    idx = bisect.bisect_right(step_positions, t) - 1

    if idx < 0:
        # t is before the first step
        return 0

    return step_values[idx]
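
# Worked example: with step_positions = [5, 10] and step_values = [80, 50],
# _intensity_at_binary([5, 10], [80, 50], 3) == 0   (t before the first step)
# _intensity_at_binary([5, 10], [80, 50], 7) == 80
# _intensity_at_binary([5, 10], [80, 50], 12) == 50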


def _intensity_at(steps: list[Step], t: int) -> int:
    """
    Evaluate the stepwise intensity function at time t.

    A stepwise function is defined by a list of (x, value) pairs where:
    - The function equals 0 for t < first step's x
    - The function equals the step's value for t >= step's x until the next step

    Example:
        steps = [(5, 80), (10, 50)]
        - t < 5: returns 0 (default before first step)
        - 5 <= t < 10: returns 80
        - t >= 10: returns 50

    Args:
        steps: List of (x, value) pairs defining the stepwise function.
            Must be sorted by x in strictly increasing order.
        t: The time point to evaluate.

    Returns:
        The intensity value at time t.
    """
    if not steps:
        return 0

    # Use binary search for O(log n) lookup
    positions = [x for x, _ in steps]
    idx = bisect.bisect_right(positions, t) - 1

    if idx < 0:
        return 0

    return steps[idx][1]


def _integrate_intensity(steps: list[Step], start: int, end: int) -> int:
    """
    Compute the integral (sum) of intensity over the interval [start, end).

    For discrete time, this is the sum of intensity(t) for t in [start, end).
    This represents the total "work" that can be done in this time window.

    Example:
        steps = [(0, 100), (10, 50)]  # 100% until t=10, then 50%
        _integrate_intensity(steps, 0, 20) = 10*100 + 10*50 = 1500

    The algorithm walks through time, accumulating intensity values.
    For efficiency with large ranges, it processes step boundaries
    rather than iterating over every time point.

    Args:
        steps: The stepwise intensity function.
        start: Start of the integration interval (inclusive).
        end: End of the integration interval (exclusive).

    Returns:
        The sum of intensity values over [start, end).
    """
    if end <= start:
        return 0

    if not steps:
        return 0

    # Pre-extract positions and values for efficient binary search
    step_positions = [x for x, _ in steps]
    step_values = [v for _, v in steps]

    # Use binary search to find relevant change points in range [start, end)
    # Find first step position > start
    left_idx = bisect.bisect_right(step_positions, start)
    # Find first step position >= end
    right_idx = bisect.bisect_left(step_positions, end)

    # Build change points: start, relevant step positions, end
    change_points = [start]
    change_points.extend(step_positions[left_idx:right_idx])
    change_points.append(end)

    # Integrate segment by segment
    # Between change points, intensity is constant
    total = 0
    for i in range(len(change_points) - 1):
        seg_start = change_points[i]
        seg_end = change_points[i + 1]
        # Intensity is constant in this segment, evaluate at segment start
        intensity = _intensity_at_binary(step_positions, step_values, seg_start)
        # Add contribution: intensity * segment_length
        total += intensity * (seg_end - seg_start)

    return total
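
# Worked example: with steps = [(0, 100), (10, 50)],
# _integrate_intensity(steps, 5, 15) == 5 * 100 + 5 * 50 == 750
# (segments [5, 10) at intensity 100 and [10, 15) at intensity 50).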


def _find_length_for_work(
    start: int,
    target_work: int,
    max_length: int,
    *,
    steps: list[Step] | None = None,
    step_positions: list[int] | None = None,
    step_values: list[int] | None = None,
) -> int | None:
    """
    Find the length needed to complete target_work starting at time start.

    Given the intensity function, we need to find length such that:
        sum of intensity(t) for t in [start, start + length) = target_work

    This is the inverse of integration: given the target integral, find the
    upper bound of the integration interval.

    Algorithm:
        We incrementally extend the interval until we accumulate enough work.
        For efficiency, we jump between step boundaries rather than iterating
        one time unit at a time. Uses binary search and index-based iteration
        for O(log n) lookups and O(1) step advancement.

    Args:
        start: The start time of the interval.
        target_work: The work to complete (size * granularity).
        max_length: Maximum allowed length (to bound the search).
        steps: The stepwise intensity function as list of (x, value) tuples.
            Either steps OR (step_positions, step_values) must be provided.
        step_positions: Pre-extracted sorted list of step x-coordinates.
            For better performance when calling repeatedly.
        step_values: Pre-extracted list of intensity values at each position.

    Returns:
        The length needed, or None if target_work cannot be achieved
        within max_length (e.g., if intensity is 0).
    """
    if target_work <= 0:
        return 0

    # Extract positions and values if not provided
    if step_positions is None or step_values is None:
        if steps is None or not steps:
            # No intensity steps means intensity is 0 everywhere
            return None
        step_positions = [x for x, _ in steps]
        step_values = [v for _, v in steps]

    if not step_positions:
        return None

    end_limit = start + max_length

    # Use binary search to find first step position > start
    step_idx = bisect.bisect_right(step_positions, start)
    num_steps = len(step_positions)

    accumulated_work = 0
    current_pos = start

    # Process segment by segment using index instead of list filtering
    while current_pos < end_limit:
        # Find intensity at current position using binary search
        current_intensity = _intensity_at_binary(step_positions, step_values, current_pos)

        # Find the next change point (either next step boundary or end_limit)
        if step_idx < num_steps:
            next_boundary = min(step_positions[step_idx], end_limit)
        else:
            next_boundary = end_limit

        # How much work can we do in this segment?
        segment_length = next_boundary - current_pos
        segment_work = current_intensity * segment_length

        # Check if we can finish within this segment
        remaining_work = target_work - accumulated_work
        if current_intensity > 0 and segment_work >= remaining_work:
            # We can finish in this segment
            # Find exact length: remaining_work = current_intensity * extra_length
            # extra_length = remaining_work / current_intensity (ceiling division)
            extra_length = (remaining_work + current_intensity - 1) // current_intensity
            return current_pos - start + extra_length

        # Accumulate and move to next segment
        accumulated_work += segment_work
        current_pos = next_boundary

        # Advance step index instead of filtering list (O(1) vs O(n))
        if step_idx < num_steps and step_positions[step_idx] <= current_pos:
            step_idx += 1

    # Couldn't complete the work within max_length
    return None
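
# Worked examples: with steps = [(0, 100), (10, 50)],
# _find_length_for_work(5, 1000, 100, steps=steps) == 15
# (5 units at intensity 100 plus 10 units at intensity 50 = 500 + 500 = 1000);
# with steps = [(0, 100), (10, 0)],
# _find_length_for_work(0, 2000, 100, steps=steps) is None, because only 1000
# units of work are reachable before the intensity drops to 0.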


def _compute_intensity_table(
    interval: IntervalVar,
    horizon: int,
) -> list[tuple[int, int, int]] | list[tuple[int, int]] | None:
    """
    Compute the discretized (start, [size,] length) table for an interval with intensity.

    This is the core discretization function. For each valid (start, size) combination,
    it computes the required length to complete the work given the intensity function.

    The relationship is:
        size * granularity = integral of intensity over [start, start + length)

    For fixed size intervals, returns list of (start, length) tuples.
    For variable size intervals, returns list of (start, size, length) tuples.

    Args:
        interval: The interval variable with intensity function.
        horizon: The scheduling horizon (max end time).

    Returns:
        List of valid tuples, or None if no intensity function is defined.
    """
    if interval.intensity is None:
        return None

    steps = interval.intensity
    granularity = interval.granularity

    # Determine the domain bounds
    start_min = interval.start_min
    start_max = min(interval.start_max, horizon)
    size_min = interval.size_min
    size_max = interval.size_max
    length_min = interval.length_min
    length_max = interval.length_max

    # Check if size is fixed (common case, more efficient)
    size_is_fixed = size_min == size_max

    # Pre-extract step positions and values ONCE for all iterations
    # This avoids repeated list comprehensions in _find_length_for_work
    step_positions = [x for x, _ in steps]
    step_values = [v for _, v in steps]

    table: list[tuple[int, ...]] = []

    if size_is_fixed:
        # Fixed size: compute (start, length) pairs
        size = size_min
        target_work = size * granularity

        for start in range(start_min, start_max + 1):
            # Use pre-extracted arrays for better performance
            length = _find_length_for_work(
                start, target_work, length_max,
                step_positions=step_positions, step_values=step_values
            )
            if length is not None:
                # Verify the length is within bounds
                if length_min <= length <= length_max:
                    # Verify end time is within horizon
                    if start + length <= horizon:
                        table.append((start, length))
    else:
        # Variable size: compute (start, size, length) triples
        for start in range(start_min, start_max + 1):
            for size in range(size_min, size_max + 1):
                target_work = size * granularity
                # Use pre-extracted arrays for better performance
                length = _find_length_for_work(
                    start, target_work, length_max,
                    step_positions=step_positions, step_values=step_values
                )
                if length is not None:
                    if length_min <= length <= length_max:
                        if start + length <= horizon:
                            table.append((start, size, length))

    return table
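
# Illustrative result (hypothetical bounds): for an interval with start in 0..2,
# size fixed at 10, granularity 100, length in 1..20, intensity [(0, 100), (10, 50)]
# and horizon 30, the computed table is [(0, 10), (1, 11), (2, 12)]: the later the
# start, the more of the work falls into the half-intensity region after t=10.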


# Memoization cache for intensity table computation
# Key: (steps_tuple, granularity, start_min, start_max, size_min, size_max, length_min, length_max, horizon)
_intensity_table_cache: dict[tuple, list[tuple[int, ...]]] = {}


def _compute_intensity_table_cached(
    interval: IntervalVar,
    horizon: int,
) -> list[tuple[int, int, int]] | list[tuple[int, int]] | None:
    """
    Cached version of _compute_intensity_table.

    For intervals with the same intensity function and bounds, reuses
    previously computed tables.

    Args:
        interval: The interval variable with intensity function.
        horizon: The scheduling horizon (max end time).

    Returns:
        List of valid tuples, or None if no intensity function is defined.
    """
    if interval.intensity is None:
        return None

    # Create cache key from all relevant parameters
    steps_tuple = tuple(interval.intensity)
    cache_key = (
        steps_tuple,
        interval.granularity,
        interval.start_min,
        min(interval.start_max, horizon),
        interval.size_min,
        interval.size_max,
        interval.length_min,
        interval.length_max,
        horizon,
    )

    if cache_key in _intensity_table_cache:
        return _intensity_table_cache[cache_key]

    # Compute and cache
    result = _compute_intensity_table(interval, horizon)
    if result is not None:
        _intensity_table_cache[cache_key] = result

    return result


def clear_intensity_table_cache() -> None:
    """Clear the intensity table memoization cache."""
    _intensity_table_cache.clear()


def _post_intensity_constraint(interval: IntervalVar, horizon: int) -> None:
    """
    Post the table constraint linking start, size, and length via intensity.

    This creates a pycsp3 table constraint (extension constraint) that restricts
    the valid combinations of (start, length) or (start, size, length) based on
    the intensity function.

    The constraint is only posted once per interval (tracked via _intensity_constraints_posted).

    For XCSP3, this becomes an <extension> constraint with <supports> listing
    all valid tuples:

        <extension>
            <list> start length </list>
            <supports> (0,12)(1,12)(2,13)... </supports>
        </extension>

    Args:
        interval: The interval variable with intensity function.
        horizon: The scheduling horizon.
    """
    if interval in _intensity_constraints_posted:
        return
    if interval.intensity is None:
        return

    # Import pycsp3 functions for posting constraints
    # pycsp3 uses Table() for table/extension constraints
    try:
        from pycsp3 import Table, satisfy
    except ImportError:
        return

    # Compute the discretized table
    table = _compute_intensity_table(interval, horizon)
    if not table:
        # No valid combinations - this interval is infeasible
        # Post a false constraint to signal infeasibility
        satisfy(False)
        _intensity_constraints_posted.add(interval)
        return

    # Get the variables
    start = start_var(interval)
    length = length_var(interval)

    # Check if we need size variable
    size_is_fixed = interval.size_min == interval.size_max

    if size_is_fixed:
        # Table is (start, length) pairs
        # Use pycsp3 Table constraint with the list of valid tuples
        satisfy(
            Table(
                scope=[start, length],
                supports=table,
            )
        )
    else:
        # Table is (start, size, length) triples
        # We need a size variable
        Var, _ = _require_pycsp3()
        size = Var(
            dom=range(interval.size_min, interval.size_max + 1),
            id=_var_id("iv_sz_", interval),
        )
        satisfy(
            Table(
                scope=[start, size, length],
                supports=table,
            )
        )

    _intensity_constraints_posted.add(interval)
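
# With the illustrative table [(0, 10), (1, 11), (2, 12)] sketched above, the
# posted extension constraint accepts exactly those (start, length) pairs, so a
# solver that chooses start = 1 is forced to assign length = 11.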


def _var_id(prefix: str, interval: IntervalVar) -> str:
    """Build a pycsp3 variable id from a prefix and the interval's internal id."""
    return f"{prefix}{interval._id}"


def _default_horizon(intervals: list[IntervalVar]) -> int:
    """Estimate a default scheduling horizon from the bounds of the given intervals."""
    horizon = 0
    for interval in intervals:
        if interval.end_max != INTERVAL_MAX:
            horizon = max(horizon, interval.end_max)
        elif interval.start_max != INTERVAL_MAX:
            horizon = max(horizon, interval.start_max + interval.length_max)
        else:
            horizon += interval.length_max
    return max(horizon, 0)
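
# Worked example: for intervals [a, b] where a has end_max = 20 and b has
# unbounded start/end with length_max = 5, _default_horizon returns 25:
# a contributes max(0, 20) = 20 and the fully unbounded b adds its length_max.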


def _range_from_bounds(min_val: int, max_val: int, horizon: int) -> range:
    """Build an inclusive domain range, capping an unbounded maximum at the horizon."""
    if max_val == INTERVAL_MAX:
        max_val = horizon
    if max_val < min_val:
        max_val = min_val
    return range(min_val, max_val + 1)


def _ensure_options(options) -> None:
    """Initialise pycsp3's options object with its standard values and flags if it has not been set up yet."""
    if hasattr(options, "checker"):
        return
    options.set_values(
        "data",
        "dataparser",
        "dataexport",
        "dataformat",
        "variant",
        "tocsp",
        "checker",
        "solver",
        "output",
        "suffix",
        "callback",
    )
    options.set_flags(
        "dataexport",
        "datasober",
        "solve",
        "display",
        "verbose",
        "lzma",
        "sober",
        "ev",
        "safe",
        "recognizeSlides",
        "keepHybrid",
        "keepSmartTransitions",
        "keepsum",
        "unchangescalar",
        "restrictTablesWrtDomains",
        "dontruncompactor",
        "dontcompactValues",
        "groupsumcoeffs",
        "usemeta",
        "dontuseauxcache",
        "dontadjustindexing",
        "dontbuildsimilarconstraints",
        "debug",
        "mini",
        "uncurse",
        "existbyelement",
        "safetables",
    )
    if options.checker is None:
        options.checker = "fast"


def _require_pycsp3():
    """Import pycsp3 lazily, returning (Var, Variable) or raising a clear ImportError."""
    try:
        from pycsp3 import Var
        from pycsp3.dashboard import options
        from pycsp3.classes.main.variables import Variable
    except Exception as exc:  # pragma: no cover - depends on runtime environment
        raise ImportError(
            "pycsp3 is required to build scheduling constraints. "
            "Install a compatible version and Python runtime."
        ) from exc
    _ensure_options(options)
    return Var, Variable


def presence_var(interval: IntervalVar) -> Any:
    """Return (or create) a pycsp3 boolean variable for the interval presence."""
    if not interval.optional:
        # Non-optional intervals are always present - return constant 1
        return 1
    if interval in _presence_vars:
        return _presence_vars[interval]
    Var, _ = _require_pycsp3()
    var = Var(dom={0, 1}, id=_var_id("iv_p_", interval))
    _presence_vars[interval] = var
    return var


def start_var(interval: IntervalVar) -> Any:
    """Return (or create) a pycsp3 variable for the interval start time."""
    if interval in _start_vars:
        return _start_vars[interval]
    Var, _ = _require_pycsp3()
    intervals = get_registered_intervals() or [interval]
    horizon = _default_horizon(intervals)
    dom = _range_from_bounds(interval.start_min, interval.start_max, horizon)
    var = Var(dom=dom, id=_var_id("iv_s_", interval))
    _start_vars[interval] = var
    return var


def length_var(interval: IntervalVar) -> Any:
    """
    Return (or create) a pycsp3 variable for the interval length.

    Unlike length_value(), this always returns a variable, never a constant.
    This is needed for intensity constraints where length must be a variable
    to participate in table constraints.

    If the interval has an intensity function, this also triggers posting
    the intensity discretization constraint linking start and length.

    Args:
        interval: The interval variable.

    Returns:
        A pycsp3 variable representing the length.
    """
    if interval in _length_vars:
        return _length_vars[interval]

    Var, _ = _require_pycsp3()

    # For intervals with intensity, the length domain may need adjustment
    # based on what's actually achievable given the intensity function
    if interval.intensity is not None:
        # Compute the valid lengths from the intensity table
        intervals = get_registered_intervals() or [interval]
        horizon = _default_horizon(intervals)
        table = _compute_intensity_table(interval, horizon)

        if table:
            # Extract valid lengths from the table
            # Table is either (start, length) or (start, size, length)
            size_is_fixed = interval.size_min == interval.size_max
            if size_is_fixed:
                valid_lengths = sorted(set(t[1] for t in table))
            else:
                valid_lengths = sorted(set(t[2] for t in table))
            dom = set(valid_lengths)
        else:
            # No valid combinations, use full range (will be infeasible)
            dom = range(interval.length_min, interval.length_max + 1)
    else:
        dom = range(interval.length_min, interval.length_max + 1)

    var = Var(dom=dom, id=_var_id("iv_l_", interval))
    _length_vars[interval] = var

    # Post intensity constraint if applicable
    # This links the start and length variables via the intensity function
    if interval.intensity is not None:
        intervals = get_registered_intervals() or [interval]
        horizon = _default_horizon(intervals)
        _post_intensity_constraint(interval, horizon)

    return var


def length_value(interval: IntervalVar) -> Any:
    """
    Return a fixed length or a pycsp3 variable for interval length.

    This is the main entry point for getting the length of an interval.
    It returns:
    - An integer constant if length is fixed AND no intensity function is defined
    - A pycsp3 variable otherwise

    When an intensity function is defined, length becomes a decision variable
    because it depends on where the interval is scheduled (the start time).

    Args:
        interval: The interval variable.

    Returns:
        Either an integer (fixed length) or a pycsp3 variable.
    """
    # If intensity is defined, length depends on start, so we need a variable
    # even if length bounds are fixed, because the actual length varies with start
    if interval.intensity is not None:
        return length_var(interval)

    # No intensity: check if length is fixed
    if interval.length_min == interval.length_max:
        return interval.length_min

    # Variable length without intensity
    return length_var(interval)
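
# Dispatch example: an interval with length_min == length_max == 5 and no
# intensity function yields the plain integer 5, whereas any interval that
# defines an intensity function always yields a pycsp3 length variable, even
# when its length bounds happen to be equal.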


def clear_pycsp3_cache() -> None:
    """Clear cached pycsp3 variables and constraints for intervals."""
    _start_vars.clear()
    _length_vars.clear()
    _presence_vars.clear()
    _intensity_constraints_posted.clear()
    _intensity_table_cache.clear()