dynflows 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dynflows-0.1.0/PKG-INFO +15 -0
- dynflows-0.1.0/README.md +3 -0
- dynflows-0.1.0/pyproject.toml +28 -0
- dynflows-0.1.0/src/dynflows/__init__.py +0 -0
- dynflows-0.1.0/src/dynflows/core/__init__.py +0 -0
- dynflows-0.1.0/src/dynflows/core/active_edges.py +62 -0
- dynflows-0.1.0/src/dynflows/core/bellman_ford.py +77 -0
- dynflows-0.1.0/src/dynflows/core/dijkstra.py +100 -0
- dynflows-0.1.0/src/dynflows/core/dynamic_flow.py +257 -0
- dynflows-0.1.0/src/dynflows/core/flow_builder.py +220 -0
- dynflows-0.1.0/src/dynflows/core/flow_rates_collection.py +121 -0
- dynflows-0.1.0/src/dynflows/core/graph.py +137 -0
- dynflows-0.1.0/src/dynflows/core/machine_precision.py +1 -0
- dynflows-0.1.0/src/dynflows/core/network.py +219 -0
- dynflows-0.1.0/src/dynflows/core/predictor.py +41 -0
- dynflows-0.1.0/src/dynflows/core/predictors/__init__.py +0 -0
- dynflows-0.1.0/src/dynflows/core/predictors/constant_predictor.py +26 -0
- dynflows-0.1.0/src/dynflows/core/predictors/linear_predictor.py +43 -0
- dynflows-0.1.0/src/dynflows/core/predictors/predictor_type.py +12 -0
- dynflows-0.1.0/src/dynflows/core/predictors/reg_linear_predictor.py +47 -0
- dynflows-0.1.0/src/dynflows/core/predictors/zero_predictor.py +21 -0
- dynflows-0.1.0/src/dynflows/eval/__init__.py +0 -0
- dynflows-0.1.0/src/dynflows/eval/evaluate.py +295 -0
- dynflows-0.1.0/src/dynflows/eval/evaluate_network.py +413 -0
- dynflows-0.1.0/src/dynflows/eval/predicted_delay.py +189 -0
- dynflows-0.1.0/src/dynflows/importer/__init__.py +0 -0
- dynflows-0.1.0/src/dynflows/importer/csv_importer.py +97 -0
- dynflows-0.1.0/src/dynflows/importer/sioux_falls_importer.py +77 -0
- dynflows-0.1.0/src/dynflows/importer/tntp_importer.py +84 -0
- dynflows-0.1.0/src/dynflows/utilities/__init__.py +0 -0
- dynflows-0.1.0/src/dynflows/utilities/arrays.py +117 -0
- dynflows-0.1.0/src/dynflows/utilities/build_with_times.py +93 -0
- dynflows-0.1.0/src/dynflows/utilities/combine_commodities.py +37 -0
- dynflows-0.1.0/src/dynflows/utilities/demand_utils.py +8 -0
- dynflows-0.1.0/src/dynflows/utilities/file_lock.py +48 -0
- dynflows-0.1.0/src/dynflows/utilities/get_tn_path.py +14 -0
- dynflows-0.1.0/src/dynflows/utilities/json_encoder.py +24 -0
- dynflows-0.1.0/src/dynflows/utilities/no_op.py +2 -0
- dynflows-0.1.0/src/dynflows/utilities/piecewise_linear.py +628 -0
- dynflows-0.1.0/src/dynflows/utilities/queues.py +164 -0
- dynflows-0.1.0/src/dynflows/utilities/right_constant.py +178 -0
- dynflows-0.1.0/src/dynflows/utilities/status_logger.py +47 -0
- dynflows-0.1.0/src/dynflows/visualization/make_tikz_boxplot.py +54 -0
- dynflows-0.1.0/src/dynflows/visualization/to_json.py +77 -0
dynflows-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: dynflows
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A library for computing, illustrating, and evaluating dynamic flows, a.k.a. flows over time, in Vickrey's fluid queuing model mainly used in dynamic traffic assignment.
|
|
5
|
+
Keywords: flows over time,dynamic traffic assignment,scientific computing
|
|
6
|
+
Author: Michael Markl
|
|
7
|
+
Author-email: Michael Markl <marklmichael98@gmail.com>
|
|
8
|
+
Requires-Dist: numpy>=1.23.4,<2
|
|
9
|
+
Requires-Dist: pandas>=2.0.0,<3
|
|
10
|
+
Requires-Python: >=3.11
|
|
11
|
+
Description-Content-Type: text/markdown
|
|
12
|
+
|
|
13
|
+
# dynflows
|
|
14
|
+
|
|
15
|
+
TODO: Add Content.
|
dynflows-0.1.0/pyproject.toml
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "dynflows"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "A library for computing, illustrating, and evaluating dynamic flows, a.k.a. flows over time, in Vickrey's fluid queuing model mainly used in dynamic traffic assignment."
|
|
5
|
+
authors = [
|
|
6
|
+
{name = "Michael Markl", email = "marklmichael98@gmail.com"}
|
|
7
|
+
]
|
|
8
|
+
readme = "README.md"
|
|
9
|
+
requires-python=">=3.11"
|
|
10
|
+
keywords = [ "flows over time", "dynamic traffic assignment", "scientific computing" ]
|
|
11
|
+
|
|
12
|
+
dependencies = [
|
|
13
|
+
"numpy >= 1.23.4, < 2",
|
|
14
|
+
"pandas >= 2.0.0, < 3"
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
[dependency-groups]
|
|
18
|
+
dev = [
|
|
19
|
+
"matplotlib >= 3.8.3, < 4",
|
|
20
|
+
"black >= 23.3.0, < 24",
|
|
21
|
+
"isort >= 5.12.0, < 6",
|
|
22
|
+
"pylint >= 4, < 5",
|
|
23
|
+
"pytest >= 8.0.2, < 9"
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
[build-system]
|
|
27
|
+
requires = ["uv_build>=0.11.14,<0.12"]
|
|
28
|
+
build-backend = "uv_build"
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
from typing import Callable, Dict, FrozenSet, List, Sequence, Set
|
|
2
|
+
|
|
3
|
+
from dynflows.core.dijkstra import dynamic_dijkstra
|
|
4
|
+
from dynflows.core.graph import DirectedGraph, Edge, Node
|
|
5
|
+
from dynflows.utilities.piecewise_linear import PiecewiseLinear
|
|
6
|
+
|
|
7
|
+
# The identity function t ↦ t (slope 1 on both tails), used to turn an edge
# cost function c into the traversal function t ↦ t + c(t) via `plus`.
identity = PiecewiseLinear([0.0], [0.0], 1.0, 1.0)
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def backward_search(
    costs: Sequence[Callable[[float], float]],
    arrivals: Dict[Node, float],
    source: Node,
    sink: Node,
) -> Set[Edge]:
    """
    Collects the edges out of `source` lying on a shortest source-sink path.

    Walks backwards from `sink` along "tight" edges, i.e. edges e=(v, w)
    with arrivals[v] + costs[e](arrivals[v]) <= arrivals[w], and returns
    those tight edges whose tail is `source`.

    :param costs: costs[e.id] is the time-dependent cost function of edge e
    :param arrivals: earliest arrival times as computed by dynamic_dijkstra
    """
    result: Set[Edge] = set()
    seen: Set[Node] = {sink}
    stack: List[Node] = [sink]
    while stack:
        head = stack.pop()
        for edge in head.incoming_edges:
            tail = edge.node_from
            if tail not in arrivals:
                # Tail was never reached by the forward search.
                continue
            departure = arrivals[tail]
            if departure + costs[edge.id](departure) > arrivals[head]:
                continue  # edge is not tight
            if tail == source:
                result.add(edge)
            if tail not in seen:
                seen.add(tail)
                stack.append(tail)
    return result
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def get_active_edges(
    costs: List[PiecewiseLinear],
    theta: float,
    source: Node,
    sink: Node,
    relevant_nodes: FrozenSet[Node],
    graph: DirectedGraph,
    strong_fifo: bool,
) -> Set[Edge]:
    """
    Determines the edges out of `source` lying on a shortest source-sink
    path when departing at time `theta`.

    :param costs: costs[e.id] is the time-dependent cost function of edge e
    :param theta: departure time at `source`
    :param relevant_nodes: all nodes lying on some source-sink path
    :param graph: the underlying graph; temporarily reversed (and restored)
        when `strong_fifo` is False
    :param strong_fifo: if True, a backward search after one Dijkstra run
        suffices; otherwise a second Dijkstra run on the reverse graph is used
    """
    # With at most one outgoing edge towards a relevant node there is
    # nothing to decide.
    if len([e for e in source.outgoing_edges if e.node_to in relevant_nodes]) <= 1:
        return set(source.outgoing_edges)
    arrivals, _ = dynamic_dijkstra(theta, source, sink, relevant_nodes, costs)
    if strong_fifo:
        return backward_search(costs, arrivals, source, sink)
    else:  # Second run of Dijkstra on the reverse graph.
        graph.reverse()
        traversals = [cost.plus(identity) for cost in costs]
        # Bind `trav` as a default argument: a plain closure would late-bind
        # the comprehension variable, so every lambda would end up using the
        # *last* traversal function.
        new_costs: List[Callable[[float], float]] = [
            lambda t, trav=trav: -trav.reversal(-t) - t for trav in traversals
        ]
        # dynamic_dijkstra returns a (arrival_times, realized_cost) pair;
        # only the arrival times (negated latest departure times on the
        # reversed graph) are needed here.
        neg_departures, _ = dynamic_dijkstra(
            arrivals[sink], sink, source, relevant_nodes, new_costs
        )
        graph.reverse()  # restore the original orientation
        active_edges = set()
        for e in source.outgoing_edges:
            # e is active iff traversing it at theta still allows arriving at
            # sink by the earliest possible arrival time.
            if traversals[e.id](theta) <= -neg_departures[e.node_to]:
                active_edges.add(e)
        return active_edges
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Dict, List, Set
|
|
4
|
+
|
|
5
|
+
from dynflows.core.graph import Node
|
|
6
|
+
from dynflows.core.machine_precision import eps
|
|
7
|
+
from dynflows.utilities.piecewise_linear import PiecewiseLinear
|
|
8
|
+
from dynflows.utilities.queues import PriorityQueue
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def bellman_ford(
    sink: Node,
    costs: List[PiecewiseLinear],
    interesting_nodes: Set[Node],
    phi: float,
    horizon: float = float("inf"),
) -> Dict[Node, PiecewiseLinear]:
    """
    Calculates the earliest arrival time at `sink` as functions (l_v).

    For every node v reached by the backward relaxation, the returned dict
    maps v to a piecewise linear function g_v with g_v(t) = earliest arrival
    time at `sink` when departing from v at time t, for departure times in
    the window [phi, horizon].

    :param sink: target node whose arrival-time functions are computed
    :param costs: costs[e.id] is the time-dependent cost function of edge e
    :param interesting_nodes: only tail nodes in this set are relaxed
    :param phi: left end of the considered time window
    :param horizon: right end of the considered time window
    """
    identity = PiecewiseLinear([phi], [phi], 1.0, 1.0, (phi, horizon))
    # g_v(t) = earliest arrival at sink when starting in v at time t
    g: Dict[Node, PiecewiseLinear] = {sink: identity}
    # Edge count of the relaxation path to sink; used as the priority so that
    # nodes closer to the sink are processed first within a round.
    node_distance: Dict[Node, int] = {sink: 0}

    def get_fifo_arrival_time(traversal: PiecewiseLinear):
        # Make the traversal function t -> t + cost(t) non-decreasing by
        # propagating the running maximum over its breakpoint values (also
        # taking the breakpoint times themselves into account), enforcing
        # the FIFO property on the arrival-time function.
        new_values = traversal.values.copy()
        for i in range(len(new_values) - 1):
            assert new_values[i] <= new_values[i + 1] + eps
            new_values[i + 1] = max(
                new_values[i], new_values[i + 1], traversal.times[i + 1]
            )

        new_traversal = PiecewiseLinear(
            traversal.times,
            new_values,
            traversal.first_slope,
            traversal.last_slope,
            traversal.domain,
        )
        # The arrival time must not increase slower than the departure time.
        if new_traversal.last_slope < 1:
            new_traversal.last_slope = 1
        return new_traversal

    # edge_arrival_times[e](t) = arrival at the head of e when entering at t.
    edge_arrival_times = [
        get_fifo_arrival_time(identity.plus(cost)).simplify() for cost in costs
    ]

    changes_detected_at = PriorityQueue([(sink, 0.0)])

    # Round-based label correction: each round relaxes all incoming edges of
    # the nodes whose label g changed during the previous round.
    while len(changes_detected_at) > 0:
        changed_nodes = changes_detected_at
        changes_detected_at = PriorityQueue([])

        for w in changed_nodes.sorted():
            for edge in w.incoming_edges:
                v = edge.node_from
                if v not in interesting_nodes:
                    continue
                T = edge_arrival_times[edge.id]
                # Restrict T to departure times whose arrival falls inside
                # the domain of g[w].
                restr_domain = (
                    T.min_t_above(g[w].domain[0]),
                    T.max_t_below(g[w].domain[1]),
                )
                if restr_domain[0] is None or restr_domain[1] is None:
                    # No departure time from v reaches w within g[w]'s domain.
                    continue
                relaxation = g[w].compose(T.restrict(restr_domain))  # type: ignore
                if v not in g.keys():
                    # First label for v.
                    node_distance[v] = node_distance[w] + 1
                    if v not in changes_detected_at:
                        changes_detected_at.push(v, node_distance[v])
                    g[v] = relaxation.simplify()
                elif not g[v].smaller_equals(relaxation):
                    # The relaxation improves g[v] somewhere; combine the two
                    # labels and schedule v for the next round.
                    if not changes_detected_at.has(v):
                        changes_detected_at.push(v, node_distance[v])
                    g[v] = g[v].outer_minimum(relaxation).simplify()
    return g
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Callable, Dict, FrozenSet, List, NamedTuple, Sequence, Set, Tuple
|
|
4
|
+
|
|
5
|
+
from dynflows.core.graph import Edge, Node
|
|
6
|
+
from dynflows.core.machine_precision import eps
|
|
7
|
+
from dynflows.utilities.queues import PriorityQueue
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def reverse_dijkstra(
    sink: Node, costs: List[float], nodes: Set[Node]
) -> Dict[Node, float]:
    """
    Runs Dijkstra's algorithm backwards from `sink` along incoming edges.

    Returns, for each node in `nodes` that can reach `sink`, the cost of a
    cheapest path to `sink` under the static edge costs `costs` (indexed by
    edge id).
    """
    assert sink in nodes

    distance: Dict[Node, float] = {sink: 0}
    frontier = PriorityQueue([(sink, distance[sink])])

    while len(frontier) > 0:
        head = frontier.pop()
        for edge in head.incoming_edges:
            tail = edge.node_from
            if tail not in nodes:
                continue
            candidate = costs[edge.id] + distance[head]
            if tail not in distance:
                # First label for this node.
                distance[tail] = candidate
                frontier.push(tail, candidate)
            elif candidate < distance[tail] - eps:
                # Strictly better path (beyond numerical tolerance).
                distance[tail] = candidate
                if frontier.has(tail):
                    frontier.decrease_key(tail, candidate)
                else:
                    frontier.push(tail, candidate)

    return distance
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class DynamicDijkstraResult(NamedTuple):
    """Result of `dynamic_dijkstra`."""

    # Earliest arrival time of every node settled by the search.
    arrival_times: Dict[Node, float]
    # Cost realized on every edge scanned by the search, keyed by edge.
    realized_cost: Dict[Edge, float]
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def dynamic_dijkstra(
    phi: float,
    source: Node,
    sink: Node,
    relevant_nodes: Set[Node],
    costs: Sequence[Callable[[float], float]],
) -> DynamicDijkstraResult:
    """
    Time-dependent Dijkstra starting in `source` at time `phi`.

    Assumes the cost functions obey the FIFO rule and that `relevant_nodes`
    contains all nodes lying on a path from source to sink.  Returns the
    earliest arrival times of all nodes settled up to (and including) the
    sink, together with the cost realized on each scanned edge.
    """
    settled: Dict[Node, float] = {}
    realized: Dict[Edge, float] = {}
    frontier: PriorityQueue[Node] = PriorityQueue([(source, phi)])
    while len(frontier) > 0:
        earliest = frontier.min_key()
        node = frontier.pop()
        settled[node] = earliest
        if node == sink:
            # Every remaining node would only be settled later.
            break
        for edge in node.outgoing_edges:
            head = edge.node_to
            if head in settled or head not in relevant_nodes:
                continue
            realized[edge] = costs[edge.id](earliest)
            candidate = earliest + realized[edge]
            if not frontier.has(head):
                frontier.push(head, candidate)
            elif candidate < frontier.key_of(head):
                frontier.decrease_key(head, candidate)
    return DynamicDijkstraResult(settled, realized)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def get_active_edges_from_dijkstra(
    dijkstra_result: DynamicDijkstraResult,
    source: Node,
    sink: Node,
) -> List[Edge]:
    """
    Extracts all edges out of `source` that lie on a time-shortest
    source-sink path, by walking backwards from `sink` along edges whose
    realized cost is tight (up to machine precision eps).
    """
    arrival_times, realized = dijkstra_result
    tight_source_edges: List[Edge] = []
    visited = {sink}
    pending: List[Node] = [sink]
    while pending:
        head = pending.pop()
        for edge in head.incoming_edges:
            if edge not in realized:
                # Edge was never scanned by the Dijkstra run.
                continue
            tail: Node = edge.node_from
            if arrival_times[tail] + realized[edge] > arrival_times[head] + eps:
                continue  # edge is not tight
            if tail == source:
                tight_source_edges.append(edge)
            if tail not in visited:
                visited.add(tail)
                pending.append(tail)

    # At least one shortest path (and hence one active edge) must exist.
    assert len(tight_source_edges) > 0
    return tight_source_edges
|
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from functools import lru_cache
|
|
4
|
+
from typing import Dict, List, Optional, Set, Tuple
|
|
5
|
+
|
|
6
|
+
from dynflows.core.flow_rates_collection import FlowRatesCollection
|
|
7
|
+
from dynflows.core.machine_precision import eps
|
|
8
|
+
from dynflows.core.network import Network
|
|
9
|
+
from dynflows.utilities.piecewise_linear import PiecewiseLinear
|
|
10
|
+
from dynflows.utilities.queues import PriorityQueue
|
|
11
|
+
from dynflows.utilities.right_constant import RightConstant
|
|
12
|
+
|
|
13
|
+
# Payload of an outflow-change event: per-commodity flow rates (keyed by
# commodity index) together with their precomputed sum.
ChangeEventValue = Tuple[Dict[int, float], float]
# Optional (time, payload) pair scheduling a future outflow change.
ChangeEvent = Optional[Tuple[float, ChangeEventValue]]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class DepletionQueue:
    """
    Bookkeeping for queue-depletion events.

    For every edge with a shrinking queue, `depletions` holds the time at
    which its queue runs empty.  An edge may additionally carry a scheduled
    outflow change (`change_times`/`new_outflow`) that becomes effective
    together with the depletion.
    """

    depletions: PriorityQueue[int]
    change_times: PriorityQueue[int]
    new_outflow: Dict[int, ChangeEventValue]  # edge -> (rates by commodity, their sum)

    def __init__(self):
        self.depletions = PriorityQueue()
        self.change_times = PriorityQueue()
        self.new_outflow = {}

    def set(
        self, edge: int, depletion_time: float, change_event: ChangeEvent = None
    ) -> None:
        """(Re)schedules the depletion of `edge`, optionally with an
        accompanying outflow-change event."""
        assert depletion_time > float("-inf")
        self.depletions.set(edge, depletion_time)

        if change_event is None:
            # No event attached anymore: drop a previously stored one.
            if edge in self.change_times:
                self.change_times.remove(edge)
                self.new_outflow.pop(edge)
        else:
            change_time, change_value = change_event
            self.new_outflow[edge] = change_value
            self.change_times.set(edge, change_time)

    def __contains__(self, edge) -> bool:
        return edge in self.depletions

    def __len__(self) -> int:
        return len(self.depletions)

    def remove(self, edge: int) -> None:
        """Removes `edge` and any change event attached to it."""
        self.depletions.remove(edge)
        if edge in self.change_times:
            self.change_times.remove(edge)
            self.new_outflow.pop(edge)

    def min_change_time(self) -> float:
        return self.change_times.min_key()

    def min_depletion(self) -> float:
        return self.depletions.min_key()

    def pop_by_depletion(self) -> Tuple[int, float, ChangeEvent]:
        """Pops the edge with the earliest depletion time and returns it
        together with that time and its (possibly absent) change event."""
        depletion_time = self.depletions.min_key()
        edge = self.depletions.pop()
        event: ChangeEvent = None
        if edge in self.change_times:
            change_time = self.change_times.key_of(edge)
            self.change_times.remove(edge)
            event = (change_time, self.new_outflow.pop(edge))
        return edge, depletion_time, event
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class DynamicFlow:
    """
    This is a representation of a flow with right-constant edge inflow rates on intervals.

    The flow is built incrementally in time: `extend` advances the current
    time `phi` by prolonging the edge inflows with constant rates, deriving
    the induced edge outflows and queue lengths in the fluid queuing model.
    """

    # Current time up to which the flow has been computed.
    phi: float
    # inflow[e][i] is the function fᵢₑ⁺
    inflow: List[FlowRatesCollection]
    # outflow[e][i] is the function fᵢₑ⁻
    outflow: List[FlowRatesCollection]
    queues: List[PiecewiseLinear]  # queues[e] is the queue length at e
    # A priority queue with times when some edge outflow changes
    outflow_changes: PriorityQueue[Tuple[int, float]]
    depletions: DepletionQueue  # A priority queue with events at which queues deplete
    _network: Network

    def __init__(self, network: Network):
        self._network = network
        self.phi = 0.0
        self.inflow = [FlowRatesCollection() for _ in network.graph.edges]
        # All queues start empty at time 0.
        self.queues = [
            PiecewiseLinear([self.phi], [0.0], 0.0, 0.0) for _ in network.graph.edges
        ]
        self.outflow = [FlowRatesCollection() for _ in network.graph.edges]
        self.outflow_changes = PriorityQueue()
        self.depletions = DepletionQueue()

    def __getstate__(self):
        """Return state values to be pickled."""
        state = self.__dict__.copy()
        # Don't pickle _network b.c. of recursive structure
        del state["_network"]
        return state

    def __setstate__(self, state):
        """Restores pickled state; `_network` must be reattached by the caller."""
        self.__dict__.update(state)
        print("Please reset network on flow before accessing its functions")

    def _extend_case_i(self, e: int, cur_queue: float):
        # Case (i): the new inflow rate into e is zero.  A present queue
        # drains at full capacity; the outflow drops to zero once the last
        # particle currently on the edge has arrived at its head.
        capacity, travel_time = self._network.capacity[e], self._network.travel_time[e]
        # Arrival time of a particle entering the edge now.
        arrival = self.phi + cur_queue / capacity + travel_time

        self.outflow[e].extend(arrival, {}, 0.0)

        self.outflow_changes.set((e, arrival), arrival)

        queue_slope = 0.0 if cur_queue == 0.0 else -capacity
        self.queues[e].extend_with_slope(self.phi, queue_slope)
        if cur_queue > 0:
            depl_time = self.phi + cur_queue / capacity
            assert self.queues[e](depl_time) < 1000 * eps
            self.depletions.set(e, depl_time)
        elif e in self.depletions:
            self.depletions.remove(e)

    def _extend_case_ii(
        self, e: int, new_inflow: Dict[int, float], cur_queue: float, acc_in: float
    ):
        # Case (ii): no queue, or the aggregate inflow (weakly) exceeds
        # capacity.  The per-commodity outflow rates are the inflow rates
        # scaled to the capped aggregate outflow; the queue grows with rate
        # max(acc_in - capacity, 0).
        capacity, travel_time = self._network.capacity[e], self._network.travel_time[e]
        arrival = self.phi + cur_queue / capacity + travel_time

        acc_out = min(capacity, acc_in)
        factor = acc_out / acc_in

        new_outflow = {i: factor * value for i, value in new_inflow.items()}
        self.outflow[e].extend(arrival, new_outflow, acc_out)

        self.outflow_changes.set((e, arrival), arrival)

        queue_slope = max(acc_in - capacity, 0.0)
        self.queues[e].extend_with_slope(self.phi, queue_slope)
        # The queue cannot deplete while it is non-decreasing.
        if e in self.depletions:
            self.depletions.remove(e)

    def _extend_case_iii(
        self, e: int, new_inflow: Dict[int, float], cur_queue: float, acc_in: float
    ):
        # Case (iii): positive queue and aggregate inflow strictly below
        # capacity.  The queue shrinks; until it depletes, the edge releases
        # flow at full capacity, and the outflow change taking effect after
        # the depletion is scheduled as a depletion event.
        capacity, travel_time = self._network.capacity[e], self._network.travel_time[e]
        arrival = self.phi + cur_queue / capacity + travel_time

        factor = capacity / acc_in

        new_outflow = {i: factor * value for i, value in new_inflow.items()}
        self.outflow[e].extend(arrival, new_outflow, capacity)

        self.outflow_changes.set((e, arrival), arrival)

        queue_slope = acc_in - capacity  # negative in this case
        self.queues[e].extend_with_slope(self.phi, queue_slope)

        depl_time = self.phi - cur_queue / queue_slope
        # After the depletion, the outflow equals the (unscaled) inflow.
        planned_change_time = depl_time + travel_time
        planned_change_value = (new_inflow, acc_in)
        assert self.queues[e](depl_time) < 1000 * eps

        self.depletions.set(e, depl_time, (planned_change_time, planned_change_value))

    def _process_depletions(self):
        # Handles all depletion events up to the current time phi: flattens
        # the queue at zero and activates any scheduled outflow change.
        # NOTE(review): min_depletion() is evaluated before the emptiness
        # check; this presumes PriorityQueue.min_key() returns +inf on an
        # empty queue — confirm against utilities/queues.py.
        while self.depletions.min_depletion() <= self.phi and len(self.depletions) > 0:
            (e, depl_time, change_event) = self.depletions.pop_by_depletion()
            self.queues[e].extend_with_slope(depl_time, 0.0)
            assert abs(self.queues[e].values[-1]) < 1000 * eps
            # Snap the numerically-almost-zero queue to exactly zero.
            self.queues[e].values[-1] = 0.0
            if change_event is not None:
                (change_time, (new_outflow, new_outflow_sum)) = change_event
                self.outflow_changes.set((e, change_time), change_time)
                self.outflow[e].extend(change_time, new_outflow, new_outflow_sum)

    def extend(
        self, new_inflow: Dict[int, Dict[int, float]], max_extension_time: float
    ) -> Set[int]:
        """
        Extends the flow with constant inflows new_inflow until some edge outflow changes.
        Edge inflows not in new_inflow are extended with their previous values.
        new_inflow maps edge ids to per-commodity inflow rates.
        The user can also specify a maximum extension length using max_extension_time.
        :returns set of edges where the outflow has changed at the new time self.phi
        """
        # Edge loads are derived from the (now changing) in-/outflows.
        self.get_edge_loads.cache_clear()
        capacity = self._network.capacity

        for e in new_inflow.keys():
            # Unchanged rates need no new events.
            if self.inflow[e].get_values_at_time(self.phi) == new_inflow[e]:
                continue
            acc_in = sum(new_inflow[e].values())
            # Clamp tiny negative numerical noise in the queue length.
            cur_queue = max(self.queues[e].eval_from_end(self.phi), 0.0)

            self.inflow[e].extend(self.phi, new_inflow[e], acc_in)
            if acc_in == 0.0:
                self._extend_case_i(e, cur_queue)
            elif cur_queue == 0.0 or acc_in >= capacity[e] - eps:
                self._extend_case_ii(e, new_inflow[e], cur_queue, acc_in)
            else:
                self._extend_case_iii(e, new_inflow[e], cur_queue, acc_in)

        # Advance time to the next event (scheduled outflow change after a
        # depletion, any outflow change, or the requested maximum).
        self.phi = min(
            self.depletions.min_change_time(),
            self.outflow_changes.min_key(),
            max_extension_time,
        )

        self._process_depletions()

        # Collect all edges whose outflow changes at (or before) the new phi.
        changed_edges: Set[int] = set()
        while (
            self.outflow_changes.min_key() <= self.phi and len(self.outflow_changes) > 0
        ):
            changed_edges.add(self.outflow_changes.pop()[0])
        return changed_edges

    def avg_travel_time(self, i: int, horizon: float) -> float:
        """
        Average travel time of commodity `i` up to time `horizon`: the area
        between the commodity's cumulative network inflow and cumulative
        network outflow, divided by its total inflow up to `horizon`.
        """
        commodity = self._network.commodities[i]
        net_outflow: RightConstant = sum(
            (
                self.outflow[e.id]._functions_dict[i]
                for e in commodity.sink.incoming_edges
                if i in self.outflow[e.id]._functions_dict
            ),
            start=RightConstant([0.0], [0.0], (0, float("inf"))),
        )
        accum_net_outflow = net_outflow.integral()
        net_inflow: RightConstant = sum(
            (inflow for inflow in commodity.sources.values()),
            start=RightConstant([0.0], [0.0], (0, float("inf"))),
        )
        accum_net_inflow = net_inflow.integral()

        avg_travel_time = (
            accum_net_inflow.integrate(0.0, horizon)
            - accum_net_outflow.integrate(0.0, horizon)
        ) / accum_net_inflow(horizon)
        return avg_travel_time

    @lru_cache()
    def get_edge_loads(self) -> List[PiecewiseLinear]:
        """
        Returns, per edge, the flow volume on the edge (cumulative inflow
        minus cumulative outflow) as a function of time, extended by zero
        before time 0.

        NOTE(review): `lru_cache` on an instance method keys on `self` and
        keeps each instance alive for the cache's lifetime (flake8-bugbear
        B019); `extend` clears it explicitly via cache_clear().
        """
        edge_loads: List[PiecewiseLinear] = [
            self.inflow[e].accumulative - self.outflow[e].accumulative
            for e in range(len(self.inflow))
        ]
        assert all(edge_load.domain[0] == 0.0 for edge_load in edge_loads)
        assert all(abs(edge_load(0.0)) < 1e-10 for edge_load in edge_loads)
        for edge_load in edge_loads:
            # Prepend a zero breakpoint and widen the domain so the load is
            # defined (and zero) for all times before the flow starts.
            if edge_load.values[0] != 0.0:
                edge_load.times.insert(0, 0.0)
                edge_load.values.insert(0, 0.0)
            edge_load.first_slope = 0.0
            edge_load.domain = (float("-inf"), edge_load.domain[1])

        return edge_loads
|