dynamic-des 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dynamic_des-0.1.0/PKG-INFO +250 -0
- dynamic_des-0.1.0/README.md +204 -0
- dynamic_des-0.1.0/pyproject.toml +87 -0
- dynamic_des-0.1.0/src/dynamic_des/__init__.py +34 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/admin/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/admin/kafka.py +192 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/egress/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/egress/base.py +29 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/egress/kafka.py +129 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/egress/local.py +52 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/egress/postgres.py +48 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/egress/redis.py +53 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/ingress/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/ingress/base.py +29 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/ingress/kafka.py +89 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/ingress/local.py +55 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/ingress/postgres.py +40 -0
- dynamic_des-0.1.0/src/dynamic_des/connectors/ingress/redis.py +40 -0
- dynamic_des-0.1.0/src/dynamic_des/core/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/core/environment.py +293 -0
- dynamic_des-0.1.0/src/dynamic_des/core/registry.py +213 -0
- dynamic_des-0.1.0/src/dynamic_des/core/sampler.py +67 -0
- dynamic_des-0.1.0/src/dynamic_des/examples/__init__.py +118 -0
- dynamic_des-0.1.0/src/dynamic_des/examples/compose-kafka.yml +21 -0
- dynamic_des-0.1.0/src/dynamic_des/examples/kafka_dashboard.py +244 -0
- dynamic_des-0.1.0/src/dynamic_des/examples/kafka_example.py +128 -0
- dynamic_des-0.1.0/src/dynamic_des/examples/local_example.py +105 -0
- dynamic_des-0.1.0/src/dynamic_des/models/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/models/params.py +64 -0
- dynamic_des-0.1.0/src/dynamic_des/models/schemas.py +66 -0
- dynamic_des-0.1.0/src/dynamic_des/py.typed +0 -0
- dynamic_des-0.1.0/src/dynamic_des/resources/__init__.py +0 -0
- dynamic_des-0.1.0/src/dynamic_des/resources/base.py +25 -0
- dynamic_des-0.1.0/src/dynamic_des/resources/container.py +17 -0
- dynamic_des-0.1.0/src/dynamic_des/resources/resource.py +141 -0
- dynamic_des-0.1.0/src/dynamic_des/resources/store.py +17 -0
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: dynamic-des
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Real-time SimPy control plane to dynamically update parameters and stream outputs via external systems like Kafka, Redis, or Postgres. Built for event-driven digital twins.
|
|
5
|
+
Keywords: simpy,simulation,digital-twin,discrete-event-simulation,kafka,redis,postgres,real-time,industry-4-0
|
|
6
|
+
Author: Jaehyeon Kim
|
|
7
|
+
Author-email: Jaehyeon Kim <dottami@gmail.com>
|
|
8
|
+
Classifier: Development Status :: 3 - Alpha
|
|
9
|
+
Classifier: Intended Audience :: Developers
|
|
10
|
+
Classifier: Intended Audience :: Information Technology
|
|
11
|
+
Classifier: Intended Audience :: Manufacturing
|
|
12
|
+
Classifier: Intended Audience :: Science/Research
|
|
13
|
+
Classifier: Intended Audience :: System Administrators
|
|
14
|
+
Classifier: Intended Audience :: Education
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Operating System :: OS Independent
|
|
17
|
+
Classifier: Programming Language :: Python :: 3
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
23
|
+
Classifier: Topic :: Scientific/Engineering
|
|
24
|
+
Requires-Dist: simpy>=4.0.0
|
|
25
|
+
Requires-Dist: numpy>=1.26.0
|
|
26
|
+
Requires-Dist: orjson>=3.10.0
|
|
27
|
+
Requires-Dist: pydantic>=2.0.0
|
|
28
|
+
Requires-Dist: python-on-whales>=0.70.0
|
|
29
|
+
Requires-Dist: dynamic-des[kafka,redis,postgres,dashboard] ; extra == 'all'
|
|
30
|
+
Requires-Dist: nicegui>=1.4.0 ; extra == 'dashboard'
|
|
31
|
+
Requires-Dist: aiokafka[lz4]>=0.11.0 ; extra == 'kafka'
|
|
32
|
+
Requires-Dist: kafka-python>=2.0.2 ; extra == 'kafka'
|
|
33
|
+
Requires-Dist: asyncpg>=0.29.0 ; extra == 'postgres'
|
|
34
|
+
Requires-Dist: redis>=5.0.0 ; extra == 'redis'
|
|
35
|
+
Requires-Python: >=3.10
|
|
36
|
+
Project-URL: Homepage, https://jaehyeon-kim.github.io/dynamic-des/
|
|
37
|
+
Project-URL: Repository, https://github.com/jaehyeon-kim/dynamic-des
|
|
38
|
+
Project-URL: Documentation, https://jaehyeon-kim.github.io/dynamic-des/
|
|
39
|
+
Project-URL: Issues, https://github.com/jaehyeon-kim/dynamic-des/issues
|
|
40
|
+
Provides-Extra: all
|
|
41
|
+
Provides-Extra: dashboard
|
|
42
|
+
Provides-Extra: kafka
|
|
43
|
+
Provides-Extra: postgres
|
|
44
|
+
Provides-Extra: redis
|
|
45
|
+
Description-Content-Type: text/markdown
|
|
46
|
+
|
|
47
|
+
# Dynamic DES
|
|
48
|
+
|
|
49
|
+
[](https://github.com/jaehyeon-kim/dynamic-des/actions/workflows/pipeline.yml)
|
|
50
|
+
[](https://jaehyeon-kim.github.io/dynamic-des/)
|
|
51
|
+
[](https://badge.fury.io/py/dynamic-des)
|
|
52
|
+
[](https://pypi.org/project/dynamic-des/)
|
|
53
|
+
[](https://opensource.org/licenses/MIT)
|
|
54
|
+
|
|
55
|
+
**Real-time SimPy control plane for event-driven digital twins.**
|
|
56
|
+
|
|
57
|
+
<div align="center">
|
|
58
|
+
<img src="docs/assets/dashboard-preview.gif" alt="Dashboard Screenshot" width="800" />
|
|
59
|
+
</div>
|
|
60
|
+
|
|
61
|
+
Dynamic DES bridges the gap between static discrete-event simulations and the live world. It allows you to update simulation parameters (arrivals, service times, capacities) and stream telemetry via **Kafka**, **Redis**, or **PostgreSQL** without stopping the simulation.
|
|
62
|
+
|
|
63
|
+
## Key Features
|
|
64
|
+
|
|
65
|
+
- **⚡ Real-Time Control**: Synchronize SimPy with the system clock using `DynamicRealtimeEnvironment`.
|
|
66
|
+
- **🔗 Dynamic Registry**: Dynamic, path-based updates (e.g., `Line_A.arrival.rate`) that trigger instant logic changes.
|
|
67
|
+
- **🚀 High Throughput**: Optimized to handle high throughput using `orjson` and local batching.
|
|
68
|
+
- **🔋 Flexible Resources**: `DynamicResource` provides prioritized queuing with graceful capacity shrinking.
|
|
69
|
+
- **🔌 Modular Connectors**: Plugin-based architecture for Kafka, Redis, Postgres and Local testing.
|
|
70
|
+
- **📊 System Observability**: Built-in lag monitoring to track simulation drift from real-world time, exposed via the telemetry stream.
|
|
71
|
+
|
|
72
|
+
## Installation
|
|
73
|
+
|
|
74
|
+
Install the core library:
|
|
75
|
+
|
|
76
|
+
```bash
|
|
77
|
+
pip install dynamic-des
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
To include specific backends:
|
|
81
|
+
|
|
82
|
+
```bash
|
|
83
|
+
# For Kafka support
|
|
84
|
+
pip install "dynamic-des[kafka]"
|
|
85
|
+
|
|
86
|
+
# For Kafka and Dashboard support
|
|
87
|
+
pip install "dynamic-des[kafka,dashboard]"
|
|
88
|
+
|
|
89
|
+
# For all backends (Kafka, Redis, Postgres, Dashboard)
|
|
90
|
+
pip install "dynamic-des[all]"
|
|
91
|
+
```
|
|
92
|
+
|
|
93
|
+
## Quick Start: Zero-Setup Demos
|
|
94
|
+
|
|
95
|
+
Dynamic DES comes with built-in examples and infrastructure orchestration so you can see it in action immediately.
|
|
96
|
+
|
|
97
|
+
**Run the local, dependency-free simulation:**
|
|
98
|
+
|
|
99
|
+
```bash
|
|
100
|
+
ddes-local-example
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
**Run the full Real-Time Digital Twin stack with Kafka and a live UI:**
|
|
104
|
+
|
|
105
|
+
```bash
|
|
106
|
+
# Start the background Kafka cluster (requires Docker)
|
|
107
|
+
ddes-kafka-infra-up
|
|
108
|
+
|
|
109
|
+
# Open a new terminal and run the simulation
|
|
110
|
+
# Ctrl + C to stop
|
|
111
|
+
ddes-kafka-example
|
|
112
|
+
|
|
113
|
+
# Open a new terminal and start the control dashboard (opens in browser)
|
|
114
|
+
# Visit http://localhost:8080
|
|
115
|
+
# Ctrl + C to stop
|
|
116
|
+
ddes-kafka-dashboard
|
|
117
|
+
|
|
118
|
+
# Clean up the infrastructure when finished
|
|
119
|
+
ddes-kafka-infra-down
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
## Building Your Own Simulation (Local Example)
|
|
123
|
+
|
|
124
|
+
The following snippet demonstrates a simple example. It initializes a production line, schedules an external capacity update, and streams telemetry to the console.
|
|
125
|
+
|
|
126
|
+
```python
|
|
127
|
+
import numpy as np
|
|
128
|
+
from dynamic_des import (
|
|
129
|
+
CapacityConfig, ConsoleEgress, DistributionConfig,
|
|
130
|
+
DynamicRealtimeEnvironment, DynamicResource, LocalIngress, SimParameter
|
|
131
|
+
)
|
|
132
|
+
|
|
133
|
+
# 1. Define initial system state
|
|
134
|
+
params = SimParameter(
|
|
135
|
+
sim_id="Line_A",
|
|
136
|
+
arrival={"standard": DistributionConfig(dist="exponential", rate=1)},
|
|
137
|
+
resources={"lathe": CapacityConfig(current_cap=1, max_cap=5)},
|
|
138
|
+
)
|
|
139
|
+
|
|
140
|
+
# 2. Setup Environment with Local Connectors
|
|
141
|
+
# Schedule capacity to jump from 1 to 3 at t=5s
|
|
142
|
+
ingress = LocalIngress([(5.0, "Line_A.resources.lathe.current_cap", 3)])
|
|
143
|
+
egress = ConsoleEgress()
|
|
144
|
+
|
|
145
|
+
env = DynamicRealtimeEnvironment(factor=1.0)
|
|
146
|
+
env.registry.register_sim_parameter(params)
|
|
147
|
+
env.setup_ingress([ingress])
|
|
148
|
+
env.setup_egress([egress])
|
|
149
|
+
|
|
150
|
+
# 3. Create Resource
|
|
151
|
+
res = DynamicResource(env, "Line_A", "lathe")
|
|
152
|
+
|
|
153
|
+
def telemetry_monitor(env: DynamicRealtimeEnvironment, res: DynamicResource):
|
|
154
|
+
"""Streams system health metrics every 2 seconds."""
|
|
155
|
+
while True:
|
|
156
|
+
env.publish_telemetry("Line_A.resources.lathe.capacity", res.capacity)
|
|
157
|
+
yield env.timeout(2.0)
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
env.process(telemetry_monitor(env, res))
|
|
161
|
+
|
|
162
|
+
# 4. Run
|
|
163
|
+
print("Simulation started. Watch capacity change at t=5s...")
|
|
164
|
+
try:
|
|
165
|
+
env.run(until=10.1)
|
|
166
|
+
finally:
|
|
167
|
+
env.teardown()
|
|
168
|
+
```
|
|
169
|
+
|
|
170
|
+
### What this does
|
|
171
|
+
|
|
172
|
+
1. **Registry Initialization**: The `SimParameter` defines the initial state. The Registry flattens this into addressable paths (e.g., `Line_A.resources.lathe.current_cap`).
|
|
173
|
+
2. **Live Ingress**: The `LocalIngress` simulates an external event (like a Kafka message) arriving 5 seconds into the run.
|
|
174
|
+
3. **Zero-Polling Update**: The `DynamicResource` listens to the Registry. The moment the ingress updates the value, the resource automatically expands its internal token pool without any manual checking.
|
|
175
|
+
4. **Telemetry Egress**: The `ConsoleEgress` prints system vitals to your terminal, mimicking a live dashboard feed.
|
|
176
|
+
|
|
177
|
+
### Data Egress JSON Schemas
|
|
178
|
+
|
|
179
|
+
To ensure strict data contracts with external consumers (like Kafka, Redis, or PostgreSQL), `dynamic-des` uses Pydantic to validate all outbound payloads. Users can expect two distinct JSON structures depending on the stream type:
|
|
180
|
+
|
|
181
|
+
#### Telemetry Stream
|
|
182
|
+
|
|
183
|
+
Used for scalar metrics like resource utilization, queue lengths, or simulation lag.
|
|
184
|
+
|
|
185
|
+
```json
|
|
186
|
+
{
|
|
187
|
+
"stream_type": "telemetry",
|
|
188
|
+
"path_id": "Line_A.resources.lathe.utilization",
|
|
189
|
+
"value": 85.5,
|
|
190
|
+
"sim_ts": 120.5,
|
|
191
|
+
"timestamp": "2023-10-25T14:30:00.000Z"
|
|
192
|
+
}
|
|
193
|
+
```
|
|
194
|
+
|
|
195
|
+
#### Event Stream
|
|
196
|
+
|
|
197
|
+
Used for discrete task lifecycle events (e.g., a part arriving, entering a queue, or finishing processing).
|
|
198
|
+
|
|
199
|
+
```json
|
|
200
|
+
{
|
|
201
|
+
"stream_type": "event",
|
|
202
|
+
"key": "task-001",
|
|
203
|
+
"value": {
|
|
204
|
+
"status": "finished",
|
|
205
|
+
"duration": 45.2,
|
|
206
|
+
"path_id": "Line_A.service.lathe"
|
|
207
|
+
},
|
|
208
|
+
"sim_ts": 125.0,
|
|
209
|
+
"timestamp": "2023-10-25T14:30:04.500Z"
|
|
210
|
+
}
|
|
211
|
+
```
|
|
212
|
+
|
|
213
|
+
### More Examples
|
|
214
|
+
|
|
215
|
+
For more examples, including implementations using **Kafka** providers, please explore the [examples](./src/dynamic_des/examples/) folder.
|
|
216
|
+
|
|
217
|
+
## Core Concepts
|
|
218
|
+
|
|
219
|
+
**Dynamic DES** is built on the **Switchboard Pattern**, decoupling data sourcing from simulation logic.
|
|
220
|
+
|
|
221
|
+
### Switchboard Pattern
|
|
222
|
+
|
|
223
|
+
Instead of resources polling Kafka directly, the architecture is split into three layers:
|
|
224
|
+
|
|
225
|
+
1. **Connectors (Ingress/Egress)**: Background threads handle heavy I/O (Kafka, Redis).
|
|
226
|
+
2. **Registry (Switchboard)**: A centralized state manager that flattens data into dot-notation paths.
|
|
227
|
+
3. **Resources (SimPy Objects)**: Passive observers that "wake up" only when the Registry signals a change.
|
|
228
|
+
|
|
229
|
+
### Event-Driven Capacity
|
|
230
|
+
|
|
231
|
+
Standard SimPy resources have static capacities. `DynamicResource` wraps a `Container` and a `PriorityStore`. When the Registry updates:
|
|
232
|
+
|
|
233
|
+
- **Growing**: Extra tokens are added to the pool immediately.
|
|
234
|
+
- **Shrinking**: The resource requests tokens back. If they are busy, it waits until they are released, ensuring no work-in-progress is lost.
|
|
235
|
+
|
|
236
|
+
### High-Throughput Events
|
|
237
|
+
|
|
238
|
+
To handle high throughput, the `EgressMixIn` uses:
|
|
239
|
+
|
|
240
|
+
- **Batching**: Pushing lists of events to the I/O thread to reduce lock contention.
|
|
241
|
+
- **orjson**: Rust-powered serialization for maximum speed.
|
|
242
|
+
|
|
243
|
+
## Documentation
|
|
244
|
+
|
|
245
|
+
For full documentation, architecture details, and API reference, visit:
|
|
246
|
+
[https://jaehyeon-kim.github.io/dynamic-des/](https://jaehyeon-kim.github.io/dynamic-des/).
|
|
247
|
+
|
|
248
|
+
## License
|
|
249
|
+
|
|
250
|
+
MIT
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
# Dynamic DES
|
|
2
|
+
|
|
3
|
+
[](https://github.com/jaehyeon-kim/dynamic-des/actions/workflows/pipeline.yml)
|
|
4
|
+
[](https://jaehyeon-kim.github.io/dynamic-des/)
|
|
5
|
+
[](https://badge.fury.io/py/dynamic-des)
|
|
6
|
+
[](https://pypi.org/project/dynamic-des/)
|
|
7
|
+
[](https://opensource.org/licenses/MIT)
|
|
8
|
+
|
|
9
|
+
**Real-time SimPy control plane for event-driven digital twins.**
|
|
10
|
+
|
|
11
|
+
<div align="center">
|
|
12
|
+
<img src="docs/assets/dashboard-preview.gif" alt="Dashboard Screenshot" width="800" />
|
|
13
|
+
</div>
|
|
14
|
+
|
|
15
|
+
Dynamic DES bridges the gap between static discrete-event simulations and the live world. It allows you to update simulation parameters (arrivals, service times, capacities) and stream telemetry via **Kafka**, **Redis**, or **PostgreSQL** without stopping the simulation.
|
|
16
|
+
|
|
17
|
+
## Key Features
|
|
18
|
+
|
|
19
|
+
- **⚡ Real-Time Control**: Synchronize SimPy with the system clock using `DynamicRealtimeEnvironment`.
|
|
20
|
+
- **🔗 Dynamic Registry**: Dynamic, path-based updates (e.g., `Line_A.arrival.rate`) that trigger instant logic changes.
|
|
21
|
+
- **🚀 High Throughput**: Optimized to handle high throughput using `orjson` and local batching.
|
|
22
|
+
- **🔋 Flexible Resources**: `DynamicResource` provides prioritized queuing with graceful capacity shrinking.
|
|
23
|
+
- **🔌 Modular Connectors**: Plugin-based architecture for Kafka, Redis, Postgres and Local testing.
|
|
24
|
+
- **📊 System Observability**: Built-in lag monitoring to track simulation drift from real-world time, exposed via the telemetry stream.
|
|
25
|
+
|
|
26
|
+
## Installation
|
|
27
|
+
|
|
28
|
+
Install the core library:
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
pip install dynamic-des
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
To include specific backends:
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
# For Kafka support
|
|
38
|
+
pip install "dynamic-des[kafka]"
|
|
39
|
+
|
|
40
|
+
# For Kafka and Dashboard support
|
|
41
|
+
pip install "dynamic-des[kafka,dashboard]"
|
|
42
|
+
|
|
43
|
+
# For all backends (Kafka, Redis, Postgres, Dashboard)
|
|
44
|
+
pip install "dynamic-des[all]"
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
## Quick Start: Zero-Setup Demos
|
|
48
|
+
|
|
49
|
+
Dynamic DES comes with built-in examples and infrastructure orchestration so you can see it in action immediately.
|
|
50
|
+
|
|
51
|
+
**Run the local, dependency-free simulation:**
|
|
52
|
+
|
|
53
|
+
```bash
|
|
54
|
+
ddes-local-example
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
**Run the full Real-Time Digital Twin stack with Kafka and a live UI:**
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
# Start the background Kafka cluster (requires Docker)
|
|
61
|
+
ddes-kafka-infra-up
|
|
62
|
+
|
|
63
|
+
# Open a new terminal and run the simulation
|
|
64
|
+
# Ctrl + C to stop
|
|
65
|
+
ddes-kafka-example
|
|
66
|
+
|
|
67
|
+
# Open a new terminal and start the control dashboard (opens in browser)
|
|
68
|
+
# Visit http://localhost:8080
|
|
69
|
+
# Ctrl + C to stop
|
|
70
|
+
ddes-kafka-dashboard
|
|
71
|
+
|
|
72
|
+
# Clean up the infrastructure when finished
|
|
73
|
+
ddes-kafka-infra-down
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
## Building Your Own Simulation (Local Example)
|
|
77
|
+
|
|
78
|
+
The following snippet demonstrates a simple example. It initializes a production line, schedules an external capacity update, and streams telemetry to the console.
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
import numpy as np
|
|
82
|
+
from dynamic_des import (
|
|
83
|
+
CapacityConfig, ConsoleEgress, DistributionConfig,
|
|
84
|
+
DynamicRealtimeEnvironment, DynamicResource, LocalIngress, SimParameter
|
|
85
|
+
)
|
|
86
|
+
|
|
87
|
+
# 1. Define initial system state
|
|
88
|
+
params = SimParameter(
|
|
89
|
+
sim_id="Line_A",
|
|
90
|
+
arrival={"standard": DistributionConfig(dist="exponential", rate=1)},
|
|
91
|
+
resources={"lathe": CapacityConfig(current_cap=1, max_cap=5)},
|
|
92
|
+
)
|
|
93
|
+
|
|
94
|
+
# 2. Setup Environment with Local Connectors
|
|
95
|
+
# Schedule capacity to jump from 1 to 3 at t=5s
|
|
96
|
+
ingress = LocalIngress([(5.0, "Line_A.resources.lathe.current_cap", 3)])
|
|
97
|
+
egress = ConsoleEgress()
|
|
98
|
+
|
|
99
|
+
env = DynamicRealtimeEnvironment(factor=1.0)
|
|
100
|
+
env.registry.register_sim_parameter(params)
|
|
101
|
+
env.setup_ingress([ingress])
|
|
102
|
+
env.setup_egress([egress])
|
|
103
|
+
|
|
104
|
+
# 3. Create Resource
|
|
105
|
+
res = DynamicResource(env, "Line_A", "lathe")
|
|
106
|
+
|
|
107
|
+
def telemetry_monitor(env: DynamicRealtimeEnvironment, res: DynamicResource):
|
|
108
|
+
"""Streams system health metrics every 2 seconds."""
|
|
109
|
+
while True:
|
|
110
|
+
env.publish_telemetry("Line_A.resources.lathe.capacity", res.capacity)
|
|
111
|
+
yield env.timeout(2.0)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
env.process(telemetry_monitor(env, res))
|
|
115
|
+
|
|
116
|
+
# 4. Run
|
|
117
|
+
print("Simulation started. Watch capacity change at t=5s...")
|
|
118
|
+
try:
|
|
119
|
+
env.run(until=10.1)
|
|
120
|
+
finally:
|
|
121
|
+
env.teardown()
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
### What this does
|
|
125
|
+
|
|
126
|
+
1. **Registry Initialization**: The `SimParameter` defines the initial state. The Registry flattens this into addressable paths (e.g., `Line_A.resources.lathe.current_cap`).
|
|
127
|
+
2. **Live Ingress**: The `LocalIngress` simulates an external event (like a Kafka message) arriving 5 seconds into the run.
|
|
128
|
+
3. **Zero-Polling Update**: The `DynamicResource` listens to the Registry. The moment the ingress updates the value, the resource automatically expands its internal token pool without any manual checking.
|
|
129
|
+
4. **Telemetry Egress**: The `ConsoleEgress` prints system vitals to your terminal, mimicking a live dashboard feed.
|
|
130
|
+
|
|
131
|
+
### Data Egress JSON Schemas
|
|
132
|
+
|
|
133
|
+
To ensure strict data contracts with external consumers (like Kafka, Redis, or PostgreSQL), `dynamic-des` uses Pydantic to validate all outbound payloads. Users can expect two distinct JSON structures depending on the stream type:
|
|
134
|
+
|
|
135
|
+
#### Telemetry Stream
|
|
136
|
+
|
|
137
|
+
Used for scalar metrics like resource utilization, queue lengths, or simulation lag.
|
|
138
|
+
|
|
139
|
+
```json
|
|
140
|
+
{
|
|
141
|
+
"stream_type": "telemetry",
|
|
142
|
+
"path_id": "Line_A.resources.lathe.utilization",
|
|
143
|
+
"value": 85.5,
|
|
144
|
+
"sim_ts": 120.5,
|
|
145
|
+
"timestamp": "2023-10-25T14:30:00.000Z"
|
|
146
|
+
}
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
#### Event Stream
|
|
150
|
+
|
|
151
|
+
Used for discrete task lifecycle events (e.g., a part arriving, entering a queue, or finishing processing).
|
|
152
|
+
|
|
153
|
+
```json
|
|
154
|
+
{
|
|
155
|
+
"stream_type": "event",
|
|
156
|
+
"key": "task-001",
|
|
157
|
+
"value": {
|
|
158
|
+
"status": "finished",
|
|
159
|
+
"duration": 45.2,
|
|
160
|
+
"path_id": "Line_A.service.lathe"
|
|
161
|
+
},
|
|
162
|
+
"sim_ts": 125.0,
|
|
163
|
+
"timestamp": "2023-10-25T14:30:04.500Z"
|
|
164
|
+
}
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
### More Examples
|
|
168
|
+
|
|
169
|
+
For more examples, including implementations using **Kafka** providers, please explore the [examples](./src/dynamic_des/examples/) folder.
|
|
170
|
+
|
|
171
|
+
## Core Concepts
|
|
172
|
+
|
|
173
|
+
**Dynamic DES** is built on the **Switchboard Pattern**, decoupling data sourcing from simulation logic.
|
|
174
|
+
|
|
175
|
+
### Switchboard Pattern
|
|
176
|
+
|
|
177
|
+
Instead of resources polling Kafka directly, the architecture is split into three layers:
|
|
178
|
+
|
|
179
|
+
1. **Connectors (Ingress/Egress)**: Background threads handle heavy I/O (Kafka, Redis).
|
|
180
|
+
2. **Registry (Switchboard)**: A centralized state manager that flattens data into dot-notation paths.
|
|
181
|
+
3. **Resources (SimPy Objects)**: Passive observers that "wake up" only when the Registry signals a change.
|
|
182
|
+
|
|
183
|
+
### Event-Driven Capacity
|
|
184
|
+
|
|
185
|
+
Standard SimPy resources have static capacities. `DynamicResource` wraps a `Container` and a `PriorityStore`. When the Registry updates:
|
|
186
|
+
|
|
187
|
+
- **Growing**: Extra tokens are added to the pool immediately.
|
|
188
|
+
- **Shrinking**: The resource requests tokens back. If they are busy, it waits until they are released, ensuring no work-in-progress is lost.
|
|
189
|
+
|
|
190
|
+
### High-Throughput Events
|
|
191
|
+
|
|
192
|
+
To handle high throughput, the `EgressMixIn` uses:
|
|
193
|
+
|
|
194
|
+
- **Batching**: Pushing lists of events to the I/O thread to reduce lock contention.
|
|
195
|
+
- **orjson**: Rust-powered serialization for maximum speed.
|
|
196
|
+
|
|
197
|
+
## Documentation
|
|
198
|
+
|
|
199
|
+
For full documentation, architecture details, and API reference, visit:
|
|
200
|
+
[https://jaehyeon-kim.github.io/dynamic-des/](https://jaehyeon-kim.github.io/dynamic-des/).
|
|
201
|
+
|
|
202
|
+
## License
|
|
203
|
+
|
|
204
|
+
MIT
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "dynamic-des"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "Real-time SimPy control plane to dynamically update parameters and stream outputs via external systems like Kafka, Redis, or Postgres. Built for event-driven digital twins."
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
authors = [
|
|
7
|
+
{ name = "Jaehyeon Kim", email = "dottami@gmail.com" }
|
|
8
|
+
]
|
|
9
|
+
keywords = [
|
|
10
|
+
"simpy",
|
|
11
|
+
"simulation",
|
|
12
|
+
"digital-twin",
|
|
13
|
+
"discrete-event-simulation",
|
|
14
|
+
"kafka",
|
|
15
|
+
"redis",
|
|
16
|
+
"postgres",
|
|
17
|
+
"real-time",
|
|
18
|
+
"industry-4-0"
|
|
19
|
+
]
|
|
20
|
+
requires-python = ">=3.10"
|
|
21
|
+
classifiers = [
|
|
22
|
+
"Development Status :: 3 - Alpha",
|
|
23
|
+
"Intended Audience :: Developers",
|
|
24
|
+
"Intended Audience :: Information Technology",
|
|
25
|
+
"Intended Audience :: Manufacturing",
|
|
26
|
+
"Intended Audience :: Science/Research",
|
|
27
|
+
"Intended Audience :: System Administrators",
|
|
28
|
+
"Intended Audience :: Education",
|
|
29
|
+
"License :: OSI Approved :: MIT License",
|
|
30
|
+
"Operating System :: OS Independent",
|
|
31
|
+
"Programming Language :: Python :: 3",
|
|
32
|
+
"Programming Language :: Python :: 3.10",
|
|
33
|
+
"Programming Language :: Python :: 3.11",
|
|
34
|
+
"Programming Language :: Python :: 3.12",
|
|
35
|
+
"Programming Language :: Python :: 3.13",
|
|
36
|
+
"Programming Language :: Python :: 3.14",
|
|
37
|
+
"Topic :: Scientific/Engineering",
|
|
38
|
+
]
|
|
39
|
+
dependencies = [
|
|
40
|
+
"simpy>=4.0.0",
|
|
41
|
+
"numpy>=1.26.0",
|
|
42
|
+
"orjson>=3.10.0",
|
|
43
|
+
"pydantic>=2.0.0",
|
|
44
|
+
"python-on-whales>=0.70.0"
|
|
45
|
+
]
|
|
46
|
+
|
|
47
|
+
[project.urls]
|
|
48
|
+
Homepage = "https://jaehyeon-kim.github.io/dynamic-des/"
|
|
49
|
+
Repository = "https://github.com/jaehyeon-kim/dynamic-des"
|
|
50
|
+
Documentation = "https://jaehyeon-kim.github.io/dynamic-des/"
|
|
51
|
+
Issues = "https://github.com/jaehyeon-kim/dynamic-des/issues"
|
|
52
|
+
|
|
53
|
+
[project.optional-dependencies]
|
|
54
|
+
kafka = [
|
|
55
|
+
"aiokafka[lz4]>=0.11.0",
|
|
56
|
+
"kafka-python>=2.0.2"
|
|
57
|
+
]
|
|
58
|
+
redis = ["redis>=5.0.0"]
|
|
59
|
+
postgres = ["asyncpg>=0.29.0"]
|
|
60
|
+
dashboard = ["nicegui>=1.4.0"]
|
|
61
|
+
all = [
|
|
62
|
+
"dynamic-des[kafka,redis,postgres,dashboard]"
|
|
63
|
+
]
|
|
64
|
+
[build-system]
|
|
65
|
+
requires = ["uv_build>=0.9.16,<0.11.0"]
|
|
66
|
+
build-backend = "uv_build"
|
|
67
|
+
|
|
68
|
+
[dependency-groups]
|
|
69
|
+
dev = [
|
|
70
|
+
"mkdocs<2.0",
|
|
71
|
+
"mkdocs-material>=9.7.4",
|
|
72
|
+
"mkdocstrings[python]>=1.0.3",
|
|
73
|
+
"mike>=2.1.0",
|
|
74
|
+
"mypy>=1.19.1",
|
|
75
|
+
"pytest>=9.0.2",
|
|
76
|
+
"pytest-asyncio>=1.3.0",
|
|
77
|
+
"ruff>=0.15.5",
|
|
78
|
+
"pytest-cov>=5.0.0",
|
|
79
|
+
]
|
|
80
|
+
[project.scripts]
|
|
81
|
+
# Infrastructure
|
|
82
|
+
ddes-kafka-infra-up = "dynamic_des.examples:kafka_infra_up"
|
|
83
|
+
ddes-kafka-infra-down = "dynamic_des.examples:kafka_infra_down"
|
|
84
|
+
# Examples & Dashboard
|
|
85
|
+
ddes-local-example = "dynamic_des.examples:local_demo"
|
|
86
|
+
ddes-kafka-example = "dynamic_des.examples:kafka_demo"
|
|
87
|
+
ddes-kafka-dashboard = "dynamic_des.examples:kafka_dashboard_demo"
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
from dynamic_des.connectors.admin.kafka import KafkaAdminConnector
|
|
4
|
+
from dynamic_des.connectors.egress.kafka import KafkaEgress
|
|
5
|
+
from dynamic_des.connectors.egress.local import ConsoleEgress
|
|
6
|
+
from dynamic_des.connectors.ingress.kafka import KafkaIngress
|
|
7
|
+
from dynamic_des.connectors.ingress.local import LocalIngress
|
|
8
|
+
from dynamic_des.core.environment import DynamicRealtimeEnvironment
|
|
9
|
+
from dynamic_des.core.registry import SimulationRegistry
|
|
10
|
+
from dynamic_des.core.sampler import Sampler
|
|
11
|
+
from dynamic_des.models.params import CapacityConfig, DistributionConfig, SimParameter
|
|
12
|
+
from dynamic_des.models.schemas import EventPayload, TelemetryPayload
|
|
13
|
+
from dynamic_des.resources.resource import DynamicResource
|
|
14
|
+
|
|
15
|
+
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
|
16
|
+
|
|
17
|
+
__version__ = "0.1.0"
|
|
18
|
+
|
|
19
|
+
__all__ = [
|
|
20
|
+
"DynamicRealtimeEnvironment",
|
|
21
|
+
"Sampler",
|
|
22
|
+
"SimulationRegistry",
|
|
23
|
+
"DynamicResource",
|
|
24
|
+
"SimParameter",
|
|
25
|
+
"DistributionConfig",
|
|
26
|
+
"CapacityConfig",
|
|
27
|
+
"KafkaAdminConnector",
|
|
28
|
+
"KafkaIngress",
|
|
29
|
+
"LocalIngress",
|
|
30
|
+
"KafkaEgress",
|
|
31
|
+
"ConsoleEgress",
|
|
32
|
+
"EventPayload",
|
|
33
|
+
"TelemetryPayload",
|
|
34
|
+
]
|
|
File without changes
|
|
File without changes
|