antioch-py 2.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of antioch-py has been flagged as potentially problematic; review the release before depending on it.
- antioch/__init__.py +0 -0
- antioch/message.py +87 -0
- antioch/module/__init__.py +53 -0
- antioch/module/clock.py +62 -0
- antioch/module/execution.py +278 -0
- antioch/module/input.py +127 -0
- antioch/module/module.py +218 -0
- antioch/module/node.py +357 -0
- antioch/module/token.py +42 -0
- antioch/session/__init__.py +150 -0
- antioch/session/ark.py +504 -0
- antioch/session/asset.py +65 -0
- antioch/session/error.py +80 -0
- antioch/session/record.py +158 -0
- antioch/session/scene.py +1521 -0
- antioch/session/session.py +220 -0
- antioch/session/task.py +323 -0
- antioch/session/views/__init__.py +40 -0
- antioch/session/views/animation.py +189 -0
- antioch/session/views/articulation.py +245 -0
- antioch/session/views/basis_curve.py +186 -0
- antioch/session/views/camera.py +92 -0
- antioch/session/views/collision.py +75 -0
- antioch/session/views/geometry.py +74 -0
- antioch/session/views/ground_plane.py +63 -0
- antioch/session/views/imu.py +73 -0
- antioch/session/views/joint.py +64 -0
- antioch/session/views/light.py +175 -0
- antioch/session/views/pir_sensor.py +140 -0
- antioch/session/views/radar.py +73 -0
- antioch/session/views/rigid_body.py +282 -0
- antioch/session/views/xform.py +119 -0
- antioch_py-2.0.6.dist-info/METADATA +115 -0
- antioch_py-2.0.6.dist-info/RECORD +99 -0
- antioch_py-2.0.6.dist-info/WHEEL +5 -0
- antioch_py-2.0.6.dist-info/entry_points.txt +2 -0
- antioch_py-2.0.6.dist-info/top_level.txt +2 -0
- common/__init__.py +0 -0
- common/ark/__init__.py +60 -0
- common/ark/ark.py +128 -0
- common/ark/hardware.py +121 -0
- common/ark/kinematics.py +31 -0
- common/ark/module.py +85 -0
- common/ark/node.py +94 -0
- common/ark/scheduler.py +439 -0
- common/ark/sim.py +33 -0
- common/assets/__init__.py +3 -0
- common/constants.py +47 -0
- common/core/__init__.py +52 -0
- common/core/agent.py +296 -0
- common/core/auth.py +305 -0
- common/core/registry.py +331 -0
- common/core/task.py +36 -0
- common/message/__init__.py +59 -0
- common/message/annotation.py +89 -0
- common/message/array.py +500 -0
- common/message/base.py +517 -0
- common/message/camera.py +91 -0
- common/message/color.py +139 -0
- common/message/frame.py +50 -0
- common/message/image.py +171 -0
- common/message/imu.py +14 -0
- common/message/joint.py +47 -0
- common/message/log.py +31 -0
- common/message/pir.py +16 -0
- common/message/point.py +109 -0
- common/message/point_cloud.py +63 -0
- common/message/pose.py +148 -0
- common/message/quaternion.py +273 -0
- common/message/radar.py +58 -0
- common/message/types.py +37 -0
- common/message/vector.py +786 -0
- common/rome/__init__.py +9 -0
- common/rome/client.py +430 -0
- common/rome/error.py +16 -0
- common/session/__init__.py +54 -0
- common/session/environment.py +31 -0
- common/session/sim.py +240 -0
- common/session/views/__init__.py +263 -0
- common/session/views/animation.py +73 -0
- common/session/views/articulation.py +184 -0
- common/session/views/basis_curve.py +102 -0
- common/session/views/camera.py +147 -0
- common/session/views/collision.py +59 -0
- common/session/views/geometry.py +102 -0
- common/session/views/ground_plane.py +41 -0
- common/session/views/imu.py +66 -0
- common/session/views/joint.py +81 -0
- common/session/views/light.py +96 -0
- common/session/views/pir_sensor.py +115 -0
- common/session/views/radar.py +82 -0
- common/session/views/rigid_body.py +236 -0
- common/session/views/viewport.py +21 -0
- common/session/views/xform.py +39 -0
- common/utils/__init__.py +4 -0
- common/utils/comms.py +571 -0
- common/utils/logger.py +123 -0
- common/utils/time.py +42 -0
- common/utils/usd.py +12 -0
common/ark/scheduler.py
ADDED
|
@@ -0,0 +1,439 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
|
|
3
|
+
from sortedcontainers import SortedDict
|
|
4
|
+
|
|
5
|
+
from common.ark.module import Module
|
|
6
|
+
from common.message import Message
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class NodeEdge(Message):
    """
    Directed edge representing data flow between nodes.

    Connects one node's named output to another node's named input; the
    scheduler uses these edges to route InputTokens to downstream nodes
    when the source node completes.
    """

    _type = "antioch/ark/node_edge"
    source_module: str  # module that owns the producing node
    source_node: str  # name of the producing node
    source_output_name: str  # output port on the producing node
    target_module: str  # module that owns the consuming node
    target_node: str  # name of the consuming node
    target_input_name: str  # input port on the consuming node
    type: str  # NOTE(review): presumably the message/payload type carried on this edge — confirm against callers
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class InputToken(Message):
    """
    Input token representing data flow to a node.

    Created by the scheduler when a source node completes, and buffered on
    the target node until it starts executing.
    """

    _type = "antioch/ark/input_token"
    source_module: str  # module that produced this token
    source_node: str  # node that produced this token
    source_output_name: str  # output port the token came from
    target_input_name: str  # input port on the consuming node this token feeds
    let_us: int  # logical execution time (us) at which the producing node started (completion LET minus budget)
    budget_us: int  # execution budget (us) of the producing node
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class ScheduleEvent(Message, ABC):
    """
    Base class for schedule events.

    Events represent discrete occurrences in the execution schedule,
    ordered by logical execution time (LET). Concrete subclasses define
    how the LET and ordering priority are derived from their fields.
    """

    module: str  # module that owns the node this event refers to
    node: str  # name of the node this event refers to

    @property
    @abstractmethod
    def let_us(self) -> int:
        """
        Logical execution time in microseconds.
        """

    @property
    @abstractmethod
    def priority(self) -> int:
        """
        Event priority for ordering (0=complete, 1=start).
        """
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class NodeCompleteEvent(ScheduleEvent):
    """
    Node execution completes.

    Ordered before start events at the same LET (priority 0) so that
    outputs are delivered before dependent nodes begin.
    """

    _type = "antioch/ark/node_complete_event"
    completion_let_us: int  # LET (us) at which the node completes: its start LET plus its budget

    @property
    def let_us(self) -> int:
        return self.completion_let_us

    @property
    def priority(self) -> int:
        # Completions sort before starts at the same LET.
        return 0
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class NodeStartEvent(ScheduleEvent):
    """
    Node execution starts.

    Ordered after completion events at the same LET (priority 1) so that
    freshly produced input tokens are visible before the node starts.
    """

    _type = "antioch/ark/node_start_event"
    start_let_us: int  # LET (us) at which the node starts executing
    input_tokens: list[InputToken]  # tokens buffered for the node at scheduling time

    @property
    def let_us(self) -> int:
        return self.start_let_us

    @property
    def priority(self) -> int:
        # Starts sort after completions at the same LET.
        return 1
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
class NodeState:
    """
    Runtime state of a node during scheduling.

    Tracks execution status and pending inputs for a single node.
    """

    def __init__(self, module: str, node: str, required_inputs: set[str]):
        """
        Create a new node state.

        :param module: Module containing the node.
        :param node: Name of the node.
        :param required_inputs: Set of input names that are required for execution.
        """

        self.module = module
        self.node = node
        self.is_executing = False
        self.queued_execution: int | None = None
        self.pending_tokens: dict[str, list[InputToken]] = {}
        self.required_inputs = required_inputs

    def record_token(self, token: InputToken) -> None:
        """
        Buffer a token for this input.

        :param token: Input token to buffer.
        """

        self.pending_tokens.setdefault(token.target_input_name, []).append(token)

    def has_all_required_inputs(self) -> bool:
        """
        Check if all required inputs have at least one token.

        :return: True if all required inputs have tokens, False otherwise.
        """

        return all(self.pending_tokens.get(name, []) for name in self.required_inputs)

    def collect_tokens(self) -> list[InputToken]:
        """
        Collect all buffered tokens, ordered by let_us.

        Drains the pending-token buffers as a side effect.

        :return: List of all tokens sorted by start time.
        """

        drained = sorted(
            (token for bucket in self.pending_tokens.values() for token in bucket),
            key=lambda t: t.let_us,
        )
        self.pending_tokens.clear()
        return drained
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
class OnlineScheduler:
    """
    Online deterministic scheduler for Ark execution.

    Computes execution schedule deterministically from edges and modules using
    Logical Execution Time (LET) semantics. Events are computed lazily on-demand,
    enabling infinite schedules driven by timer nodes. Nodes execute in parallel
    with only per-node sequential constraints.

    The scheduler implements:
    - Timer-driven execution with periodic node firing
    - Token-based data flow between nodes
    - Queued execution for timer overruns
    - Deterministic event ordering (completions before starts at same LET)
    """

    def __init__(self, edges: list[NodeEdge], modules: list[Module]):
        """
        Create a new online scheduler.

        Initializes the scheduler and seeds initial timer fires.
        Events are computed lazily on-demand via next().

        :param edges: List of node edges.
        :param modules: List of modules.
        :raises ValueError: If a module or node is not found.
        :raises RuntimeError: If the schedule is exhausted.
        """

        # Build outgoing edge map for O(1) lookups
        self.modules: dict[str, Module] = {m.name: m for m in modules}
        self.outgoing_edges: dict[tuple[str, str], list[NodeEdge]] = {}
        for edge in edges:
            key = (edge.source_module, edge.source_node)
            if key not in self.outgoing_edges:
                self.outgoing_edges[key] = []
            self.outgoing_edges[key].append(edge)

        # Compute timer periods once
        self.timer_periods_us: dict[tuple[str, str], int] = {}
        for module in self.modules.values():
            for node_name, node_def in module.nodes.items():
                if node_def.timer:
                    key = (module.name, node_name)
                    self.timer_periods_us[key] = node_def.timer.to_period_us()

        # Initialize state
        self.node_states: dict[tuple[str, str], NodeState] = {}
        # events: unprocessed events keyed by LET; processed_events: per-LET lists
        # already ordered and handed out incrementally by next().
        self.events: SortedDict = SortedDict()
        self.processed_events: SortedDict = SortedDict()
        # Cursor into processed_events: the LET currently being consumed, and the
        # index of the last event returned at that LET (-1 = none returned yet).
        self.last_event_let_us: int = 0
        self.last_event_index: int = -1
        for module in self.modules.values():
            for node_name, node_def in module.nodes.items():
                key = (module.name, node_name)
                required_inputs = {name for name, inp in node_def.inputs.items() if inp.required}
                self.node_states[key] = NodeState(module.name, node_name, required_inputs)

        # Seed timer fires
        for module_name, node_name in self.timer_periods_us:
            self._try_schedule_node_start(module_name, node_name, 0)

    def next(self) -> ScheduleEvent:
        """
        Get the next event in the schedule.

        Returns the next event in chronological order, processing multiple events
        at the same LET in priority order. Computes schedule lazily as needed.

        :return: The next event in the schedule.
        :raises RuntimeError: If the schedule is exhausted.
        """

        while True:
            # Check if we have more events at current LET
            if self.last_event_let_us in self.processed_events:
                events = self.processed_events[self.last_event_let_us]
                next_index = 0 if self.last_event_index == -1 else self.last_event_index + 1
                if next_index < len(events):
                    self.last_event_index = next_index
                    return events[next_index]

            # Move to next LET with events
            range_start = self.last_event_let_us if self.last_event_index == -1 else self.last_event_let_us + 1

            # Find next processed LET
            for let_us in self.processed_events.irange(minimum=range_start):
                if self.processed_events[let_us]:
                    self.last_event_let_us = let_us
                    self.last_event_index = 0
                    return self.processed_events[let_us][0]

            # No more processed events, need to process next unprocessed LET
            if not self.events:
                raise RuntimeError("Schedule exhausted")

            # NOTE(review): this assumes newly scheduled events never land at a LET
            # below range_start; an event scheduled in the "past" would be processed
            # here but then skipped by the irange scan above — confirm LET semantics
            # guarantee monotonicity.
            next_let_us = next(iter(self.events.keys()))
            events = self._process_let(next_let_us)
            self.processed_events[next_let_us] = events

    def _process_let(self, let_us: int) -> list[ScheduleEvent]:
        """
        Process all events at a specific LET and return them.

        Retrieves all events scheduled for this LET, sorts them by priority
        (completions before starts), and processes each one. May generate
        additional events at the same LET through cascading effects.

        :param let_us: Logical execution time to process.
        :return: List of all events processed at this LET.
        """

        all_events: list[ScheduleEvent] = []
        # Loop: handlers may schedule further events at this same LET, which
        # reinsert the key into self.events and are picked up next iteration.
        while let_us in self.events:
            events: list[ScheduleEvent] = self.events.pop(let_us, [])  # type: ignore[assignment]
            if not events:
                break

            # Sort: completions before starts
            events.sort(key=lambda e: (e.let_us, e.priority))
            all_events.extend(events)
            for event in events:
                if isinstance(event, NodeCompleteEvent):
                    self._handle_node_complete(event)
                elif isinstance(event, NodeStartEvent):
                    self._handle_node_start(event)

        return all_events

    def _handle_node_complete(self, event: NodeCompleteEvent) -> None:
        """
        Handle node completion event.

        Marks node as idle, processes queued executions, and creates InputTokens
        for all downstream nodes connected via edges.

        :param event: Node complete event to process.
        """

        # Mark node as idle and process queued execution
        key = (event.module, event.node)
        if key in self.node_states:
            state = self.node_states[key]
            state.is_executing = False
            if state.queued_execution is not None:
                queued_let = state.queued_execution
                state.queued_execution = None
                if state.has_all_required_inputs():
                    input_tokens = state.collect_tokens()
                    self._schedule_event(
                        NodeStartEvent(
                            module=event.module,
                            node=event.node,
                            start_let_us=queued_let,
                            input_tokens=input_tokens,
                        )
                    )

        # Create InputTokens and buffer them in target nodes
        if key in self.outgoing_edges:
            source_node_def = self._get_node(event.module, event.node)
            budget_us = source_node_def.budget_us
            for edge in self.outgoing_edges[key]:
                token = InputToken(
                    source_module=edge.source_module,
                    source_node=edge.source_node,
                    source_output_name=edge.source_output_name,
                    target_input_name=edge.target_input_name,
                    # completion LET minus budget is the LET at which the source
                    # started (see _handle_node_start's completion scheduling).
                    let_us=event.completion_let_us - budget_us,
                    budget_us=budget_us,
                )

                target_key = (edge.target_module, edge.target_node)
                if target_key in self.node_states:
                    target_state = self.node_states[target_key]
                    target_state.record_token(token)

                    # Only schedule nodes without timers when inputs arrive
                    # Timer nodes are scheduled by their timer, inputs are just buffered
                    if target_key not in self.timer_periods_us and target_state.has_all_required_inputs() and not target_state.is_executing:
                        self._try_schedule_node_start(edge.target_module, edge.target_node, event.completion_let_us)

    def _handle_node_start(self, event: NodeStartEvent) -> None:
        """
        Handle node start event.

        For nodes with all required inputs: marks as executing and schedules completion.
        For timer nodes without required inputs: skips execution but schedules next timer fire.

        :param event: Node start event to process.
        """

        key = (event.module, event.node)
        node_def = self._get_node(event.module, event.node)

        # Get current input tokens from node state (not from event)
        # The event's input_tokens snapshot may be stale: more tokens can have
        # arrived between scheduling and processing this start.
        state = self.node_states.get(key)
        current_input_tokens: list[InputToken] = (
            [token for tokens_list in state.pending_tokens.values() for token in tokens_list] if state else []
        )

        # Check if required inputs are satisfied
        required_inputs = {name for name, inp in node_def.inputs.items() if inp.required}
        provided_inputs = {token.target_input_name for token in current_input_tokens}

        # Only execute if required inputs are satisfied
        if state and all(req in provided_inputs for req in required_inputs):
            state.is_executing = True
            state.collect_tokens()  # Clear tokens now that we're executing

            # Schedule completion
            self._schedule_event(
                NodeCompleteEvent(
                    module=event.module,
                    node=event.node,
                    completion_let_us=event.start_let_us + node_def.budget_us,
                )
            )

        # For timer nodes, always schedule next timer fire (regardless of whether we executed)
        if key in self.timer_periods_us:
            period_us = self.timer_periods_us[key]
            next_start_let = event.start_let_us + period_us
            self._try_schedule_node_start(event.module, event.node, next_start_let)

    def _try_schedule_node_start(self, module: str, node: str, let_us: int) -> None:
        """
        Try to schedule a node start if ready and not executing.

        For timer nodes: always creates start events to maintain schedule.
        For non-timer nodes: only creates start events when all required inputs are available.

        :param module: Module containing the node.
        :param node: Name of the node to schedule.
        :param let_us: Logical execution time when node should start.
        """

        key = (module, node)
        state = self.node_states.get(key)
        if state is None:
            return

        # If node is currently executing, queue this execution
        # NOTE(review): only the latest queued LET is retained — an earlier queued
        # start is overwritten on overrun; confirm this is the intended policy.
        if state.is_executing:
            state.queued_execution = let_us
            return

        # For timer nodes, always create start events to maintain schedule
        # For non-timer nodes, only create start events when inputs are ready
        if key in self.timer_periods_us or state.has_all_required_inputs():
            input_tokens = [token for tokens_list in state.pending_tokens.values() for token in tokens_list]
            self._schedule_event(NodeStartEvent(module=module, node=node, start_let_us=let_us, input_tokens=input_tokens))

    def _schedule_event(self, event: ScheduleEvent) -> None:
        """
        Schedule an event for future processing.

        Adds the event to the unprocessed events queue at its LET for
        later processing by the scheduler.

        :param event: Event to schedule (NodeStartEvent or NodeCompleteEvent).
        """

        let_us = event.let_us
        if let_us not in self.events:
            self.events[let_us] = []
        self.events[let_us].append(event)

    def _get_node(self, module: str, node: str):
        """
        Get node definition from modules.

        :param module: Module containing the node.
        :param node: Name of the node.
        :return: Node definition.
        :raises ValueError: If module or node is not found.
        """

        if module not in self.modules:
            raise ValueError(f"Module '{module}' not found")
        if node not in self.modules[module].nodes:
            raise ValueError(f"Node '{module}::{node}' not found")
        return self.modules[module].nodes[node]
|
common/ark/sim.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from common.ark.scheduler import InputToken
|
|
2
|
+
from common.message import Message
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class SimNodeStart(Message):
    """
    Ark signals node to start execution (sim mode).

    Sent from Ark to node via publisher to trigger node start with hardware reads.
    Includes expected input tokens for overrun detection and authoritative start timestamp.
    """

    _type = "antioch/ark/sim_node_start"
    module_name: str  # module that owns the node being started
    node_name: str  # name of the node being started
    start_let_us: int  # logical execution time (us) of the start
    start_timestamp_us: int  # authoritative wall-clock start timestamp (us)
    input_tokens: list[InputToken]  # tokens expected by the node at this start
    hardware_reads: dict[str, bytes]  # raw hardware read payloads keyed by name — presumably hardware channel IDs; confirm against producer
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SimNodeComplete(Message):
    """
    Node signals completion to Ark (sim mode).

    Sent from node to Ark to indicate completion with optional hardware writes.
    """

    _type = "antioch/ark/sim_node_complete"
    module_name: str  # module that owns the completing node
    node_name: str  # name of the completing node
    completion_let_us: int  # logical execution time (us) of the completion
    hardware_writes: dict[str, bytes] | None = None  # optional raw write payloads keyed by name; None when the node wrote nothing
|
common/constants.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import os
from pathlib import Path

# Remote API endpoint; overridable via environment for non-staging deployments.
ANTIOCH_API_URL = os.environ.get("ANTIOCH_API_URL", "https://staging.api.antioch.com")
# Root directory for local Antioch state (auth, arks, assets).
ANTIOCH_DIR = os.environ.get("ANTIOCH_DIR", str(Path.home() / ".antioch"))


def _ensure_subdir(name: str) -> Path:
    """
    Create (if needed) and return a named subdirectory of ANTIOCH_DIR.

    Shared implementation for the get_*_dir helpers below, which previously
    triplicated this mkdir-and-return logic.

    :param name: Subdirectory name under the Antioch root directory.
    :return: Path to the existing subdirectory.
    """

    subdir = Path(ANTIOCH_DIR) / name
    subdir.mkdir(parents=True, exist_ok=True)
    return subdir


def get_auth_dir() -> Path:
    """
    Get the auth storage directory path.

    Creates the auth directory if it doesn't exist.

    :return: Path to the auth directory.
    """

    return _ensure_subdir("auth")


def get_ark_dir() -> Path:
    """
    Get the arks storage directory path.

    Creates the arks directory if it doesn't exist.

    :return: Path to the arks directory.
    """

    return _ensure_subdir("arks")


def get_asset_dir() -> Path:
    """
    Get the assets storage directory path.

    Creates the assets directory if it doesn't exist.

    :return: Path to the assets directory.
    """

    return _ensure_subdir("assets")
|
common/core/__init__.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
# Public API of common.core: re-exports from the agent, auth, and registry
# submodules, enumerated explicitly in __all__ below.
from common.core.agent import (
    Agent,
    AgentError,
    AgentResponse,
    AgentStateResponse,
    AgentValidationError,
    ArkStateResponse,
    ContainerSource,
    ContainerState,
    RecordTelemetryRequest,
    StartArkRequest,
)
from common.core.auth import AuthError, AuthHandler, Organization
from common.core.registry import (
    get_ark_version_reference,
    get_asset_path,
    list_local_arks,
    list_local_assets,
    list_remote_arks,
    list_remote_assets,
    load_local_ark,
    pull_remote_ark,
    pull_remote_asset,
)

__all__ = [
    # Agent
    "Agent",
    "AgentError",
    "AgentResponse",
    "AgentStateResponse",
    "AgentValidationError",
    "ArkStateResponse",
    "ContainerSource",
    "ContainerState",
    "RecordTelemetryRequest",
    "StartArkRequest",
    # Auth
    "AuthError",
    "AuthHandler",
    "Organization",
    # Registry
    "get_ark_version_reference",
    "get_asset_path",
    "list_local_arks",
    "list_local_assets",
    "list_remote_arks",
    "list_remote_assets",
    "load_local_ark",
    "pull_remote_ark",
    "pull_remote_asset",
]
|