pyRDDLGym-jax 1.2-py3-none-any.whl → 2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyRDDLGym_jax/__init__.py +1 -1
- pyRDDLGym_jax/core/compiler.py +16 -1
- pyRDDLGym_jax/core/logic.py +36 -9
- pyRDDLGym_jax/core/planner.py +517 -129
- pyRDDLGym_jax/core/simulator.py +20 -0
- pyRDDLGym_jax/core/tuning.py +15 -0
- pyRDDLGym_jax/core/visualization.py +48 -0
- pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_replan.cfg +3 -3
- pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_slp.cfg +4 -4
- pyRDDLGym_jax/examples/configs/Quadcopter_drp.cfg +1 -0
- pyRDDLGym_jax/examples/configs/Quadcopter_slp.cfg +4 -3
- pyRDDLGym_jax/examples/configs/Reservoir_Continuous_drp.cfg +1 -0
- pyRDDLGym_jax/examples/configs/Reservoir_Continuous_slp.cfg +1 -0
- pyRDDLGym_jax/examples/configs/UAV_Continuous_slp.cfg +1 -0
- pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_drp.cfg +1 -0
- pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_replan.cfg +1 -0
- pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_slp.cfg +1 -0
- {pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/METADATA +1 -1
- {pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/RECORD +23 -23
- {pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/LICENSE +0 -0
- {pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/WHEEL +0 -0
- {pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/entry_points.txt +0 -0
- {pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/top_level.txt +0 -0
pyRDDLGym_jax/core/simulator.py
CHANGED
@@ -1,3 +1,23 @@
+# ***********************************************************************
+# JAXPLAN
+#
+# Author: Michael Gimelfarb
+#
+# REFERENCES:
+#
+# [1] Gimelfarb, Michael, Ayal Taitler, and Scott Sanner. "JaxPlan and GurobiPlan:
+# Optimization Baselines for Replanning in Discrete and Mixed Discrete-Continuous
+# Probabilistic Domains." Proceedings of the International Conference on Automated
+# Planning and Scheduling. Vol. 34. 2024.
+#
+# [2] Taitler, Ayal, Michael Gimelfarb, Jihwan Jeong, Sriram Gopalakrishnan, Martin
+# Mladenov, Xiaotian Liu, and Scott Sanner. "pyRDDLGym: From RDDL to Gym Environments."
+# In PRL Workshop Series {\textendash} Bridging the Gap Between AI Planning and
+# Reinforcement Learning.
+#
+# ***********************************************************************
+
+
 import time
 from typing import Dict, Optional
pyRDDLGym_jax/core/tuning.py
CHANGED
@@ -1,3 +1,18 @@
+# ***********************************************************************
+# JAXPLAN
+#
+# Author: Michael Gimelfarb
+#
+# REFERENCES:
+#
+# [1] Gimelfarb, Michael, Ayal Taitler, and Scott Sanner. "JaxPlan and GurobiPlan:
+# Optimization Baselines for Replanning in Discrete and Mixed Discrete-Continuous
+# Probabilistic Domains." Proceedings of the International Conference on Automated
+# Planning and Scheduling. Vol. 34. 2024.
+#
+# ***********************************************************************
+
+
 import csv
 import datetime
 import threading
pyRDDLGym_jax/core/visualization.py
CHANGED
@@ -1,3 +1,18 @@
+# ***********************************************************************
+# JAXPLAN
+#
+# Author: Michael Gimelfarb
+#
+# REFERENCES:
+#
+# [1] Gimelfarb, Michael, Ayal Taitler, and Scott Sanner. "JaxPlan and GurobiPlan:
+# Optimization Baselines for Replanning in Discrete and Mixed Discrete-Continuous
+# Probabilistic Domains." Proceedings of the International Conference on Automated
+# Planning and Scheduling. Vol. 34. 2024.
+#
+# ***********************************************************************
+
+
 import ast
 import os
 from datetime import datetime
@@ -61,6 +76,7 @@ class JaxPlannerDashboard:
         self.xticks = {}
         self.test_return = {}
         self.train_return = {}
+        self.pgpe_return = {}
         self.return_dist = {}
         self.return_dist_ticks = {}
         self.return_dist_last_progress = {}
@@ -299,6 +315,9 @@ class JaxPlannerDashboard:
             dbc.Col(Graph(id='train-return-graph'), width=6),
             dbc.Col(Graph(id='test-return-graph'), width=6),
         ]),
+        dbc.Row([
+            dbc.Col(Graph(id='pgpe-return-graph'), width=6)
+        ]),
         dbc.Row([
             Graph(id='dist-return-graph')
         ])
@@ -661,6 +680,33 @@ class JaxPlannerDashboard:
            )
            return fig

+        @app.callback(
+            Output('pgpe-return-graph', 'figure'),
+            [Input('interval', 'n_intervals'),
+             Input('trigger-experiment-check', 'children'),
+             Input('tabs-main', 'active_tab')]
+        )
+        def update_pgpe_return_graph(n, trigger, active_tab):
+            if active_tab != 'tab-performance': return dash.no_update
+            fig = go.Figure()
+            for (row, checked) in self.checked.copy().items():
+                if checked:
+                    fig.add_trace(go.Scatter(
+                        x=self.xticks[row], y=self.pgpe_return[row],
+                        name=f'id={row}',
+                        mode='lines+markers',
+                        marker=dict(size=3), line=dict(width=2)
+                    ))
+            fig.update_layout(
+                title=dict(text="PGPE Return"),
+                xaxis=dict(title=dict(text="Training Iteration")),
+                yaxis=dict(title=dict(text="Cumulative Reward")),
+                font=dict(size=PLOT_AXES_FONT_SIZE),
+                legend=dict(bgcolor='rgba(0,0,0,0)'),
+                template="plotly_white"
+            )
+            return fig
+
        @app.callback(
            Output('dist-return-graph', 'figure'),
            [Input('interval', 'n_intervals'),
@@ -1316,6 +1362,7 @@ class JaxPlannerDashboard:
         self.xticks[experiment_id] = []
         self.train_return[experiment_id] = []
         self.test_return[experiment_id] = []
+        self.pgpe_return[experiment_id] = []
         self.return_dist_ticks[experiment_id] = []
         self.return_dist_last_progress[experiment_id] = 0
         self.return_dist[experiment_id] = []
@@ -1367,6 +1414,7 @@ class JaxPlannerDashboard:
         self.xticks[experiment_id].append(iteration)
         self.train_return[experiment_id].append(callback['train_return'])
         self.test_return[experiment_id].append(callback['best_return'])
+        self.pgpe_return[experiment_id].append(callback['pgpe_return'])

         # data for return distributions
         progress = callback['progress']
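
Taken together, the visualization.py hunks add a PGPE return series to the dashboard: a new storage dict, a new graph row, a new Dash callback, and a per-iteration append that reads a 'pgpe_return' key from the planner callback. A minimal sketch of the per-iteration record shape implied by this diff (the wrapper function and any keys not visible above are hypothetical):

from typing import Any, Dict

def dashboard_record(iteration: int, callback: Dict[str, Any]) -> Dict[str, Any]:
    # Keys the dashboard update code reads in this diff:
    #   'train_return' -> train-return curve
    #   'best_return'  -> test-return curve
    #   'pgpe_return'  -> PGPE-return curve (new in 2.0)
    #   'progress'     -> drives the return-distribution snapshots
    return {
        'iteration': iteration,
        'train_return': callback['train_return'],
        'test_return': callback['best_return'],
        'pgpe_return': callback['pgpe_return'],
        'progress': callback['progress'],
    }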
pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_replan.cfg
CHANGED
@@ -1,8 +1,8 @@
 [Model]
 logic='FuzzyLogic'
-comparison_kwargs={'weight':
-rounding_kwargs={'weight':
-control_kwargs={'weight':
+comparison_kwargs={'weight': 20}
+rounding_kwargs={'weight': 20}
+control_kwargs={'weight': 20}

 [Optimizer]
 method='JaxStraightLinePlan'
pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_slp.cfg
CHANGED
@@ -1,14 +1,14 @@
 [Model]
 logic='FuzzyLogic'
-comparison_kwargs={'weight':
-rounding_kwargs={'weight':
-control_kwargs={'weight':
+comparison_kwargs={'weight': 20}
+rounding_kwargs={'weight': 20}
+control_kwargs={'weight': 20}

 [Optimizer]
 method='JaxStraightLinePlan'
 method_kwargs={}
 optimizer='rmsprop'
-optimizer_kwargs={'learning_rate': 0.
+optimizer_kwargs={'learning_rate': 0.001}
 batch_size_train=1
 batch_size_test=1
 clip_grad=1.0
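
For context, config files like the two Cartpole examples above are consumed through the planner's config loader. A minimal usage sketch assuming the README-style API; the domain and instance names, the config path, and the exact tuple returned by load_config are assumptions that may differ between releases:

import pyRDDLGym
from pyRDDLGym_jax.core.planner import (
    JaxBackpropPlanner, JaxOfflineController, load_config)

# vectorized=True is required by the JAX planner backend
env = pyRDDLGym.make('CartPole_Continuous_gym', '0', vectorized=True)

# parse the [Model]/[Optimizer]/[Training] sections of a .cfg like the one above
planner_args, _, train_args = load_config('Cartpole_Continuous_gym_slp.cfg')

planner = JaxBackpropPlanner(rddl=env.model, **planner_args)
controller = JaxOfflineController(planner, **train_args)
controller.evaluate(env, episodes=1, verbose=True, render=True)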
pyRDDLGym_jax/examples/configs/Quadcopter_slp.cfg
CHANGED
@@ -1,8 +1,8 @@
 [Model]
 logic='FuzzyLogic'
-comparison_kwargs={'weight':
-rounding_kwargs={'weight':
-control_kwargs={'weight':
+comparison_kwargs={'weight': 10}
+rounding_kwargs={'weight': 10}
+control_kwargs={'weight': 10}

 [Optimizer]
 method='JaxStraightLinePlan'
@@ -11,6 +11,7 @@ optimizer='rmsprop'
 optimizer_kwargs={'learning_rate': 0.03}
 batch_size_train=1
 batch_size_test=1
+pgpe=None

 [Training]
 key=42
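
This hunk also shows that 2.0 introduces a pgpe key in the [Optimizer] section (None here, which presumably leaves the new PGPE estimator disabled). The config values are Python literals; a standalone sketch of how such entries parse with the standard library, which mirrors but is not the package's own load_config helper:

import ast
import configparser

config = configparser.ConfigParser()
config.read_string("""
[Optimizer]
method='JaxStraightLinePlan'
optimizer='rmsprop'
optimizer_kwargs={'learning_rate': 0.03}
batch_size_train=1
batch_size_test=1
pgpe=None
""")

# each value is a Python literal, so ast.literal_eval recovers the typed object
optimizer_args = {key: ast.literal_eval(val)
                  for key, val in config['Optimizer'].items()}
print(optimizer_args['pgpe'])              # None
print(optimizer_args['optimizer_kwargs'])  # {'learning_rate': 0.03}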
{pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: pyRDDLGym-jax
-Version: 1.2
+Version: 2.0
 Summary: pyRDDLGym-jax: automatic differentiation for solving sequential planning problems in JAX.
 Home-page: https://github.com/pyrddlgym-project/pyRDDLGym-jax
 Author: Michael Gimelfarb, Ayal Taitler, Scott Sanner
{pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/RECORD
CHANGED
@@ -1,12 +1,12 @@
-pyRDDLGym_jax/__init__.py,sha256=
+pyRDDLGym_jax/__init__.py,sha256=TiPG4w8nN4AzPkhugwVvZkHmAgP955NltD4QRmBLhRU,19
 pyRDDLGym_jax/entry_point.py,sha256=dxDlO_5gneEEViwkLCg30Z-KVzUgdRXaKuFjoZklkA0,974
 pyRDDLGym_jax/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pyRDDLGym_jax/core/compiler.py,sha256=
-pyRDDLGym_jax/core/logic.py,sha256=
-pyRDDLGym_jax/core/planner.py,sha256=
-pyRDDLGym_jax/core/simulator.py,sha256=
-pyRDDLGym_jax/core/tuning.py,sha256=
-pyRDDLGym_jax/core/visualization.py,sha256=
+pyRDDLGym_jax/core/compiler.py,sha256=Rn-aIqfgfWqu45bvCfPb9tB8RIOBVdbj-pI-V3WS2Z8,89212
+pyRDDLGym_jax/core/logic.py,sha256=_A6eGYtLVU3pbLAezxJVB9bnClJoaFIa2mBIDdFrqoU,39655
+pyRDDLGym_jax/core/planner.py,sha256=4j56l7SL7F89g2QA4nOpyhODmY0DamvxYLfCMKxJNbQ,118593
+pyRDDLGym_jax/core/simulator.py,sha256=DnPL93WVCMZqtqMUoiJdfWcH9pEvNgGfDfO4NV0wIS0,9271
+pyRDDLGym_jax/core/tuning.py,sha256=RKKtDZp7unvfbhZEoaunZtcAn5xtzGYqXBB_Ij_Aapc,24205
+pyRDDLGym_jax/core/visualization.py,sha256=XtQL1A5dQIlfeUpte-r3lNVw-GNLxj2EYUNMz7AFOtc,70359
 pyRDDLGym_jax/core/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyRDDLGym_jax/core/assets/favicon.ico,sha256=RMMrI9YvmF81TgYG7FO7UAre6WmYFkV3B2GmbA1l0kM,175085
 pyRDDLGym_jax/examples/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -16,8 +16,8 @@ pyRDDLGym_jax/examples/run_plan.py,sha256=v2AvwgIa4Ejr626vBOgWFJIQvay3IPKWno02zt
 pyRDDLGym_jax/examples/run_scipy.py,sha256=wvcpWCvdjvYHntO95a7JYfY2fuCMUTKnqjJikW0PnL4,2291
 pyRDDLGym_jax/examples/run_tune.py,sha256=zqrhvLR5PeWJv0NsRxDCzAPmvgPgz_1NrtM1xBy6ndU,3606
 pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_drp.cfg,sha256=mE8MqhOlkHeXIGEVrnR3QY6I-_iy4uxFYRA71P1bmtk,347
-pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_replan.cfg,sha256=
-pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_slp.cfg,sha256=
+pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_replan.cfg,sha256=nFFYHCKQUMn8x-OpJwu2pwe1tycNSJ8iAIwSkCBn33E,370
+pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_slp.cfg,sha256=eJ3HvHjODoKdtX7u-AM51xQaHJnYgzEy2t3omNG2oCs,340
 pyRDDLGym_jax/examples/configs/HVAC_ippc2023_drp.cfg,sha256=9-QMZPZuecAEaerD79ZAbGX-tgfL8Y2W-tfkAyD15Cw,362
 pyRDDLGym_jax/examples/configs/HVAC_ippc2023_slp.cfg,sha256=BiY6wwSYkR9-T46AA4n3okJ1Qvj8Iu-y1V5BrfCbqrM,340
 pyRDDLGym_jax/examples/configs/MountainCar_Continuous_gym_slp.cfg,sha256=VBlTiHFQG72D1wpebMsuzSokwqlPVD99WjPp4YoWs84,356
@@ -25,15 +25,15 @@ pyRDDLGym_jax/examples/configs/MountainCar_ippc2023_slp.cfg,sha256=bH_5O13-Y6ztv
 pyRDDLGym_jax/examples/configs/PowerGen_Continuous_drp.cfg,sha256=Pq6E9RYksue7X2cWjdWyUsV0LqQTjTvq6p0aLBVKWfY,370
 pyRDDLGym_jax/examples/configs/PowerGen_Continuous_replan.cfg,sha256=SGVQAOqrOjEsZEtxL_Z6aGbLR19h5gKCcy0oz2vtQp8,382
 pyRDDLGym_jax/examples/configs/PowerGen_Continuous_slp.cfg,sha256=6obQik2FBldoJ3VwoVfGhQqKpKdnYox770cF-SGRi3Q,345
-pyRDDLGym_jax/examples/configs/Quadcopter_drp.cfg,sha256=
-pyRDDLGym_jax/examples/configs/Quadcopter_slp.cfg,sha256=
-pyRDDLGym_jax/examples/configs/Reservoir_Continuous_drp.cfg,sha256=
+pyRDDLGym_jax/examples/configs/Quadcopter_drp.cfg,sha256=rs-CzOAyZV_NvwSh2f6Fm9XNw5Z8WIYgpAOzgTm_Gv8,403
+pyRDDLGym_jax/examples/configs/Quadcopter_slp.cfg,sha256=EtSCTjd8gWm7akQdfHFxdpGnQvHzjo2IHbAuVxTAX4U,356
+pyRDDLGym_jax/examples/configs/Reservoir_Continuous_drp.cfg,sha256=7nPOJCo3eaZuq1pCyIJJJkDM0jjJThDuDECJDZzX-uc,379
 pyRDDLGym_jax/examples/configs/Reservoir_Continuous_replan.cfg,sha256=V3jzPGuNq2IAxYy_EeZWin4Y_uf0HvGhzg06ODNSY-I,381
-pyRDDLGym_jax/examples/configs/Reservoir_Continuous_slp.cfg,sha256=
-pyRDDLGym_jax/examples/configs/UAV_Continuous_slp.cfg,sha256=
-pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_drp.cfg,sha256=
-pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_replan.cfg,sha256=
-pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_slp.cfg,sha256=
+pyRDDLGym_jax/examples/configs/Reservoir_Continuous_slp.cfg,sha256=SYAJmoUIUhhvAej3XOzC5boGxKVHnSiVi5-ZGj2S29M,354
+pyRDDLGym_jax/examples/configs/UAV_Continuous_slp.cfg,sha256=osoIPfrldPw7oJF2AaAw0-ke6YHQNdrslFBCTytsqmo,354
+pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_drp.cfg,sha256=oNX8uW8Bw2uG9zHX1zeLF3mHWDHRIlJXYvbFcY0pfCI,382
+pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_replan.cfg,sha256=exCfGI3WU7IFO7n5rRe5cO1ZHAdFwttRYzjIdD4Pz2Y,451
+pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_slp.cfg,sha256=e6Ikgv2uBbKuXHfVKt4KQ01LDUBGbc31D28bCcztJ58,413
 pyRDDLGym_jax/examples/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyRDDLGym_jax/examples/configs/default_drp.cfg,sha256=XeMWAAG_OFZo7JAMxS5-XXroZaeVMzfM0NswmEobIns,373
 pyRDDLGym_jax/examples/configs/default_replan.cfg,sha256=CK4cEz8ReXyAZPLaLG9clIIRXAqM3IplUCxbLt_V2lY,407
@@ -41,9 +41,9 @@ pyRDDLGym_jax/examples/configs/default_slp.cfg,sha256=mJo0woDevhQCSQfJg30ULVy9qG
 pyRDDLGym_jax/examples/configs/tuning_drp.cfg,sha256=CQMpSCKTkGioO7U82mHMsYWFRsutULx0V6Wrl3YzV2U,504
 pyRDDLGym_jax/examples/configs/tuning_replan.cfg,sha256=m_0nozFg_GVld0tGv92Xao_KONFJDq_vtiJKt5isqI8,501
 pyRDDLGym_jax/examples/configs/tuning_slp.cfg,sha256=KHu8II6CA-h_HblwvWHylNRjSvvGS3VHxN7JQNR4p_Q,464
-pyRDDLGym_jax-
-pyRDDLGym_jax-
-pyRDDLGym_jax-
-pyRDDLGym_jax-
-pyRDDLGym_jax-
-pyRDDLGym_jax-
+pyRDDLGym_jax-2.0.dist-info/LICENSE,sha256=Y0Gi6H6mLOKN-oIKGZulQkoTJyPZeAaeuZu7FXH-meg,1095
+pyRDDLGym_jax-2.0.dist-info/METADATA,sha256=ZYIe9c_Tar4WO8qQOvcUIJVMmZznPUBRaegS0DH2un8,15090
+pyRDDLGym_jax-2.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+pyRDDLGym_jax-2.0.dist-info/entry_points.txt,sha256=Q--z9QzqDBz1xjswPZ87PU-pib-WPXx44hUWAFoBGBA,59
+pyRDDLGym_jax-2.0.dist-info/top_level.txt,sha256=n_oWkP_BoZK0VofvPKKmBZ3NPk86WFNvLhi1BktCbVQ,14
+pyRDDLGym_jax-2.0.dist-info/RECORD,,
{pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/LICENSE
File without changes
{pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/WHEEL
File without changes
{pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/entry_points.txt
File without changes
{pyRDDLGym_jax-1.2.dist-info → pyRDDLGym_jax-2.0.dist-info}/top_level.txt
File without changes