pyRDDLGym-jax 0.1__tar.gz → 0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/PKG-INFO +1 -1
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/README.md +24 -22
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/core/compiler.py +445 -221
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/core/logic.py +129 -62
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/core/planner.py +699 -332
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/core/simulator.py +5 -7
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/core/tuning.py +23 -12
- pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/Cartpole_Continuous_drp.cfg → pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_drp.cfg +2 -3
- pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/HVAC_drp.cfg → pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/HVAC_ippc2023_drp.cfg +2 -2
- pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/MountainCar_ippc2023_slp.cfg +19 -0
- pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/Quadcopter_drp.cfg +18 -0
- pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/Reservoir_Continuous_drp.cfg +18 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Reservoir_Continuous_slp.cfg +1 -1
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/UAV_Continuous_slp.cfg +1 -1
- pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/default_drp.cfg +19 -0
- pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/default_replan.cfg +20 -0
- pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/default_slp.cfg +19 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/run_gradient.py +1 -1
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/run_gym.py +1 -2
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/run_plan.py +7 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/run_tune.py +6 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax.egg-info/PKG-INFO +1 -1
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax.egg-info/SOURCES.txt +19 -13
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/setup.py +1 -1
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/LICENSE +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/__init__.py +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/core/__init__.py +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/__init__.py +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/Cartpole_Continuous_replan.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_replan.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/Cartpole_Continuous_slp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/Cartpole_Continuous_gym_slp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/HVAC_slp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/HVAC_ippc2023_slp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/MarsRover_drp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/MarsRover_ippc2023_drp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/MarsRover_slp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/MarsRover_ippc2023_slp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/MountainCar_slp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/MountainCar_Continuous_gym_slp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/Pendulum_slp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/Pendulum_gym_slp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Pong_slp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/PowerGen_drp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/PowerGen_Continuous_drp.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/PowerGen_replan.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/PowerGen_Continuous_replan.cfg +0 -0
- /pyRDDLGym-jax-0.1/pyRDDLGym_jax/examples/configs/PowerGen_slp.cfg → /pyrddlgym_jax-0.2/pyRDDLGym_jax/examples/configs/PowerGen_Continuous_slp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Quadcopter_slp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Reservoir_Continuous_replan.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/SupplyChain_slp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Traffic_slp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_drp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_replan.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/Wildfire_MDP_ippc2014_slp.cfg +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax/examples/configs/__init__.py +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax.egg-info/dependency_links.txt +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax.egg-info/requires.txt +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/pyRDDLGym_jax.egg-info/top_level.txt +0 -0
- {pyRDDLGym-jax-0.1 → pyrddlgym_jax-0.2}/setup.cfg +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: pyRDDLGym-jax
|
|
3
|
-
Version: 0.1
|
|
3
|
+
Version: 0.2
|
|
4
4
|
Summary: pyRDDLGym-jax: JAX compilation of RDDL description files, and a differentiable planner in JAX.
|
|
5
5
|
Home-page: https://github.com/pyrddlgym-project/pyRDDLGym-jax
|
|
6
6
|
Author: Michael Gimelfarb, Ayal Taitler, Scott Sanner
|
|
@@ -33,24 +33,12 @@ To use the compiler or planner without the automated hyper-parameter tuning, you
|
|
|
33
33
|
|
|
34
34
|
Additionally, if you wish to run the examples, you need ``rddlrepository>=2``, and to run the automated tuning optimization, you will also need ``bayesian-optimization>=1.4.3``.
|
|
35
35
|
|
|
36
|
-
You can install this package, together with all of its requirements
|
|
36
|
+
You can install this package, together with all of its requirements via pip:
|
|
37
37
|
|
|
38
38
|
```shell
|
|
39
|
-
|
|
40
|
-
conda create -n jaxplan python=3.11
|
|
41
|
-
conda activate jaxplan
|
|
42
|
-
conda install pip git
|
|
43
|
-
|
|
44
|
-
# Manually install pyRDDLGym and rddlrepository
|
|
45
|
-
pip install git+https://github.com/pyrddlgym-project/pyRDDLGym
|
|
46
|
-
pip install git+https://github.com/pyrddlgym-project/rddlrepository
|
|
47
|
-
|
|
48
|
-
# Install pyRDDLGym-jax
|
|
49
|
-
pip install git+https://github.com/pyrddlgym-project/pyRDDLGym-jax
|
|
39
|
+
pip install rddlrepository pyRDDLGym-jax
|
|
50
40
|
```
|
|
51
41
|
|
|
52
|
-
A pip installer will be coming soon.
|
|
53
|
-
|
|
54
42
|
## Running the Basic Examples
|
|
55
43
|
|
|
56
44
|
A basic run script is provided to run the Jax Planner on any domain in ``rddlrepository``, provided a config file is available (currently, only a limited subset of configs are provided as examples).
|
|
@@ -232,19 +220,33 @@ An [example is provided to illustrate how you can define your own policy class a
|
|
|
232
220
|
|
|
233
221
|
## Citing pyRDDLGym-jax
|
|
234
222
|
|
|
235
|
-
The main ideas of
|
|
223
|
+
The [following citation](https://ojs.aaai.org/index.php/ICAPS/article/view/31480) describes the main ideas of the framework. Please cite it if you found it useful:
|
|
224
|
+
|
|
225
|
+
```
|
|
226
|
+
@inproceedings{gimelfarb2024jaxplan,
|
|
227
|
+
title={JaxPlan and GurobiPlan: Optimization Baselines for Replanning in Discrete and Mixed Discrete and Continuous Probabilistic Domains},
|
|
228
|
+
author={Michael Gimelfarb and Ayal Taitler and Scott Sanner},
|
|
229
|
+
booktitle={34th International Conference on Automated Planning and Scheduling},
|
|
230
|
+
year={2024},
|
|
231
|
+
url={https://openreview.net/forum?id=7IKtmUpLEH}
|
|
232
|
+
}
|
|
233
|
+
```
|
|
234
|
+
|
|
235
|
+
The utility optimization is discussed in [this paper](https://ojs.aaai.org/index.php/AAAI/article/view/21226):
|
|
236
236
|
|
|
237
237
|
```
|
|
238
|
-
@
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
238
|
+
@inproceedings{patton2022distributional,
|
|
239
|
+
title={A distributional framework for risk-sensitive end-to-end planning in continuous mdps},
|
|
240
|
+
author={Patton, Noah and Jeong, Jihwan and Gimelfarb, Mike and Sanner, Scott},
|
|
241
|
+
booktitle={Proceedings of the AAAI Conference on Artificial Intelligence},
|
|
242
|
+
volume={36},
|
|
243
|
+
number={9},
|
|
244
|
+
pages={9894--9901},
|
|
245
|
+
year={2022}
|
|
243
246
|
}
|
|
244
247
|
```
|
|
245
248
|
|
|
246
|
-
|
|
247
|
-
- [A Distributional Framework for Risk-Sensitive End-to-End Planning in Continuous MDP, AAAI 2022](https://ojs.aaai.org/index.php/AAAI/article/view/21226)
|
|
249
|
+
Some of the implementation details derive from the following literature, which you may wish to also cite in your research papers:
|
|
248
250
|
- [Deep reactive policies for planning in stochastic nonlinear domains, AAAI 2019](https://ojs.aaai.org/index.php/AAAI/article/view/4744)
|
|
249
251
|
- [Scalable planning with tensorflow for hybrid nonlinear domains, NeurIPS 2017](https://proceedings.neurips.cc/paper/2017/file/98b17f068d5d9b7668e19fb8ae470841-Paper.pdf)
|
|
250
252
|
|