unienv-0.0.1b4-py3-none-any.whl → unienv-0.0.1b6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. {unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/METADATA +3 -2
  2. {unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/RECORD +43 -32
  3. {unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/WHEEL +1 -1
  4. unienv_data/base/common.py +25 -10
  5. unienv_data/base/storage.py +2 -0
  6. unienv_data/batches/backend_compat.py +1 -1
  7. unienv_data/batches/combined_batch.py +1 -1
  8. unienv_data/batches/slicestack_batch.py +1 -0
  9. unienv_data/replay_buffer/replay_buffer.py +179 -65
  10. unienv_data/replay_buffer/trajectory_replay_buffer.py +230 -163
  11. unienv_data/storages/_episode_storage.py +438 -0
  12. unienv_data/storages/_list_storage.py +136 -0
  13. unienv_data/storages/backend_compat.py +268 -0
  14. unienv_data/storages/dict_storage.py +39 -7
  15. unienv_data/storages/flattened.py +11 -4
  16. unienv_data/storages/hdf5.py +11 -0
  17. unienv_data/storages/image_storage.py +144 -0
  18. unienv_data/storages/npz_storage.py +135 -0
  19. unienv_data/storages/pytorch.py +17 -10
  20. unienv_data/storages/transformation.py +16 -1
  21. unienv_data/storages/video_storage.py +297 -0
  22. unienv_data/third_party/tensordict/memmap_tensor.py +1174 -0
  23. unienv_data/transformations/image_compress.py +97 -21
  24. unienv_interface/func_wrapper/frame_stack.py +1 -1
  25. unienv_interface/space/space_utils/batch_utils.py +5 -1
  26. unienv_interface/space/space_utils/flatten_utils.py +8 -2
  27. unienv_interface/space/spaces/dict.py +6 -0
  28. unienv_interface/space/spaces/tuple.py +4 -4
  29. unienv_interface/transformations/__init__.py +3 -1
  30. unienv_interface/transformations/batch_and_unbatch.py +42 -4
  31. unienv_interface/transformations/chained_transform.py +9 -8
  32. unienv_interface/transformations/crop.py +69 -0
  33. unienv_interface/transformations/dict_transform.py +8 -2
  34. unienv_interface/transformations/identity.py +16 -0
  35. unienv_interface/transformations/image_resize.py +106 -0
  36. unienv_interface/transformations/iter_transform.py +92 -0
  37. unienv_interface/transformations/rescale.py +24 -5
  38. unienv_interface/utils/symbol_util.py +7 -1
  39. unienv_interface/wrapper/backend_compat.py +1 -1
  40. unienv_interface/wrapper/frame_stack.py +1 -1
  41. {unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/licenses/LICENSE +0 -0
  42. {unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/top_level.txt +0 -0
  43. /unienv_interface/utils/{data_queue.py → framestack_queue.py} +0 -0
{unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: unienv
- Version: 0.0.1b4
+ Version: 0.0.1b6
  Summary: Unified robot environment framework supporting multiple tensor and simulation backends
  License-Expression: MIT
  Project-URL: Homepage, https://github.com/UniEnvOrg/UniEnv
@@ -12,9 +12,10 @@ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: numpy
- Requires-Dist: xbarray>=0.0.1a8
+ Requires-Dist: xbarray>=0.0.1a13
  Requires-Dist: pillow
  Requires-Dist: cloudpickle
+ Requires-Dist: pyvers
  Provides-Extra: dev
  Requires-Dist: pytest; extra == "dev"
  Provides-Extra: gymnasium

{unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/RECORD

@@ -1,27 +1,34 @@
- unienv-0.0.1b4.dist-info/licenses/LICENSE,sha256=nkklvEaJUR4QDBygz7tkEe1FMVKV1JSjnGzJNLhdIWM,1091
+ unienv-0.0.1b6.dist-info/licenses/LICENSE,sha256=nkklvEaJUR4QDBygz7tkEe1FMVKV1JSjnGzJNLhdIWM,1091
  unienv_data/__init__.py,sha256=zFxbe7aM5JvYXIK0FGnOPwWQJMN-8l_l8prB85CkcA8,95
  unienv_data/base/__init__.py,sha256=w-I8A-z7YYArkHc2ZOVGrfzfThsaDBg7aD7qMFprNM8,186
- unienv_data/base/common.py,sha256=EYOzuYmvsCy1uJftsw6cXeycPIr8P7GWZ3_q4wgoNeo,12879
- unienv_data/base/storage.py,sha256=s99PYEZGa76kf-Enx57kOyVkwjb-tpU-vTHcGc5Dcew,5415
+ unienv_data/base/common.py,sha256=A3RtD3Omqk0Qplsc-44ukAEzbQEU22_MkwUlC7l-HHM,13083
+ unienv_data/base/storage.py,sha256=afICsO_7Zbm9azV0Jxho_z9F7JM30TUDjJM1NHETDHM,5495
  unienv_data/batches/__init__.py,sha256=Vi92f8ddgFYCqwv7xO2Pi3oJePnioJ4XrJbQVV7eIvk,234
- unienv_data/batches/backend_compat.py,sha256=7Juf7nU2jYHohRzNzmGfqMMpJtFM-3oTzzLu6EbC77E,8168
- unienv_data/batches/combined_batch.py,sha256=aua1H86sa_qWrCtAAp5JIZMGtFiiKFPFkU0y5JoyElM,15325
+ unienv_data/batches/backend_compat.py,sha256=tzFG8gTq0yW-J6PLvu--lCGS0lFc0QfelicJ50p_HYc,8207
+ unienv_data/batches/combined_batch.py,sha256=pNrbLvU565BUDWO0pZLCnSMygmoGVCLxjC9OkLRKtLA,15330
  unienv_data/batches/framestack_batch.py,sha256=pdURqZeksOlbf21Nhx8kkm0gtFt6rjt2OiNWgZPdFCM,2312
- unienv_data/batches/slicestack_batch.py,sha256=J2EhARcPA-zz6EBnV7OLzm4yyvnZ06vrdUoPD5RkJ-o,16672
+ unienv_data/batches/slicestack_batch.py,sha256=Q3-gsJTvMjKTeZAHWNBTGRsws0HctsfMMTw0vylNxvA,16785
  unienv_data/batches/transformations.py,sha256=b4HqX3wZ6TuRgQ2q81Jv43PmeHGmP8cwURK_ULjGNgs,5647
  unienv_data/integrations/pytorch.py,sha256=pW5rXBXagfzwJjM_VGgg8CPXEs3e2fKgg4nY7M3dpOc,2350
  unienv_data/replay_buffer/__init__.py,sha256=uVebYruIYlj8OjTYVi8UYI4gWp3S3XIdgFlHbwO260o,100
- unienv_data/replay_buffer/replay_buffer.py,sha256=nhbC-7aHGIYhcCdmaaDdhB2U9ODAZrbKMq8dP8ffOv0,10344
- unienv_data/replay_buffer/trajectory_replay_buffer.py,sha256=fxV6FIqAHhN8opYs2WjAJMPqNRWD3iIku-4WlaydyG4,20737
+ unienv_data/replay_buffer/replay_buffer.py,sha256=8vPma5dL6jDGhI3Oo6IEvNcDYJG9Lb0Xlvxp45tQMEs,14498
+ unienv_data/replay_buffer/trajectory_replay_buffer.py,sha256=cqRmzdewFS8IvJcMwxxQgwZf7TvvrViym87OaCOes3Y,24009
  unienv_data/samplers/__init__.py,sha256=e7uunWN3r-g_2fDaMsYMe8cZcF4N-okCxqBPweQnE0s,97
  unienv_data/samplers/multiprocessing_sampler.py,sha256=FEBK8pMTnkpA0xuMkbvlv4aIdVTTubeT8BjL60BJL5o,13254
  unienv_data/samplers/step_sampler.py,sha256=ZCcrx9WbILtaR6izhIP3DhtmFcP7KQBdaYaSZ7vWwRk,3010
- unienv_data/storages/dict_storage.py,sha256=SqCGcGT9Y4l0thdmx23XSxRMzIEIuldA6m8Cd9HrpnA,12588
- unienv_data/storages/flattened.py,sha256=Fu01TjrzvmyNhXEGtC4FiBTb7cqXDtVkErc1QNwLvcI,6704
- unienv_data/storages/hdf5.py,sha256=F_mkrmX6SGT2HamJAyYopBmj_Nf5NzJiyvVN9irtiiM,26260
- unienv_data/storages/pytorch.py,sha256=ftO8cND7PFV0J1B1o2YOWqj4U_pyWsJvWv9lC9A7LJg,6953
- unienv_data/storages/transformation.py,sha256=9BIwrvdruiTRduqC03e5UbSjBT1jLSxLCkNfrsVDP7I,7577
- unienv_data/transformations/image_compress.py,sha256=dINrvmpTWy3sbqruHk0kPZG2XNyJI90ERgErXV7GamE,9131
+ unienv_data/storages/_episode_storage.py,sha256=OpZt4P-P6LHrBR4F-tNcCFROLskWaOKWCDfoPV7qz1I,21970
+ unienv_data/storages/_list_storage.py,sha256=pH9xZOqXCx65NBRRD-INcP8OP-NWsI-JvdzVsPj9MSg,5225
+ unienv_data/storages/backend_compat.py,sha256=BxeMJlC3FI60KLJ7QB5kF-mrGlJ6xi584Dcu4IN4Zrc,10714
+ unienv_data/storages/dict_storage.py,sha256=DSqRIgo3m1XtUcLtyjYSqqpi01mr_nJOLg5BCddwPcg,13862
+ unienv_data/storages/flattened.py,sha256=Yf1G4D6KE36sESyDMGWKXqhFjz6Idx7N1aEhihmGovA,7055
+ unienv_data/storages/hdf5.py,sha256=Jnls1rs7nlOOp9msmAfhuZp80OZd8S2Llls176EOUc4,27096
+ unienv_data/storages/image_storage.py,sha256=4J1ZiGFHbGLHmReMztImJoDcRmiB_llD2wbMB3rdvOQ,5137
+ unienv_data/storages/npz_storage.py,sha256=IP2DXbUs_ySzILne3s3hq3gwHiy9tfpWz6HcNciA8DU,4868
+ unienv_data/storages/pytorch.py,sha256=bf3ys6eBlMvjyPK4XE-itENjEWq5Vm60qNwBNqJIZqg,7345
+ unienv_data/storages/transformation.py,sha256=-9_jPZNpx6RXY_ojv_1UCSTa4Z9apI9V9jit8nG93oM,8133
+ unienv_data/storages/video_storage.py,sha256=2vcNlghhDZWWzAdf9t0VeCMZrv-x_rYkYaCw8XV8AJA,13331
+ unienv_data/third_party/tensordict/memmap_tensor.py,sha256=J6SkFf-FDy43XuaHLgbvDsHt6v2vYfuhRyeoV02P8vw,42589
+ unienv_data/transformations/image_compress.py,sha256=f8JTY4DJEXaiu5lO77T4ROV950rh_bOZBchOF-O0tx8,13130
  unienv_interface/__init__.py,sha256=pAWqfm4l7NAssuyXCugIjekSIh05aBbOjNhwsNXcJbE,100
  unienv_interface/backends/__init__.py,sha256=L7CFwCChHVL-2Dpz34pTGC37WgodfJEeDQwXscyM7FM,198
  unienv_interface/backends/base.py,sha256=1_hji1qwNAhcEtFQdAuzaNey9g5bWYj38t1sQxjnggc,132
@@ -36,38 +43,42 @@ unienv_interface/env_base/funcenv_wrapper.py,sha256=chw1iJ1RhAFMv4JAk67cttJvI9ag
  unienv_interface/env_base/vec_env.py,sha256=bcv6NdOxt0Xp1fRMXqzFtmVw6LQ-pDj_Jvj-qaW6otQ,16116
  unienv_interface/env_base/wrapper.py,sha256=7hf4Rr2wouS0igPoahhvb2tzYY3bCaWL0NlgwpYZwQs,9734
  unienv_interface/func_wrapper/__init__.py,sha256=6BPF8O25WkIBpODVTwnOE9HGSm3KRKX6iPwFGWESlxA,123
- unienv_interface/func_wrapper/frame_stack.py,sha256=52CqAHDqwgHtOwMwxzB3Syup9kA19zdlvXCH4mI7MNU,6819
+ unienv_interface/func_wrapper/frame_stack.py,sha256=wuGsrluoz60FTczRuo8sHPfpl_Yl4GVTRBb2QDzYPrA,6825
  unienv_interface/func_wrapper/transformation.py,sha256=7mdzcpjLjqtpbtXoqbkGtTMPQxoMmMsqzDWHcZLbrhs,5939
  unienv_interface/space/__init__.py,sha256=6-wLoD9mKDAfz7IuQs_Rn9DMDfDwTZ0tEhQ924libpg,99
  unienv_interface/space/space.py,sha256=mFlCcDvMgEPTXlwo_iwBlm6Eg4Bn2rrecgsfIVstdq0,4067
  unienv_interface/space/space_utils/__init__.py,sha256=GAsPoZC8YNabx3Gw5m2o4zsnG8zmA3mcuM9_lNKhiGo,121
- unienv_interface/space/space_utils/batch_utils.py,sha256=qXK7kERPXKGIYozz7lpjzVz56S9GkH6ZASfIRzCYXHY,36993
+ unienv_interface/space/space_utils/batch_utils.py,sha256=hD4ItBp2WQzIQR5u0Zkw0FQQfOeg6ZPRi18Johmcc40,37150
  unienv_interface/space/space_utils/construct_utils.py,sha256=Y4RpV9obY8XQ85O3r_NC1HrBk-Nm941ffRNXNL7nHgA,8323
- unienv_interface/space/space_utils/flatten_utils.py,sha256=kkHkjrsk43NDbg3Q5VAhVoIXStuRayYFO-7knsDzx4A,12289
+ unienv_interface/space/space_utils/flatten_utils.py,sha256=6ObJgVq4yhOq_7N5E5pQZS6WmmeKu-MyRFJ_x-gqmNg,12607
  unienv_interface/space/space_utils/gym_utils.py,sha256=nH8EKruOKCXNrIMPUd9F4XGKCfFkhxsTmx4I1BeSgn0,15079
  unienv_interface/space/space_utils/serialization_utils.py,sha256=LWYSFN7E6tEFe8ULWm42LkFUxP_0dfTGkCcx0yl4Y8s,9530
  unienv_interface/space/spaces/__init__.py,sha256=Jap768TlwHFDDiTzHZ0qaHEFEVC1cKA2QzLlSZVQnjI,535
  unienv_interface/space/spaces/batched.py,sha256=RA8aLUSS14zBSCTm_ud18TTa-ntbIZ074xwJ0xls1Jk,3691
  unienv_interface/space/spaces/binary.py,sha256=0iQUbO37dhkznVpjhsJdwlD-KdWgCEx2H7KrybuZ_PM,3570
  unienv_interface/space/spaces/box.py,sha256=NCmileEZOKz-L3WNzZ-uwydrRFsIMdNZBwTn1vWgeI0,13316
- unienv_interface/space/spaces/dict.py,sha256=G5_iYC1Bj5DqeJ7aFlq6eRJbnpATbIRIyRu1jF_UUvk,7022
+ unienv_interface/space/spaces/dict.py,sha256=NggllKi0smoz2bL3yrfBM5FJGBNRWZ05xXaNEqY1QKs,7234
  unienv_interface/space/spaces/dynamic_box.py,sha256=HvMNgzfYwIVc5VVgCtq-8lQbNI1V1dZMI-w60AwYss4,19591
  unienv_interface/space/spaces/graph.py,sha256=KocRFLtYP5VWYpwbP6HybXH5R4jTIYJdNePKb6vhnYE,15163
  unienv_interface/space/spaces/text.py,sha256=ePGGJdiD3q-BAX6IHLO7HMe0OH4VrzF043K02eb0zXI,4443
- unienv_interface/space/spaces/tuple.py,sha256=rgZQz3EB3CLbIk9UlHBQbM6w9gssbA1izm-Qq-_Chqs,4267
+ unienv_interface/space/spaces/tuple.py,sha256=mmJab6kl5VtQStyn754pmk0RLPSQW06Mu15Hp3Qad80,4287
  unienv_interface/space/spaces/union.py,sha256=Qisd-DdmPcGRmdhZFGiQw8_AOjYWqkuQ4Hwd-I8tdSI,4375
- unienv_interface/transformations/__init__.py,sha256=g19uGnDHMywvDAXRaqFgoWAF1vCPrbJENEpaEgtIrOw,353
- unienv_interface/transformations/batch_and_unbatch.py,sha256=ELCnNtwmgA5wpTBJZasfNSHmtf4vzydzLPmO6IGbT9o,1164
- unienv_interface/transformations/chained_transform.py,sha256=TDnUvxUKK6bXGc_sfr6ZCvvVWw7P5KX2sA9i7i2lx14,2075
- unienv_interface/transformations/dict_transform.py,sha256=ynrJrloVUix2I27Ir1mL86crT0vY5DvpiBAVxPBJup4,5357
+ unienv_interface/transformations/__init__.py,sha256=zf8NbY-HW4EgHri9PxpuelEvBpFwUtDEcJiXXhFSDNQ,435
+ unienv_interface/transformations/batch_and_unbatch.py,sha256=DK-0VNFvoB9FD1qPtq9SNphQnuzRYJrm_L7VuPiLaMY,2145
+ unienv_interface/transformations/chained_transform.py,sha256=_6E1g_8u-WAxKd-f2sHJwKQk9HTIRnulyXwHUwJP12I,2203
+ unienv_interface/transformations/crop.py,sha256=sigcQcLklp3P6b6KQfP-Ja3OV1CWeusCLNKMvNNdACQ,3107
+ unienv_interface/transformations/dict_transform.py,sha256=GhFSN9t3mL3gvoD_GH-np68Fo4m78YnSyHbUHeyzKcw,5540
  unienv_interface/transformations/filter_dict.py,sha256=DzR-hgHoHJObTipxwB2UrKVlTxbfIrJohaOgqdAICLY,5871
- unienv_interface/transformations/rescale.py,sha256=fM5ukWUvNvPeDO48_PRU0KyyvGhBIDxaN9XZyQ1VaQQ,4364
+ unienv_interface/transformations/identity.py,sha256=biW3caBis6ixlOJQk2RJ-7OzP16n0yhpIuqvd7e7Ack,549
+ unienv_interface/transformations/image_resize.py,sha256=QyPnpMvdx3IvQyW5_iRq7LMnQQuq7XpOv3x6qQHuNeI,4454
+ unienv_interface/transformations/iter_transform.py,sha256=lK7fopeiZJrO0WUXFoUmAOhmYkdHXDnChsQ9TJGV8hU,3688
+ unienv_interface/transformations/rescale.py,sha256=85PAq5ta9KelxMaL6RIJXBFxOmRbZjsGlMJiElCW9wI,5329
  unienv_interface/transformations/transformation.py,sha256=u4_9H1tvophhgG0p0F3xfkMMsRuaKY2TQmVeGoeQsJ0,1652
  unienv_interface/utils/control_util.py,sha256=lY_1EknglY3cNekWX9rYWt0ZUglaPMtIt4M5D9y0WfE,2351
- unienv_interface/utils/data_queue.py,sha256=UZiuQDOn39DB9Heu6xinrwuzAL3X8jHlDkFoSC5Phtc,5707
+ unienv_interface/utils/framestack_queue.py,sha256=UZiuQDOn39DB9Heu6xinrwuzAL3X8jHlDkFoSC5Phtc,5707
  unienv_interface/utils/seed_util.py,sha256=Up3nBXj7L8w-S9W5Q1U2d9accMhMf0TmHPaN6JXDVWs,677
  unienv_interface/utils/stateclass.py,sha256=xjzicPGX1UuI7q3ZAxhBCCoouKfNtLywUzQtLaT0yS4,1390
- unienv_interface/utils/symbol_util.py,sha256=NAERK-D_2MaTg2eYW-L75tbzPQN5YJIiKtM9zuQ89Sw,383
+ unienv_interface/utils/symbol_util.py,sha256=EKC5cVyuXaP5n68-bSbk1A3jCCJCrX90BF7c8mFQYrU,562
  unienv_interface/utils/vec_util.py,sha256=EIK680ReCl_rr-qKP8co5hwz8Dx-gks8SHf-CLOZSOA,373
  unienv_interface/world/__init__.py,sha256=aGuYTz8XFzW32RGkdi2b2LJ1sa0kgFrQyOR3JXDEwLQ,230
  unienv_interface/world/combined_funcnode.py,sha256=O9qWxhtMJkDVtWuGyaeEj3nKMgIyRAPqF9-5LU6yna8,10853
@@ -78,16 +89,16 @@ unienv_interface/world/node.py,sha256=EAvHnx0u7IudmWQDbAUIRVEqB4kh2Xsm1aXdS3Celo
  unienv_interface/world/world.py,sha256=Kl7wbNbs2YR3CjFrCLFhDB3DQUAWM6LjBwSADQtBTII,5740
  unienv_interface/wrapper/__init__.py,sha256=ZNqr-WjVRqgvIxkLkeABxpYZ6tRgJNZOzmluDeJ6W_w,614
  unienv_interface/wrapper/action_rescale.py,sha256=rTJlEHvWSuwGVX83cjfLWvszBk7B2iExX_K37vH8Wic,1231
- unienv_interface/wrapper/backend_compat.py,sha256=T6hosgu2hrZvg3xtnyELmR6Exlz-ztqdj9vdyiz7bhI,7081
+ unienv_interface/wrapper/backend_compat.py,sha256=amLAITi1qLylQ45BkpvmwMXSkG-J9YEu1JPjCrBT5I8,7120
  unienv_interface/wrapper/batch_and_unbatch.py,sha256=HpmnppgOKmshNlfmJYkGQYtEU7_U7q3mEdY5n4UaqEY,3457
  unienv_interface/wrapper/control_frequency_limit.py,sha256=B0E2aUbaUr2p2yIN6wT3q4rAbPYsVroioqma2qKMoC0,2322
  unienv_interface/wrapper/flatten.py,sha256=NWA5xne5j_L34oq_wT85wGvp6iHwdCSeGsk1DMugvRw,5837
- unienv_interface/wrapper/frame_stack.py,sha256=lZZh_T_AmxsRWeYSLsTU321lVgIt12MX1eWl_yRNlWg,6002
+ unienv_interface/wrapper/frame_stack.py,sha256=07rt8SuUQmniu0HRAzAuSrW9K1ri_87UxxsF-WIUzbI,6008
  unienv_interface/wrapper/gym_compat.py,sha256=JhLxDsO1NsJnKzKhO0MqMw9i5_1FLxoxKilWaQQyBkw,9789
  unienv_interface/wrapper/time_limit.py,sha256=VRvB00BK7deI2QtdGatqwDWmPgjgjg1E7MTvEyaW5rg,2904
  unienv_interface/wrapper/transformation.py,sha256=pQ-_YVU8WWDqSk2sONUUgQY1iigOD092KNcp1DYxoxk,10043
  unienv_interface/wrapper/video_record.py,sha256=y_nJRYgo1SeLeO_Ymg9xbbGPKm48AbU3BxZK2wd0gzk,8679
- unienv-0.0.1b4.dist-info/METADATA,sha256=R_70XnKo1K6ObRxMmSlW1W_lxfD_rGR6txa3wBHGPOM,3033
- unienv-0.0.1b4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- unienv-0.0.1b4.dist-info/top_level.txt,sha256=wfcJ5_DruUtOEUZjEyfadaKn7B90hWqz2aw-eM3wX5g,29
- unienv-0.0.1b4.dist-info/RECORD,,
+ unienv-0.0.1b6.dist-info/METADATA,sha256=i9zJN3WnVG3gmSSZnKI8SROtMN3U5HGUGRD4p1fu3Ho,3056
+ unienv-0.0.1b6.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+ unienv-0.0.1b6.dist-info/top_level.txt,sha256=wfcJ5_DruUtOEUZjEyfadaKn7B90hWqz2aw-eM3wX5g,29
+ unienv-0.0.1b6.dist-info/RECORD,,

{unienv-0.0.1b4.dist-info → unienv-0.0.1b6.dist-info}/WHEEL

@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.1)
  Root-Is-Purelib: true
  Tag: py3-none-any


unienv_data/base/common.py

@@ -7,6 +7,7 @@ from unienv_interface.env_base.env import ContextType, ObsType, ActType
  from unienv_interface.space import Space, BoxSpace, DictSpace
  import dataclasses

+ from functools import cached_property
  from unienv_interface.space.space_utils import batch_utils as space_batch_utils, flatten_utils as space_flatten_utils

  __all__ = [
@@ -46,13 +47,18 @@ class BatchBase(abc.ABC, Generic[BatchT, BArrayType, BDeviceType, BDtypeType, BR
      ):
          self.single_space = single_space
          self.single_metadata_space = single_metadata_space
-         self._batched_space : Space[BatchT, BDeviceType, BDtypeType, BRNGType] = space_batch_utils.batch_space(single_space, 1)
-         if single_metadata_space is not None:
-             self._batched_metadata_space : DictSpace[
-                 BDeviceType, BDtypeType, BRNGType
-             ] = space_batch_utils.batch_space(single_metadata_space, 1)
+ 
+     # For backwards compatibility
+     @cached_property
+     def _batched_space(self) -> Space[BatchT, BDeviceType, BDtypeType, BRNGType]:
+         return space_batch_utils.batch_space(self.single_space, 1)
+ 
+     @cached_property
+     def _batched_metadata_space(self) -> Optional[DictSpace[BDeviceType, BDtypeType, BRNGType]]:
+         if self.single_metadata_space is not None:
+             return space_batch_utils.batch_space(self.single_metadata_space, 1)
          else:
-             self._batched_metadata_space = None
+             return None

      @property
      def backend(self) -> ComputeBackend[BArrayType, BDeviceType, BDtypeType, BRNGType]:
@@ -146,7 +152,7 @@ class BatchBase(abc.ABC, Generic[BatchT, BArrayType, BDeviceType, BDtypeType, BR
          if tqdm:
              from tqdm import tqdm
              iterable_start = tqdm(iterable_start, desc="Extending Batch")
-         for start_idx in range(0, n_total, chunk_size):
+         for start_idx in iterable_start:
              end_idx = min(start_idx + chunk_size, n_total)
              data_chunk = other.get_at(slice(start_idx, end_idx))
              self.extend(data_chunk)
@@ -183,15 +189,24 @@ class BatchSampler(
      ) -> None:
          super().__init__(single_space=single_space, single_metadata_space=single_metadata_space)
          self.batch_size = batch_size
-         self._batched_space : Space[SamplerBatchT, SamplerDeviceType, SamplerDtypeType, SamplerRNGType] = space_batch_utils.batch_space(self.single_space, batch_size)
-         self._batched_metadata_space : Optional[DictSpace[SamplerDeviceType, SamplerDtypeType, SamplerRNGType]] = space_batch_utils.batch_space(self.single_metadata_space, batch_size) if self.single_metadata_space is not None else None
- 
+ 
      def manual_seed(self, seed : int) -> None:
          if self.rng is not None:
              self.rng = self.backend.random.random_number_generator(seed, device=self.device)
          if self.data_rng is not None:
              self.data_rng = self.backend.random.random_number_generator(seed, device=self.data.device)

+     @cached_property
+     def _batched_space(self) -> Space[BatchT, BDeviceType, BDtypeType, BRNGType]:
+         return space_batch_utils.batch_space(self.single_space, self.batch_size)
+ 
+     @cached_property
+     def _batched_metadata_space(self) -> Optional[DictSpace[BDeviceType, BDtypeType, BRNGType]]:
+         if self.single_metadata_space is not None:
+             return space_batch_utils.batch_space(self.single_metadata_space, self.batch_size)
+         else:
+             return None
+ 
      @property
      def sampled_space(self) -> Space[SamplerBatchT, SamplerDeviceType, SamplerDtypeType, SamplerRNGType]:
          return self._batched_space
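
Note: this change replaces eager construction of `_batched_space` / `_batched_metadata_space` in `__init__` with lazily cached properties, so the batched space is only built when first accessed and subclasses such as `BatchSampler` can override it with their own batch size. A minimal sketch of the same pattern, using a hypothetical `batch_space` helper in place of UniEnv's `space_batch_utils.batch_space`:

    from functools import cached_property
    from typing import Optional

    def batch_space(space, n: int):
        # Hypothetical stand-in for space_batch_utils.batch_space:
        # a "space" here is just a shape tuple that gains a leading batch dimension.
        return (n, *space)

    class LazyBatch:
        def __init__(self, single_space, single_metadata_space=None):
            self.single_space = single_space
            self.single_metadata_space = single_metadata_space

        @cached_property
        def _batched_space(self):
            # Built on first access, then cached on the instance.
            return batch_space(self.single_space, 1)

        @cached_property
        def _batched_metadata_space(self) -> Optional[tuple]:
            if self.single_metadata_space is not None:
                return batch_space(self.single_metadata_space, 1)
            return None

    b = LazyBatch((3, 84, 84))
    assert b._batched_space == (1, 3, 84, 84)  # computed lazily, only when requested
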

unienv_data/base/storage.py

@@ -20,6 +20,7 @@ class SpaceStorage(abc.ABC, Generic[BatchT, BArrayType, BDeviceType, BDtypeType,
          *args,
          capacity : Optional[int],
          cache_path : Optional[Union[str, os.PathLike]] = None,
+         multiprocessing : bool = False,
          **kwargs
      ) -> "SpaceStorage[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGType]":
          raise NotImplementedError
@@ -32,6 +33,7 @@ class SpaceStorage(abc.ABC, Generic[BatchT, BArrayType, BDeviceType, BDtypeType,
          *,
          capacity : Optional[int] = None,
          read_only : bool = True,
+         multiprocessing : bool = False,
          **kwargs
      ) -> "SpaceStorage[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGType]":
          raise NotImplementedError
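
Note: both abstract constructors of `SpaceStorage` now accept a `multiprocessing` flag. The sketch below is illustrative only (the class and method names are placeholders, not UniEnv's real ones); it shows how a defaulted keyword plus `**kwargs` lets such a flag be threaded through without breaking subclasses:

    import abc
    from typing import Optional

    class Storage(abc.ABC):
        @classmethod
        @abc.abstractmethod
        def create(cls, capacity: Optional[int], *, multiprocessing: bool = False, **kwargs) -> "Storage":
            raise NotImplementedError

    class InMemoryStorage(Storage):
        def __init__(self, capacity: Optional[int], multiprocessing: bool):
            self.capacity = capacity
            self.multiprocessing = multiprocessing

        @classmethod
        def create(cls, capacity: Optional[int], *, multiprocessing: bool = False, **kwargs) -> "Storage":
            # A subclass that does not care about the flag can simply absorb it via **kwargs.
            return cls(capacity, multiprocessing)

    storage = InMemoryStorage.create(128, multiprocessing=True)
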

unienv_data/batches/backend_compat.py

@@ -36,7 +36,7 @@ def data_to(
              key: data_to(value, source_backend, target_backend, target_device)
              for key, value in data.items()
          }
-     elif isinstance(data, Sequence):
+     elif isinstance(data, Sequence) and not isinstance(data, (str, bytes)):
          data = [
              data_to(value, source_backend, target_backend, target_device)
              for value in data
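
Note: `str` and `bytes` are `Sequence` subtypes, so the previous check would recurse into individual characters (and never terminate, since a one-character string contains itself). A standalone illustration of the guard, with a simplified `convert` standing in for `data_to`:

    from collections.abc import Sequence

    print(isinstance("hello", Sequence))   # True: str is a Sequence
    print(isinstance(b"raw", Sequence))    # True: so is bytes

    def convert(data):
        # Simplified stand-in for data_to: recurse into containers, leave leaves alone.
        if isinstance(data, Sequence) and not isinstance(data, (str, bytes)):
            return [convert(v) for v in data]
        return data

    print(convert(["a", ["b", 1]]))        # ['a', ['b', 1]] -- strings survive intact
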

unienv_data/batches/combined_batch.py

@@ -94,7 +94,7 @@ class CombinedBatch(BatchBase[
          batch_index = int(self.backend.sum(
              idx >= self.index_caches[:, 0]
          ) - 1)
-         return batch_index, idx - self.index_caches[batch_index, 0]
+         return batch_index, idx - int(self.index_caches[batch_index, 0])

      def _convert_index(self, idx : Union[IndexableType, BArrayType]) -> Tuple[
          int,
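
Note: without the `int(...)` cast, the returned offset is a backend scalar rather than a Python `int`, which lets backend types leak into later index arithmetic. Illustrated here with NumPy as a stand-in for whatever compute backend the batch uses:

    import numpy as np

    index_caches = np.array([[0], [10], [25]])   # start index of each sub-batch
    idx = 12
    batch_index = int(np.sum(idx >= index_caches[:, 0]) - 1)

    raw_offset = idx - index_caches[batch_index, 0]        # numpy.int64
    cast_offset = idx - int(index_caches[batch_index, 0])  # plain Python int
    print(batch_index, type(raw_offset), type(cast_offset))
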

unienv_data/batches/slicestack_batch.py

@@ -33,6 +33,7 @@ class SliceStackedBatch(BatchBase[
          fill_invalid_data : bool = True,
          stack_metadata : bool = False,
      ):
+         assert batch.backend.dtype_is_real_integer(fixed_offset.dtype), "Fixed offset must be an integer tensor"
          assert len(fixed_offset.shape) == 1, "Fixed offset must be a 1D tensor"
          assert fixed_offset.shape[0] > 0, "Fixed offset must have a positive length"
          assert batch.backend.any(fixed_offset == 0), "There should be at least one zero in the fixed offset"
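
Note: the new assertion rejects non-integer offset tensors before any indexing happens. A NumPy-flavoured analogue of the check (UniEnv's own `dtype_is_real_integer` lives on its backend abstraction, so this is only an approximation):

    import numpy as np

    def check_fixed_offset(fixed_offset: np.ndarray) -> None:
        # Rough analogue of backend.dtype_is_real_integer: ints pass, floats and bools do not.
        assert np.issubdtype(fixed_offset.dtype, np.integer), "Fixed offset must be an integer tensor"
        assert fixed_offset.ndim == 1, "Fixed offset must be a 1D tensor"
        assert fixed_offset.shape[0] > 0, "Fixed offset must have a positive length"
        assert np.any(fixed_offset == 0), "There should be at least one zero in the fixed offset"

    check_fixed_offset(np.array([0, -1, -2]))      # passes
    # check_fixed_offset(np.array([0.0, -1.0]))    # would now fail the dtype assertion
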

unienv_data/replay_buffer/replay_buffer.py

@@ -1,6 +1,10 @@
  import abc
  import os
  import dataclasses
+ import multiprocessing as mp
+ import ctypes
+ from contextlib import nullcontext
+ 
  from typing import Generic, TypeVar, Optional, Any, Dict, Union, Tuple, Sequence, Callable, Type
  from unienv_interface.backends import ComputeBackend, BArrayType, BDeviceType, BDtypeType, BRNGType

@@ -51,7 +55,6 @@
      return data_index

  class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGType]):
-     is_mutable = True
      # =========== Class Attributes ==========
      @staticmethod
      def create(
@@ -60,6 +63,7 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
          *args,
          cache_path : Optional[Union[str, os.PathLike]] = None,
          capacity : Optional[int] = None,
+         multiprocessing : bool = False,
          **kwargs
      ) -> "ReplayBuffer[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGType]":
          storage_path_relative = "storage" + (storage_cls.single_file_ext or "")
@@ -70,6 +74,7 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
              *args,
              cache_path=None if cache_path is None else os.path.join(cache_path, storage_path_relative),
              capacity=capacity,
+             multiprocessing=multiprocessing,
              **kwargs
          )
          return ReplayBuffer(
@@ -77,7 +82,8 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
              storage_path_relative,
              0,
              0,
-             cache_path=cache_path
+             cache_path=cache_path,
+             multiprocessing=multiprocessing
          )

      @staticmethod
@@ -89,6 +95,48 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
                  metadata = json.load(f)
              return metadata.get('type', None) == __class__.__name__
          return False
+ 
+     @staticmethod
+     def get_length_from_path(
+         path : Union[str, os.PathLike]
+     ) -> Optional[int]:
+         if os.path.exists(os.path.join(path, "metadata.json")):
+             with open(os.path.join(path, "metadata.json"), "r") as f:
+                 metadata = json.load(f)
+             if metadata.get('type', None) != __class__.__name__:
+                 return None
+             return int(metadata["count"])
+         return None
+ 
+     @staticmethod
+     def get_capacity_from_path(
+         path : Union[str, os.PathLike]
+     ) -> Optional[int]:
+         if os.path.exists(os.path.join(path, "metadata.json")):
+             with open(os.path.join(path, "metadata.json"), "r") as f:
+                 metadata = json.load(f)
+             if metadata.get('type', None) != __class__.__name__:
+                 return None
+             return int(metadata["capacity"])
+         return None
+ 
+     @staticmethod
+     def get_space_from_path(
+         path : Union[str, os.PathLike],
+         *,
+         backend: ComputeBackend[BArrayType, BDeviceType, BDtypeType, BRNGType],
+         device: Optional[BDeviceType] = None,
+     ) -> Optional[Space[BatchT, BDeviceType, BDtypeType, BRNGType]]:
+         if os.path.exists(os.path.join(path, "metadata.json")):
+             with open(os.path.join(path, "metadata.json"), "r") as f:
+                 metadata = json.load(f)
+             if metadata.get('type', None) != __class__.__name__:
+                 return None
+             single_instance_space = bsu.json_to_space(
+                 metadata["single_instance_space"], backend, device
+             )
+             return single_instance_space
+         return None

      @staticmethod
      def load_from(
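
Note: the three new static methods let callers read a buffer's length, capacity, and single-instance space from `metadata.json` without instantiating the storage; each returns `None` when the path does not hold a `ReplayBuffer` dump. A hypothetical usage sketch (the path, the backend object, and the import location are placeholders, not confirmed by this diff):

    # Hypothetical usage; adjust the import to wherever ReplayBuffer is exported.
    from unienv_data.replay_buffer import ReplayBuffer

    path = "/data/buffers/run_001"
    length = ReplayBuffer.get_length_from_path(path)
    if length is not None:                      # None => not a ReplayBuffer dump
        capacity = ReplayBuffer.get_capacity_from_path(path)
        print(f"{length} / {capacity} transitions stored at {path}")
        # get_space_from_path additionally needs a compute backend (and optional device):
        # space = ReplayBuffer.get_space_from_path(path, backend=my_backend)
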
@@ -97,6 +145,7 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
          backend: ComputeBackend[BArrayType, BDeviceType, BDtypeType, BRNGType],
          device: Optional[BDeviceType] = None,
          read_only : bool = True,
+         multiprocessing : bool = False,
          **storage_kwargs
      ) -> "ReplayBuffer[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGType]":
          with open(os.path.join(path, "metadata.json"), "r") as f:
@@ -118,52 +167,103 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
              single_instance_space,
              capacity=capacity,
              read_only=read_only,
+             multiprocessing=multiprocessing,
              **storage_kwargs
          )
-         return ReplayBuffer(storage, metadata["storage_path_relative"], count, offset, cache_path=path)
+         return ReplayBuffer(
+             storage,
+             metadata["storage_path_relative"],
+             count,
+             offset,
+             cache_path=path,
+             multiprocessing=multiprocessing
+         )

      # =========== Instance Attributes and Methods ==========
      def dumps(self, path : Union[str, os.PathLike]):
-         os.makedirs(path, exist_ok=True)
-         storage_path = os.path.join(path, self.storage_path_relative)
-         self.storage.dumps(storage_path)
-         metadata = {
-             "type": __class__.__name__,
-             "count": self.count,
-             "offset": self.offset,
-             "capacity": self.storage.capacity,
-             "storage_cls": get_full_class_name(type(self.storage)),
-             "storage_path_relative": self.storage_path_relative,
-             "single_instance_space": bsu.space_to_json(self.storage.single_instance_space),
-         }
-         with open(os.path.join(path, "metadata.json"), "w") as f:
-             json.dump(metadata, f)
+         with self._lock_scope():
+             os.makedirs(path, exist_ok=True)
+             storage_path = os.path.join(path, self.storage_path_relative)
+             self.storage.dumps(storage_path)
+             metadata = {
+                 "type": __class__.__name__,
+                 "count": self.count,
+                 "offset": self.offset,
+                 "capacity": self.storage.capacity,
+                 "storage_cls": get_full_class_name(type(self.storage)),
+                 "storage_path_relative": self.storage_path_relative,
+                 "single_instance_space": bsu.space_to_json(self.storage.single_instance_space),
+             }
+             with open(os.path.join(path, "metadata.json"), "w") as f:
+                 json.dump(metadata, f)

      def __init__(
          self,
          storage : SpaceStorage[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGType],
-         storage_path_relative : Union[str, os.PathLike],
+         storage_path_relative : str,
          count : int = 0,
          offset : int = 0,
          cache_path : Optional[Union[str, os.PathLike]] = None,
+         multiprocessing : bool = False,
      ):
          self.storage = storage
-         self.count = count
-         self.offset = offset
-         self.storage_path_relative = storage_path_relative
+         self._storage_path_relative = storage_path_relative
          self._cache_path = cache_path
+         self._multiprocessing = multiprocessing
+         if multiprocessing and storage.is_mutable:
+             assert storage.is_multiprocessing_safe, "Storage is not multiprocessing safe"
+             self._lock = mp.RLock()
+             self._count_value = mp.Value(ctypes.c_long, int(count))
+             self._offset_value = mp.Value(ctypes.c_long, int(offset))
+         else:
+             self._lock = None
+             self._count_value = int(count)
+             self._offset_value = int(offset)
+ 
          super().__init__(
              storage.single_instance_space,
              None
          )

+     def _lock_scope(self):
+         if self._lock is not None:
+             return self._lock
+         else:
+             return nullcontext()
+ 
      @property
      def cache_path(self) -> Optional[Union[str, os.PathLike]]:
          return self._cache_path

+     @property
+     def storage_path_relative(self) -> str:
+         return self._storage_path_relative
+ 
      def __len__(self) -> int:
          return self.count
+ 
+     @property
+     def count(self) -> int:
+         return self._count_value.value if not isinstance(self._count_value, int) else self._count_value
+ 
+     @count.setter
+     def count(self, value: int) -> None:
+         if not isinstance(self._count_value, int):
+             self._count_value.value = int(value)
+         else:
+             self._count_value = int(value)

+     @property
+     def offset(self) -> int:
+         return self._offset_value.value if not isinstance(self._offset_value, int) else self._offset_value
+ 
+     @offset.setter
+     def offset(self, value: int) -> None:
+         if not isinstance(self._offset_value, int):
+             self._offset_value.value = int(value)
+         else:
+             self._offset_value = int(value)
+ 
      @property
      def capacity(self) -> Optional[int]:
          return self.storage.capacity
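
Note: with `multiprocessing=True`, `count` and `offset` are stored in `multiprocessing.Value` objects guarded by an `RLock`, and `_lock_scope()` hands back either that lock or a `nullcontext()`, so the same `with` statement works in both modes. A self-contained sketch of that pattern (not UniEnv code):

    import ctypes
    import multiprocessing as mp
    from contextlib import nullcontext

    class SharedCounter:
        """Counter that is process-shared only when requested (sketch of the pattern)."""
        def __init__(self, start: int = 0, multiprocessing: bool = False):
            if multiprocessing:
                self._lock = mp.RLock()
                self._value = mp.Value(ctypes.c_long, start)
            else:
                self._lock = None
                self._value = start

        def _lock_scope(self):
            # Real lock when shared, no-op context manager otherwise.
            return self._lock if self._lock is not None else nullcontext()

        @property
        def value(self) -> int:
            return self._value if isinstance(self._value, int) else self._value.value

        def add(self, n: int) -> None:
            with self._lock_scope():
                if isinstance(self._value, int):
                    self._value += n
                else:
                    self._value.value += n

    c = SharedCounter(multiprocessing=True)
    c.add(5)
    print(c.value)   # 5; the Value/RLock pair is inherited by forked worker processes
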
@@ -176,12 +276,21 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
      def device(self) -> Optional[BDeviceType]:
          return self.storage.device

+     @property
+     def is_mutable(self) -> bool:
+         return self.storage.is_mutable
+ 
+     @property
+     def is_multiprocessing_safe(self) -> bool:
+         return self._multiprocessing
+ 
      def get_flattened_at(self, idx):
          return self.get_flattened_at_with_metadata(idx)[0]

      def get_flattened_at_with_metadata(self, idx: Union[IndexableType, BArrayType]) -> BArrayType:
          if hasattr(self.storage, "get_flattened"):
-             data = self.storage.get_flattened(idx)
+             with self._lock_scope():
+                 data = self.storage.get_flattened(idx)
              return data, None

          data, metadata = self.get_at_with_metadata(idx)
@@ -195,19 +304,21 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
          return self.get_at_with_metadata(idx)[0]

      def get_at_with_metadata(self, idx):
-         data_index = index_with_offset(
-             self.backend,
-             idx,
-             self.count,
-             self.offset,
-             self.device
-         )
-         data = self.storage.get(data_index)
+         with self._lock_scope():
+             data_index = index_with_offset(
+                 self.backend,
+                 idx,
+                 self.count,
+                 self.offset,
+                 self.device
+             )
+             data = self.storage.get(data_index)
          return data, None

      def set_flattened_at(self, idx: Union[IndexableType, BArrayType], value: BArrayType) -> None:
          if hasattr(self.storage, "set_flattened"):
-             self.storage.set_flattened(idx, value)
+             with self._lock_scope():
+                 self.storage.set_flattened(idx, value)
              return

          if isinstance(idx, int):
@@ -217,13 +328,14 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
          self.set_at(idx, value)

      def set_at(self, idx, value):
-         self.storage.set(index_with_offset(
-             self.backend,
-             idx,
-             self.count,
-             self.offset,
-             self.device
-         ), value)
+         with self._lock_scope():
+             self.storage.set(index_with_offset(
+                 self.backend,
+                 idx,
+                 self.count,
+                 self.offset,
+                 self.device
+             ), value)

      def extend_flattened(
          self,
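
Note: both `get_at_with_metadata` and `set_at` route indices through `index_with_offset`, of which only the tail appears in this diff. The offset arithmetic that such a helper is responsible for can be sketched with plain modular arithmetic (an assumption for illustration, not the library's actual implementation):

    import numpy as np

    def ring_index(idx, count: int, offset: int, capacity):
        # Logical index i means "the i-th oldest element"; once the buffer has
        # wrapped, it physically lives at slot (offset + i) % capacity.
        idx = np.asarray(idx)
        idx = np.where(idx < 0, idx + count, idx)        # allow negative indexing
        assert np.all((idx >= 0) & (idx < count)), "index out of range"
        return idx if capacity is None else (offset + idx) % capacity

    # capacity-5 buffer, full, with its oldest element currently stored at slot 2:
    print(ring_index(np.arange(5), count=5, offset=2, capacity=5))   # [2 3 4 0 1]
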
@@ -233,35 +345,37 @@ class ReplayBuffer(BatchBase[BatchT, BArrayType, BDeviceType, BDtypeType, BRNGTy
          self.extend(unflattened_data)

      def extend(self, value):
-         B = sbu.batch_size_data(value)
-         if B == 0:
-             return
-         if self.capacity is None:
-             assert self.offset == 0, "Offset must be 0 when capacity is None"
-             self.storage.extend_length(B)
-             self.storage.set(slice(self.count, self.count + B), value)
-             self.count += B
-             return
- 
-         # We have a fixed capacity, only keep the last `capacity` elements
-         if B >= self.capacity:
-             self.storage.set(Ellipsis, sbu.get_at(self._batched_space, value, slice(-self.capacity, None)))
-             self.count = self.capacity
-             self.offset = 0
-             return
- 
-         # Otherwise, perform round-robin writes
-         indexes = (self.backend.arange(B, device=self.device) + self.offset + self.count) % self.capacity
-         self.storage.set(indexes, value)
-         outflow = max(0, self.count + B - self.capacity)
-         if outflow > 0:
-             self.offset = (self.offset + outflow) % self.capacity
-         self.count = min(self.count + B, self.capacity)
+         with self._lock_scope():
+             B = sbu.batch_size_data(value)
+             if B == 0:
+                 return
+             if self.capacity is None:
+                 assert self.offset == 0, "Offset must be 0 when capacity is None"
+                 self.storage.extend_length(B)
+                 self.storage.set(slice(self.count, self.count + B), value)
+                 self.count += B
+                 return
+ 
+             # We have a fixed capacity, only keep the last `capacity` elements
+             if B >= self.capacity:
+                 self.storage.set(Ellipsis, sbu.get_at(self._batched_space, value, slice(-self.capacity, None)))
+                 self.count = self.capacity
+                 self.offset = 0
+                 return
+ 
+             # Otherwise, perform round-robin writes
+             indexes = (self.backend.arange(B, device=self.device) + self.offset + self.count) % self.capacity
+             self.storage.set(indexes, value)
+             outflow = max(0, self.count + B - self.capacity)
+             if outflow > 0:
+                 self.offset = (self.offset + outflow) % self.capacity
+             self.count = min(self.count + B, self.capacity)

      def clear(self):
-         self.count = 0
-         self.offset = 0
-         self.storage.clear()
+         with self._lock_scope():
+             self.count = 0
+             self.offset = 0
+             self.storage.clear()

      def close(self) -> None:
          self.storage.close()
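
Note: `extend` now holds the lock for the entire read-modify-write of `count` and `offset`, while the round-robin write logic itself is unchanged: a batch at least as large as the capacity overwrites everything, otherwise new rows land at `(offset + count + i) % capacity` and `offset` advances by however many old rows were evicted. A self-contained NumPy sketch of that write path (illustrative only):

    import numpy as np

    class RingWriter:
        """Sketch of the fixed-capacity write path: keep only the newest `capacity` rows."""
        def __init__(self, capacity: int):
            self.capacity = capacity
            self.data = np.zeros(capacity)
            self.count = 0      # number of valid rows
            self.offset = 0     # physical slot of the oldest row

        def extend(self, value: np.ndarray) -> None:
            B = len(value)
            if B == 0:
                return
            if B >= self.capacity:                  # the batch alone fills the buffer
                self.data[...] = value[-self.capacity:]
                self.count, self.offset = self.capacity, 0
                return
            slots = (np.arange(B) + self.offset + self.count) % self.capacity
            self.data[slots] = value                # round-robin write
            outflow = max(0, self.count + B - self.capacity)
            if outflow:                             # oldest rows were overwritten
                self.offset = (self.offset + outflow) % self.capacity
            self.count = min(self.count + B, self.capacity)

    rb = RingWriter(capacity=4)
    rb.extend(np.arange(3))               # fills slots 0..2
    rb.extend(np.arange(3, 5))            # wraps: slot 3, then slot 0 is overwritten
    print(rb.data, rb.count, rb.offset)   # [4. 1. 2. 3.] count=4 offset=1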