mani-skill-nightly 2025.6.28.2040__py3-none-any.whl → 2025.6.28.2124__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- mani_skill/agents/base_agent.py
+++ mani_skill/agents/base_agent.py
@@ -340,6 +340,18 @@ class BaseAgent:
         obs.update(controller=controller_state)
         return obs

+    def get_controller_state(self):
+        """
+        Get the state of the controller.
+        """
+        return self.controller.get_state()
+
+    def set_controller_state(self, state: Array):
+        """
+        Set the state of the controller.
+        """
+        self.controller.set_state(state)
+
     def get_state(self) -> Dict:
         """Get current state, including robot state and controller state"""
         state = dict()
@@ -353,7 +365,7 @@ class BaseAgent:
         state["robot_qvel"] = self.robot.get_qvel()

         # controller state
-        state["controller"] = self.controller.get_state()
+        state["controller"] = self.get_controller_state()

         return state

@@ -368,7 +380,7 @@ class BaseAgent:
         self.robot.set_qvel(state["robot_qvel"])

         if not ignore_controller and "controller" in state:
-            self.controller.set_state(state["controller"])
+            self.set_controller_state(state["controller"])
         if self.device.type == "cuda":
             self.scene._gpu_apply_all()
             self.scene.px.gpu_update_articulation_kinematics()
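BaseAgent now exposes get_controller_state / set_controller_state as a single override point for saving and restoring controller state, and get_state / set_state route through them. A minimal usage sketch under stated assumptions (the task id, obs_mode, and the use of env.unwrapped.agent are illustrative choices, not part of this diff):

    import gymnasium as gym

    import mani_skill.envs  # noqa: F401  (registers ManiSkill environments)

    env = gym.make("PickCube-v1", obs_mode="state")  # assumed single-agent task id
    env.reset(seed=0)
    agent = env.unwrapped.agent

    saved = agent.get_controller_state()   # snapshot controller state via the new API
    env.step(env.action_space.sample())    # advance the simulation
    agent.set_controller_state(saved)      # restore the controller to the snapshot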
--- mani_skill/agents/multi_agent.py
+++ mani_skill/agents/multi_agent.py
@@ -70,6 +70,18 @@ class MultiAgent(BaseAgent, Generic[T]):
         for agent in self.agents:
             agent.controller.before_simulation_step()

+    def get_controller_state(self):
+        """
+        Get the state of the controller.
+        """
+        return {
+            uid: agent.get_controller_state() for uid, agent in self.agents_dict.items()
+        }
+
+    def set_controller_state(self, state: Dict):
+        for uid, agent in self.agents_dict.items():
+            agent.set_controller_state(state[uid])
+
     # -------------------------------------------------------------------------- #
     # Other
     # -------------------------------------------------------------------------- #
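For MultiAgent, the same pair of methods fans out over the sub-agents, so the controller state is a dictionary with one entry per agent uid in agents_dict. A small round-trip sketch, assuming a hypothetical multi-agent task id used only for illustration:

    import gymnasium as gym

    import mani_skill.envs  # noqa: F401

    env = gym.make("TwoRobotPickCube-v1")  # assumed multi-agent task id
    env.reset(seed=0)
    multi_agent = env.unwrapped.agent  # a MultiAgent instance

    # One entry per sub-agent uid in multi_agent.agents_dict.
    state = multi_agent.get_controller_state()
    multi_agent.set_controller_state(state)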
--- mani_skill/envs/sapien_env.py
+++ mani_skill/envs/sapien_env.py
@@ -1241,7 +1241,10 @@ class BaseEnv(gym.Env):
         Get environment state dictionary. Override to include task information (e.g., goal)
         """
         sim_state = self.scene.get_sim_state()
-        controller_state = self.agent.controller.get_state()
+        controller_state = self.agent.get_controller_state()
+        # Remove any empty keys from controller_state
+        if isinstance(self.agent.controller, dict):
+            controller_state = {k: v for k, v in controller_state.items() if len(v) > 0}
         if len(controller_state) > 0:
             sim_state["controller"] = controller_state
         return sim_state
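The new filter only matters in the multi-agent case, where self.agent.controller is a dict of per-agent controllers and some of them may have nothing to save. A self-contained sketch of the behavior, using hypothetical uids and a hypothetical controller key:

    # Hypothetical multi-agent controller state; the second agent's controller is stateless.
    controller_state = {"panda-0": {"target_qpos": [0.0, 0.1]}, "panda-1": {}}
    # The same filter as in get_state_dict drops the empty entries:
    controller_state = {k: v for k, v in controller_state.items() if len(v) > 0}
    assert controller_state == {"panda-0": {"target_qpos": [0.0, 0.1]}}
    # Only a non-empty result is written into sim_state["controller"].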
--- mani_skill/envs/tasks/tabletop/plug_charger.py
+++ mani_skill/envs/tasks/tabletop/plug_charger.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, Union
+from typing import Dict, Union

 import numpy as np
 import sapien
@@ -42,6 +42,7 @@ class PlugChargerEnv(BaseEnv):

     SUPPORTED_ROBOTS = ["panda_wristcam"]
     agent: Union[PandaWristCam]
+    SUPPORTED_REWARD_MODES = ["none", "sparse"]

     def __init__(
         self, *args, robot_uids="panda_wristcam", robot_init_qpos_noise=0.02, **kwargs
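Declaring SUPPORTED_REWARD_MODES narrows the reward modes PlugChargerEnv accepts to "none" and "sparse", so requesting an unimplemented dense mode should fail at environment construction rather than silently. Illustrative usage, assuming the task is registered under the id "PlugCharger-v1":

    import gymnasium as gym

    import mani_skill.envs  # noqa: F401

    env = gym.make("PlugCharger-v1", reward_mode="sparse")  # accepted
    # gym.make("PlugCharger-v1", reward_mode="dense") would now be rejected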
@@ -199,7 +200,10 @@ class PlugChargerEnv(BaseEnv):
             )
             qpos = (
                 torch.normal(
-                    0, self.robot_init_qpos_noise, (b, len(qpos)), device=self.device
+                    0,
+                    self.robot_init_qpos_noise,
+                    (b, len(qpos)),
+                    device=self.device,
                 )
                 + qpos
             )
--- mani_skill/trajectory/replay_trajectory.py
+++ mani_skill/trajectory/replay_trajectory.py
@@ -27,6 +27,7 @@ from mani_skill.trajectory.merge_trajectory import merge_trajectories
 from mani_skill.trajectory.utils.actions import conversion as action_conversion
 from mani_skill.utils import common, io_utils, wrappers
 from mani_skill.utils.logging_utils import logger
+from mani_skill.utils.wrappers.flatten import FlattenActionSpaceWrapper
 from mani_skill.utils.wrappers.record import RecordEpisode


@@ -399,6 +400,11 @@ def _main(
     json_path = traj_path.replace(".h5", ".json")
     json_data = io_utils.load_json(json_path)
     env = gym.make(env_id, **env_kwargs)
+    if isinstance(env.action_space, gym.spaces.Dict):
+        logger.warning(
+            "We currently do not track which wrappers are used when recording trajectories but majority of the time in multi-agent envs with dictionary action spaces the actions are stored as flat vectors. We will flatten the action space with the ManiSkill provided FlattenActionSpaceWrapper. If you do not want this behavior you can copy the replay trajectory code yourself and modify it as needed."
+        )
+        env = FlattenActionSpaceWrapper(env)
     # TODO (support adding wrappers to the recorded data?)

     # if pbar is not None:
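FlattenActionSpaceWrapper converts a dictionary action space (one entry per agent) into a single flat Box space, which matches how actions are typically stored in recorded multi-agent trajectories; the replay script now applies it automatically when it detects a Dict action space. A standalone sketch of the same wrapping (the task id is an assumed example of a multi-agent environment):

    import gymnasium as gym

    import mani_skill.envs  # noqa: F401
    from mani_skill.utils.wrappers.flatten import FlattenActionSpaceWrapper

    env = gym.make("TwoRobotPickCube-v1")  # assumed task with a Dict action space
    if isinstance(env.action_space, gym.spaces.Dict):
        env = FlattenActionSpaceWrapper(env)  # actions become a single flat vector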
--- mani_skill_nightly-2025.6.28.2040.dist-info/METADATA
+++ mani_skill_nightly-2025.6.28.2124.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mani-skill-nightly
-Version: 2025.6.28.2040
+Version: 2025.6.28.2124
 Summary: ManiSkill3: A Unified Benchmark for Generalizable Manipulation Skills
 Home-page: https://github.com/haosulab/ManiSkill
 Author: ManiSkill contributors
--- mani_skill_nightly-2025.6.28.2040.dist-info/RECORD
+++ mani_skill_nightly-2025.6.28.2124.dist-info/RECORD
@@ -1,8 +1,8 @@
 mani_skill/__init__.py,sha256=_wZjMWSlWZYeAAEjiHAHa5W6uq7Eh4fbny8HwlYSJhQ,2101
 mani_skill/agents/__init__.py,sha256=6u7nUfWDqWmD_ieNhECfhV6mIyf4SmMdumexE2lRlsU,44
-mani_skill/agents/base_agent.py,sha256=rnBMYAtEhjfVHeYUx28yWU9Y1tIxXVETBv2IqSiyt8w,18098
+mani_skill/agents/base_agent.py,sha256=pnaE1VWJSpdzlsFPmcG5VVLLX9sWtPnl1ZM3ZIy2kXM,18400
 mani_skill/agents/base_real_agent.py,sha256=DD9SXZa7er5zF7wdm97IO_0W4xbdw-66iLC8j86ykYw,8549
-mani_skill/agents/multi_agent.py,sha256=OL7sPDwP1Gfn870W3LRNhwsPp7Gy52sdYI4LySvaGzQ,2970
+mani_skill/agents/multi_agent.py,sha256=AFygr2797M5Hhk4qMoLuFmFk7msqnp82bFuSyP1j8JA,3341
 mani_skill/agents/registration.py,sha256=rtu9vAys_Qz76d9LsDFpqanZxaFiLSPZajA3uHF9HHQ,1331
 mani_skill/agents/utils.py,sha256=U8wdgsNHRa-RtlC3HQM8_Agn9P82TIe3ZrXetaMMU_U,1912
 mani_skill/agents/controllers/__init__.py,sha256=tsd27dW6brQBm563CddUGtFMBNAxe_MCpTqqR5VyJaY,1103
@@ -539,7 +539,7 @@ mani_skill/assets/robots/xarm7/meshes/visual/link7.glb,sha256=aZatACOv20VJbi2tOE
 mani_skill/assets/robots/xarm7/meshes/visual/link_base.glb,sha256=vcy2lN1V72jIsSDRT0ZKVskR_0pVOXtDvBkxO2GENWs,467668
 mani_skill/envs/__init__.py,sha256=YPlttBErTcf9vSnkZ54EQ8vTABSfFFrBdUY0AkF4vmg,43
 mani_skill/envs/minimal_template.py,sha256=9THHWA1vkHatptc9g5Ojh-UBUKWQmLHVeq4fcaqv2aY,2200
-mani_skill/envs/sapien_env.py,sha256=4fuabvPy9uCC_x3DTehdgOgyIDqIZDjzbhBsz6iiakQ,73663
+mani_skill/envs/sapien_env.py,sha256=VFP2hMAfaocOnOKl0xpuy95dJV_LhJBTqcNceBB2z-0,73858
 mani_skill/envs/scene.py,sha256=4ZAIJs61fwPPhfDvc3v845sj_Ftsd1sSYaju10KnXbQ,48465
 mani_skill/envs/sim2real_env.py,sha256=3mkQX4TonE2pUC5_Atmx0IYDH2_v6GSwOPJvQMEvCNY,19214
 mani_skill/envs/template.py,sha256=0wnwKjnGOF7RvTR5Gz4VopaUiFxnIioXwmb4nPVxAs8,11939
@@ -613,7 +613,7 @@ mani_skill/envs/tasks/tabletop/pick_cube.py,sha256=wC2DdKKxROaG2oWovbKGlPyuKLd21
 mani_skill/envs/tasks/tabletop/pick_cube_cfgs.py,sha256=ns0bhw6nrJElSR9nGruGYECyzeAJgq4nd2HraEHI5A0,2564
 mani_skill/envs/tasks/tabletop/pick_single_ycb.py,sha256=mrqEoOa9UVF34Z5fpsvjcr683diUffsKEjJ9Zh0qfFU,10409
 mani_skill/envs/tasks/tabletop/place_sphere.py,sha256=J3ReBFK7TyZQlleIFspz7Pl1wqAzaYoveGZfNNL5DVM,10101
-mani_skill/envs/tasks/tabletop/plug_charger.py,sha256=nqxrafAtziJGjwBVhB3OjfA4UxVSIoJxrAWzA9_YMuY,10347
+mani_skill/envs/tasks/tabletop/plug_charger.py,sha256=So0WttpXKU_1okVCgB00htqa_fxPcJZERhFSKqspA_o,10463
 mani_skill/envs/tasks/tabletop/poke_cube.py,sha256=KV6mp-Xgm9h4GYUcAUop2AZ4IECTdQKEMRRd9NThyBo,9343
 mani_skill/envs/tasks/tabletop/pull_cube.py,sha256=tyy9KOgBjQOHjFrVK2-hNQPCPDjJ7Y61ZtbwPX_6gvk,5548
 mani_skill/envs/tasks/tabletop/pull_cube_tool.py,sha256=NaZpdbYYL4zC41GVY__eq4uRIQpVXthzAqe5oSq8YWU,9951
@@ -703,7 +703,7 @@ mani_skill/sensors/depth_camera.py,sha256=KCT7DMqQacVag_24MjkKAml87T6FtDqNS0TJFf
 mani_skill/trajectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/trajectory/dataset.py,sha256=nrG3jkhdzRqAdjxC_c8Z4FxpkvW3A9XPvUp9-Ux_u38,6351
 mani_skill/trajectory/merge_trajectory.py,sha256=zsjRMTsiIirZGIV4KrtYOM2-zoOAzd7ObZEdWGJzZbE,3685
-mani_skill/trajectory/replay_trajectory.py,sha256=AtmsIV4Oj_vh6MDbHV5Kyrlv7J6mXSHvAWwjuFBVMZo,27074
+mani_skill/trajectory/replay_trajectory.py,sha256=ABiM4pMSkTAhU1L2fdaY-Mwnw2Hzg8p1rAaWf3ijWOE,27681
 mani_skill/trajectory/utils/__init__.py,sha256=Nchv09IpXv0FOgpf7Ng1Ekus6ZfAh3kI0KJs-79QOig,1515
 mani_skill/trajectory/utils/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/trajectory/utils/actions/conversion.py,sha256=x88C64ke44gB-HEbqq_gSRFv34L7irSwT_wYttkQUn8,12922
@@ -825,8 +825,8 @@ mani_skill/vector/wrappers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJ
 mani_skill/vector/wrappers/gymnasium.py,sha256=aNPB-2oGDLep8qzdsuTSIlwGGO0OGQAQ193LefOGoTk,7434
 mani_skill/vector/wrappers/sb3.py,sha256=SlXdiEPqcNHYMhJCzA29kBU6zK7DKTe1nc0L6Z3QQtY,4722
 mani_skill/viewer/__init__.py,sha256=srvDBsk4LQU75K2VIttrhiQ68p_ro7PSDqQRls2PY5c,1722
-mani_skill_nightly-2025.6.28.2040.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-mani_skill_nightly-2025.6.28.2040.dist-info/METADATA,sha256=4tsIGDJelHdLjtIrbdK0CdaI_2nhI4o_PC1xEW4chpM,9272
-mani_skill_nightly-2025.6.28.2040.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
-mani_skill_nightly-2025.6.28.2040.dist-info/top_level.txt,sha256=bkBgOVl_MZMoQx2aRFsSFEYlZLxjWlip5vtJ39FB3jA,11
-mani_skill_nightly-2025.6.28.2040.dist-info/RECORD,,
+mani_skill_nightly-2025.6.28.2124.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mani_skill_nightly-2025.6.28.2124.dist-info/METADATA,sha256=0QTuTb7HR5inib2hW2OYYBETNZ_Y7ogG9F7OA2DO0yo,9272
+mani_skill_nightly-2025.6.28.2124.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+mani_skill_nightly-2025.6.28.2124.dist-info/top_level.txt,sha256=bkBgOVl_MZMoQx2aRFsSFEYlZLxjWlip5vtJ39FB3jA,11
+mani_skill_nightly-2025.6.28.2124.dist-info/RECORD,,