mani-skill-nightly 2025.10.21.2002__py3-none-any.whl → 2025.10.21.2011__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mani-skill-nightly might be problematic.

@@ -0,0 +1,180 @@
+
+from typing import Any, Dict, Union
+import numpy as np
+import sapien.physx as physx
+import torch
+import os
+from mani_skill import ASSET_DIR
+from mani_skill.agents.robots import FloatingInspireHandRight
+from mani_skill.envs.sapien_env import BaseEnv
+from mani_skill.sensors.camera import CameraConfig
+from mani_skill.utils import sapien_utils
+from mani_skill.utils.registration import register_env
+from mani_skill.utils.scene_builder.table import TableSceneBuilder
+from mani_skill.utils.structs.pose import Pose
+from mani_skill.utils.structs.types import Array, GPUMemoryConfig, SimConfig
+
+
+@register_env(
+    "InsertFlower-v1",
+    max_episode_steps=300,
+    asset_download_ids=["oakink-v2"]
+)
+class InsertFlowerEnv(BaseEnv):
+    agent: Union[FloatingInspireHandRight]
+    _clearance = 0.003
+    hand_init_height = 0.25
+    flower_spawn_half_size = 0.05
+    asset_path = f"{ASSET_DIR}/tasks/oakink-v2/align_ds"
+
+    def __init__(
+        self,
+        *args,
+        robot_uids="allegro_hand_right_floating",
+        robot_init_qpos_noise=0.02,
+        num_envs=1,
+        reconfiguration_freq=None,
+        **kwargs,
+    ):
+        self.robot_init_qpos_noise = robot_init_qpos_noise
+        self.target_area = {"min": [-0.3, -0.25, 0.25], "max": [-0.2, -0.15, 0.35]}
+
+        super().__init__(
+            *args,
+            robot_uids=robot_uids,
+            num_envs=num_envs,
+            reconfiguration_freq=reconfiguration_freq,
+            **kwargs,
+        )
+
+        with torch.device(self.device):
+            self.prev_unit_vector = torch.zeros((self.num_envs, 3))
+            self.cum_rotation_angle = torch.zeros((self.num_envs,))
+
+    @property
+    def _default_sim_config(self):
+        return SimConfig(
+            gpu_memory_config=GPUMemoryConfig(
+                max_rigid_contact_count=self.num_envs * max(1024, self.num_envs) * 8,
+                max_rigid_patch_count=self.num_envs * max(1024, self.num_envs) * 2,
+                found_lost_pairs_capacity=2**26,
+            )
+        )
+
+    @property
+    def _default_sensor_configs(self):
+        pose = sapien_utils.look_at(eye=[0.15, 0, 0.45], target=[-0.1, 0, self.hand_init_height])
+        return [CameraConfig("base_camera", pose, 128, 128, np.pi / 2, 0.01, 100)]
+
+    @property
+    def _default_human_render_camera_configs(self):
+        pose = sapien_utils.look_at([0.2, 0.4, 0.6], [0.0, 0.0, 0.3])
+        return CameraConfig("render_camera", pose, 512, 512, 1, 0.01, 100)
+
+    def _load_scene(self, options: dict):
+        self.table_scene = TableSceneBuilder(env=self, robot_init_qpos_noise=self.robot_init_qpos_noise)
+        self.table_scene.build()
+
+        # === Load Vase (Static) ===
+        vase_builder = self.scene.create_actor_builder()
+        vase_visual_mesh_file = os.path.join(self.asset_path, "O02@0080@00001/model.obj")
+        vase_collision_mesh_file = os.path.join(
+            self.asset_path, "O02@0080@00001/model.obj.coacd.ply"
+        )
+        vase_builder.add_visual_from_file(vase_visual_mesh_file)
+        vase_builder.add_multiple_convex_collisions_from_file(vase_collision_mesh_file)
+        vase_builder.initial_pose = Pose.create_from_pq(
+            [-0.2509, -0.2027, 0.102 + 0.001], [0.8712, 0.0069, 0.0082, 0.4908]
+        )
+
+        self.vase = vase_builder.build_static(name="vase")
+
+        # === Load Flower (Dynamic) ===
+        flower_builder = self.scene.create_actor_builder()
+        flower_mesh_file = os.path.join(self.asset_path, "O02@0081@00001/model.obj")
+        flower_collision_file = os.path.join(
+            self.asset_path, "O02@0081@00001/model.obj.coacd.ply"
+        )
+        flower_builder.add_visual_from_file(flower_mesh_file)
+
+        flower_material = physx.PhysxMaterial(static_friction=1, dynamic_friction=1, restitution=1)
+        flower_builder.add_multiple_convex_collisions_from_file(
+            flower_collision_file, density=200, material=flower_material
+        )
+
+        self.init_flower_pose = Pose.create_from_pq(
+            [-0.242, 0.0, 0.015 + 0.001], [-0.352413, -0.258145, -0.635074, 0.637062]
+        )
+        flower_builder.initial_pose = self.init_flower_pose
+        self.flower = flower_builder.build(name="flower")
+        self.target_area_box = list(self.target_area.values())
+        # Convert target_area into tensor for fast computation
+        self.target_area_box = torch.tensor(self.target_area_box, device=self.device, dtype=torch.float32).view(2, 3)
+
+    def _after_reconfigure(self, options: dict):
+        pass
+
+    def _initialize_episode(self, env_idx: torch.Tensor, options: dict):
+        self._initialize_actors(env_idx)
+        self._initialize_agent(env_idx)
+
+    def _initialize_actors(self, env_idx: torch.Tensor):
+        with torch.device(self.device):
+            b = len(env_idx)
+
+            self.table_scene.initialize(env_idx)
+
+            flower_pose = self.init_flower_pose
+            flower_pose.p[:, :2] += torch.rand((b, 2)) * self.flower_spawn_half_size * 2 - self.flower_spawn_half_size
+            self.flower.set_pose(flower_pose)
+
+    def _initialize_agent(self, env_idx: torch.Tensor):
+        with torch.device(self.device):
+            b = len(env_idx)
+            dof = self.agent.robot.dof
+            if isinstance(dof, torch.Tensor):
+                dof = dof[0]
+            init_qpos = torch.zeros((b, dof))
+            self.agent.reset(init_qpos)
+            self.agent.robot.set_pose(
+                Pose.create_from_pq(
+                    torch.tensor([0.0, 0, self.hand_init_height]),
+                    torch.tensor([
+                        0,
+                        0.707,
+                        0,
+                        -0.707,
+                    ]),
+                )
+            )
+
+    def _get_obs_extra(self, info: Dict):
+        return {}
+
+    def evaluate(self, **kwargs) -> dict:
+        object_pos = self.flower.pose.p
+
+        # Check if the object is within the specified bounds
+        is_within = torch.logical_and(
+            torch.all(object_pos >= self.target_area_box[0], dim=-1),  # min bounds
+            torch.all(object_pos <= self.target_area_box[1], dim=-1),  # max bounds
+        )
+
+        return {"success": is_within}
+
+    def compute_dense_reward(self, obs: Any, action: Array, info: Dict) -> float:
+        object_pos = self.flower.pose.p
+        dist_outside = torch.max(
+            torch.max(self.target_area_box[0] - object_pos, torch.zeros_like(object_pos)),  # lower bound
+            torch.max(object_pos - self.target_area_box[1], torch.zeros_like(object_pos)),  # upper bound
+        )
+        reward = torch.exp(-5 * torch.norm(dist_outside)).reshape(-1)
+
+        return reward
+
+    def compute_normalized_dense_reward(self, obs: Any, action: Array, info: Dict):
+        return self.compute_dense_reward(obs=obs, action=action, info=info) / 4.0
+
+
+if __name__ == "__main__":
+    print(InsertFlowerEnv.asset_path)
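
The hunk above adds the new InsertFlower-v1 dexterity task: a flower actor is spawned with a randomized xy position near a static vase on the table, success is declared when the flower's position lies inside the target_area box, and the dense reward decays exponentially with the distance outside that box. A minimal usage sketch follows, assuming the standard ManiSkill gym entry points and that the oakink-v2 assets have already been downloaded; only the env id and its defaults are taken from the diff, the rest is illustrative:

    import gymnasium as gym
    import mani_skill.envs  # importing this registers ManiSkill tasks, including InsertFlower-v1

    env = gym.make("InsertFlower-v1", num_envs=1, obs_mode="state")
    obs, info = env.reset(seed=0)
    # one random step just to exercise the task loop
    obs, reward, terminated, truncated, info = env.step(env.action_space.sample())
    env.close()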
@@ -399,6 +399,7 @@ def _main(
     # Load associated json
     json_path = traj_path.replace(".h5", ".json")
     json_data = io_utils.load_json(json_path)
+    env_kwargs["sim_config"]["scene_config"]["gravity"] = np.array(env_kwargs["sim_config"]["scene_config"]["gravity"])
     env = gym.make(env_id, **env_kwargs)
     if isinstance(env.action_space, gym.spaces.Dict):
         logger.warning(
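
The added line in _main coerces the gravity entry from the trajectory's JSON metadata back into a NumPy array before the kwargs are forwarded to gym.make, since json deserializes it as a plain Python list. A small standalone sketch of the same coercion; the JSON literal below is illustrative, not taken from a real trajectory file:

    import json
    import numpy as np

    env_kwargs = json.loads('{"sim_config": {"scene_config": {"gravity": [0.0, 0.0, -9.81]}}}')
    gravity = env_kwargs["sim_config"]["scene_config"]["gravity"]
    env_kwargs["sim_config"]["scene_config"]["gravity"] = np.array(gravity)  # list -> ndarray
    print(type(env_kwargs["sim_config"]["scene_config"]["gravity"]))  # <class 'numpy.ndarray'>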
@@ -193,6 +193,11 @@ def initialize_data_sources():
         url="https://github.com/TrossenRobotics/ManiSkill-WidowX_AI/archive/refs/tags/v0.1.0.zip",
         target_path="robots/widowxai",
     )
+    DATA_SOURCES["oakink-v2"] = DataSource(
+        source_type="task_assets",
+        url="https://huggingface.co/datasets/haosulab/OakInk-v2/resolve/main/object_repair.zip",
+        target_path="tasks/oakink-v2",
+    )
 
 
 def expand_data_group_into_individual_data_source_ids(data_group_id: str):
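
This hunk registers the oakink-v2 archive as a downloadable task asset, which is what the new task's asset_download_ids=["oakink-v2"] refers to; the archive is unpacked under tasks/oakink-v2 inside the ManiSkill asset directory. A quick sanity check of the mesh layout the task expects (a sketch only; the mesh filenames are copied from the task code above, and it assumes the assets have already been fetched, e.g. with ManiSkill's asset download utility):

    import os
    from mani_skill import ASSET_DIR

    asset_path = os.path.join(ASSET_DIR, "tasks", "oakink-v2", "align_ds")
    print(os.path.exists(os.path.join(asset_path, "O02@0080@00001", "model.obj")))  # vase visual mesh
    print(os.path.exists(os.path.join(asset_path, "O02@0081@00001", "model.obj")))  # flower visual mesh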
@@ -358,18 +358,19 @@ class RecordEpisode(gym.Wrapper):
         *args,
         seed: Optional[Union[int, List[int]]] = None,
         options: Optional[dict] = None,
+        save=True,
         **kwargs,
     ):
         if self.save_on_reset:
             if self.save_video and self.num_envs == 1:
-                self.flush_video()
+                self.flush_video(save=save)
             # if doing a full reset then we flush all trajectories including incompleted ones
             if self._trajectory_buffer is not None:
                 if options is None or "env_idx" not in options:
-                    self.flush_trajectory(env_idxs_to_flush=np.arange(self.num_envs))
+                    self.flush_trajectory(env_idxs_to_flush=np.arange(self.num_envs), save=save)
                 else:
                     self.flush_trajectory(
-                        env_idxs_to_flush=common.to_numpy(options["env_idx"])
+                        env_idxs_to_flush=common.to_numpy(options["env_idx"], save=save)
                     )
 
         obs, info = super().reset(*args, seed=seed, options=options, **kwargs)
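
With this change, RecordEpisode.reset accepts a save flag so a reset can flush the buffered video/trajectory without writing them to disk. A hedged usage sketch; the wrapper construction follows the usual RecordEpisode pattern, and the env id and output directory are illustrative:

    import gymnasium as gym
    import mani_skill.envs
    from mani_skill.utils.wrappers.record import RecordEpisode

    env = gym.make("PickCube-v1", num_envs=1)
    env = RecordEpisode(env, output_dir="videos", save_trajectory=True, save_video=True)
    env.reset(seed=0)
    for _ in range(10):
        env.step(env.action_space.sample())
    env.reset(save=False)  # new: discard the buffered episode instead of saving it
    env.close()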
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mani_skill-nightly
-Version: 2025.10.21.2002
+Version: 2025.10.21.2011
 Summary: ManiSkill3: A Unified Benchmark for Generalizable Manipulation Skills
 Home-page: https://github.com/haosulab/ManiSkill
 Author: ManiSkill contributors
@@ -15,7 +15,7 @@ mani_skill/agents/controllers/pd_joint_pos_vel.py,sha256=wgiXmenTVIao1Tm1vtdJWTZ
 mani_skill/agents/controllers/pd_joint_vel.py,sha256=VZF06ISCkdKBX_fUHxb7mdl9GN1Lob5dhrFGlwCx16Q,1957
 mani_skill/agents/controllers/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/agents/controllers/utils/kinematics.py,sha256=FxREZaBRjUfc7S-jf0cLhN3HivTPxhq3AnO5zHE2nyI,11920
-mani_skill/agents/robots/__init__.py,sha256=lvW8t2bn2UjuWZ7PsYiKqMnHaMLvGVw6itBhxwHaauk,737
+mani_skill/agents/robots/__init__.py,sha256=FnQzL5K1Hr0fSPMBiKgBRiHOc08nciuDr-3Ta2RMno8,797
 mani_skill/agents/robots/allegro_hand/__init__.py,sha256=PWH6MMv2i5g1xi76_E-YJLtKFLVKxkJgQ96GD_YeQN4,104
 mani_skill/agents/robots/allegro_hand/allegro.py,sha256=MnU04fCcvppTGJHdhgvs2OqUKToQtNMXbs2bFM5sXiU,4896
 mani_skill/agents/robots/allegro_hand/allegro_touch.py,sha256=CMnGIf1JT7jbrM5jc-OHfODeYiwrUp8rzuS9rUMzmK4,5805
@@ -25,6 +25,8 @@ mani_skill/agents/robots/dclaw/__init__.py,sha256=t1VSGN3WYw9f3mR7_M08-VhCBQPWOi
 mani_skill/agents/robots/dclaw/dclaw.py,sha256=G5DqqRl2R8NroNyTaStdofEFW23wWqla2qy6mqkVOG8,4082
 mani_skill/agents/robots/fetch/__init__.py,sha256=q3QA2oGTx-LgdmCbpe3wpj1ifoqhhFDdrMMXC43Nsuc,79
 mani_skill/agents/robots/fetch/fetch.py,sha256=k4hZP-CZiUl2MQfVabpKsCprEInqao4x3xFksfTS-rc,15499
+mani_skill/agents/robots/floating_ability_hand/__init__.py,sha256=c4OTQ6OwGSr3NVKeEGVcbZFHyPdc93OKm-0TQVEbC4c,59
+mani_skill/agents/robots/floating_ability_hand/floating_ability_hand.py,sha256=Gj83G_rp9M1NdDNkMf8tZlr41nOJmVMgMVfO6OA0PDA,6795
 mani_skill/agents/robots/floating_panda_gripper/__init__.py,sha256=AwV0Sml7DmQb6hk4FqbxHdO7_XXHHMhrOZtZRk6d-Po,57
 mani_skill/agents/robots/floating_panda_gripper/floating_panda_gripper.py,sha256=sUzYnpEGDW_afwldcerl679wC-6HDEq-APNBSY6-WAo,5071
 mani_skill/agents/robots/floating_robotiq_2f_85_gripper/__init__.py,sha256=bBXRyK4ncX9Zv6R5NXEHD9f6hT9A2PZEsG0n4K76f00,71
@@ -82,8 +84,10 @@ mani_skill/assets/partnet_mobility/meta/info_cabinet_drawer_train.json,sha256=ip
 mani_skill/assets/partnet_mobility/meta/info_chair_train.json,sha256=0562aw8jOrCIptfoUVOthbZfSX0rny4CslUXAOkX3ew,23046
 mani_skill/assets/partnet_mobility/meta/info_faucet_train.json,sha256=B1WwN-fF30WpWEqeyd5N5El4bejf00w3II4CY3LvFSs,32349
 mani_skill/assets/robots/ability_hand/ability_hand_left.urdf,sha256=vVQlfGeLuVRopHNRi2Oqv1C-kOWxy8eR0iy4zQQ0JMI,18094
+mani_skill/assets/robots/ability_hand/ability_hand_left_floating.urdf,sha256=2jvRgtfXJc9DTEIA_dVU-h6CmFYi8cvp2uuPapgAB_Q,20823
 mani_skill/assets/robots/ability_hand/ability_hand_left_glb.urdf,sha256=V6XUBUgsf9VDzFOQFiv_OgClN9bPDOwnAxdzlEWj-ZI,18094
 mani_skill/assets/robots/ability_hand/ability_hand_right.urdf,sha256=OVvPSWO14FoPb1MzwaNTfbTiTO_ENI4pBCT7WRIOP7M,18377
+mani_skill/assets/robots/ability_hand/ability_hand_right_floating.urdf,sha256=dGjKaVyeDX4z_XVfgAaKganBE53ciPFSSVIOPARsCrA,21106
 mani_skill/assets/robots/ability_hand/ability_hand_right_glb.urdf,sha256=v3yym1piKmSNeIfA9dYAdMoAo8ncE1UCqKzjiODXOxI,18377
 mani_skill/assets/robots/ability_hand/meshes/collision/FB_palm_ref.obj,sha256=cclKOfw8Kqx2eiY_YhKHv02iHxPNHNFMqoPWVsC8dfU,31522
 mani_skill/assets/robots/ability_hand/meshes/collision/FB_palm_ref_MIR.obj,sha256=OvZORUoZc75Jguj1OK5XLuJWO-3qdspXDGW3gHgatDM,32872
@@ -560,7 +564,8 @@ mani_skill/envs/tasks/control/assets/hopper.xml,sha256=KezBRNw-bdYsOmXaph0P6Y1uU
 mani_skill/envs/tasks/control/assets/common/materials.xml,sha256=XxJ5NRzW5_g_pKLnM1WP-yl60Z-b1OdXva7G59Nqd3w,1108
 mani_skill/envs/tasks/control/assets/common/skybox.xml,sha256=UvNofKMDKbQHewom_qrZtz0gHhg8veD_vWzVLeUZB70,202
 mani_skill/envs/tasks/control/assets/common/visual.xml,sha256=7B0nSrcNlWBBM9r6ksDRGU3lG7qZe9U7cLxn65XYI_w,175
-mani_skill/envs/tasks/dexterity/__init__.py,sha256=wXXGA6ZdY5HExcnf3Dux4sMFUgmWjyIKTjWF7PCYYIU,121
+mani_skill/envs/tasks/dexterity/__init__.py,sha256=BUM4SkQ1zXWzP0dXQTsP5vIoTfLENzh0oIfvlIQrhf4,163
+mani_skill/envs/tasks/dexterity/insert_flower.py,sha256=RCPtsIjtxRzjnQCf9BxyJ8S7dB7isT1xLssxu6UB0U4,6893
 mani_skill/envs/tasks/dexterity/rotate_single_object_in_hand.py,sha256=cKnlWB5EcTjNVi53wSkqxVgVUYhuRm5QJcPmg9sP-iI,14165
 mani_skill/envs/tasks/dexterity/rotate_valve.py,sha256=SwSaOUXPZJEC5UdrcLGYwat2m3YUQFpHQ_WHCD77890,11659
 mani_skill/envs/tasks/digital_twins/__init__.py,sha256=_yWWBFH-P69UTPvo9GRa8Ux_FXOjLWdkiK3rs2ojX3o,60
@@ -713,7 +718,7 @@ mani_skill/sensors/depth_camera.py,sha256=KCT7DMqQacVag_24MjkKAml87T6FtDqNS0TJFf
 mani_skill/trajectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/trajectory/dataset.py,sha256=nrG3jkhdzRqAdjxC_c8Z4FxpkvW3A9XPvUp9-Ux_u38,6351
 mani_skill/trajectory/merge_trajectory.py,sha256=zsjRMTsiIirZGIV4KrtYOM2-zoOAzd7ObZEdWGJzZbE,3685
-mani_skill/trajectory/replay_trajectory.py,sha256=ABiM4pMSkTAhU1L2fdaY-Mwnw2Hzg8p1rAaWf3ijWOE,27681
+mani_skill/trajectory/replay_trajectory.py,sha256=uqfhiAXrE8EdXRJR03VNXe4EoGXyTpYR_WdKNrYPksE,27801
 mani_skill/trajectory/utils/__init__.py,sha256=-Efv2GEzTnFHd3SxqQtaZLaMRGrCc-P1ClmgLhoV4gs,1465
 mani_skill/trajectory/utils/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/trajectory/utils/actions/conversion.py,sha256=x88C64ke44gB-HEbqq_gSRFv34L7irSwT_wYttkQUn8,12922
@@ -730,7 +735,7 @@ mani_skill/utils/sapien_utils.py,sha256=bUo4jFvWbxjJxXY-cjcR7Q2ZAhAcArlPaElYI2f3
 mani_skill/utils/tree.py,sha256=-nXdhwa9zI0dnaOoKmshcPXuM_S-9ZhhoU1GWb1h_p8,877
 mani_skill/utils/assets/README.md,sha256=5kkmsIiV64ySEGO34HaAlpjXTyrGs1KTV5WnofK46G0,70
 mani_skill/utils/assets/__init__.py,sha256=gQVKwAczcImTXArSltBWKlSUUuguO12sZYO3Jh5KLso,159
-mani_skill/utils/assets/data.py,sha256=xEuibRoEPBDN_vEU-MM5UWf6VDb1omE6BfZKPvlMPdI,8807
+mani_skill/utils/assets/data.py,sha256=WAi6QzhDJZptJ4GCziuwLNYVDzZdxda1vsc71OFw0Sc,9028
 mani_skill/utils/building/__init__.py,sha256=quCI5WYGhzGLMVg_NDyYv2G_MxRTBL8R6XD4a6iY8qc,218
 mani_skill/utils/building/_mjcf_loader.py,sha256=SqzSoRootFvItHrzwrDuSHScePxbaPqWb7262M7HzIU,37011
 mani_skill/utils/building/actor_builder.py,sha256=WHaJKmN9FsIOPjUYnRrOsaD5jhWN_EnncOPOxE1lz-c,15006
@@ -831,15 +836,15 @@ mani_skill/utils/wrappers/cached_reset.py,sha256=osTMFsYlpCzY-OJWjWFSAfUZrZpR8Hs
 mani_skill/utils/wrappers/flatten.py,sha256=GuHJ3fCOdj9G_jm--XgG8k0p2G1eJx4LY1tesQQjnkg,4913
 mani_skill/utils/wrappers/frame_stack.py,sha256=pCp83HqXnFxbsKRYgwXreNBHnhD-yF0R2_7jdtGOTWQ,4213
 mani_skill/utils/wrappers/gymnasium.py,sha256=p0kl29kkedD2arIvGskClKhYDBAH97mZO4rTepz62jQ,4174
-mani_skill/utils/wrappers/record.py,sha256=73g-dvnFrXcFy7t2BksgH8UqBUVBZG8BbCNPEbdtBSc,37362
+mani_skill/utils/wrappers/record.py,sha256=yKeOaPZu9ytd18WH2fv34ckZhnsYhTCz8wS58Bqs-0o,37412
 mani_skill/utils/wrappers/visual_encoders.py,sha256=ISLO5ceaRkINhvce92VuZMDMCU3I4F7cQWFW2aVP-14,2205
 mani_skill/vector/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/vector/wrappers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mani_skill/vector/wrappers/gymnasium.py,sha256=99bHbJ0PlPwcVU4hShsTDj0_2d4bCrrzfJNzYOrOsqc,7315
 mani_skill/vector/wrappers/sb3.py,sha256=SlXdiEPqcNHYMhJCzA29kBU6zK7DKTe1nc0L6Z3QQtY,4722
-mani_skill_nightly-2025.10.21.2002.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-mani_skill_nightly-2025.10.21.2002.dist-info/licenses/LICENSE-3RD-PARTY,sha256=yXvoAIyaRY7VDFhXV4Ea_Dmd_-IR59yyUy0Scz7eXzY,1604
-mani_skill_nightly-2025.10.21.2002.dist-info/METADATA,sha256=ldPAw9HsS0dF0SM2blhG_wPXEmKp1ZJL3ypT70Vx61w,9543
-mani_skill_nightly-2025.10.21.2002.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
-mani_skill_nightly-2025.10.21.2002.dist-info/top_level.txt,sha256=bkBgOVl_MZMoQx2aRFsSFEYlZLxjWlip5vtJ39FB3jA,11
-mani_skill_nightly-2025.10.21.2002.dist-info/RECORD,,
+mani_skill_nightly-2025.10.21.2011.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mani_skill_nightly-2025.10.21.2011.dist-info/licenses/LICENSE-3RD-PARTY,sha256=yXvoAIyaRY7VDFhXV4Ea_Dmd_-IR59yyUy0Scz7eXzY,1604
+mani_skill_nightly-2025.10.21.2011.dist-info/METADATA,sha256=HEQSYEdIRMtqb5GbLv_oY4IG30d2GGR6UUORU1IOqD0,9543
+mani_skill_nightly-2025.10.21.2011.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
+mani_skill_nightly-2025.10.21.2011.dist-info/top_level.txt,sha256=bkBgOVl_MZMoQx2aRFsSFEYlZLxjWlip5vtJ39FB3jA,11
+mani_skill_nightly-2025.10.21.2011.dist-info/RECORD,,