multi-agent-rlenv 3.7.1__py3-none-any.whl → 3.7.2__py3-none-any.whl

This diff compares publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
marlenv/adapters/__init__.py CHANGED
@@ -1,25 +1,28 @@
-from importlib.util import find_spec
 from .pymarl_adapter import PymarlAdapter
-from marlenv.utils import dummy_type, dummy_function
+from marlenv.utils import dummy_function
 
-HAS_GYM = find_spec("gymnasium") is not None
-if HAS_GYM:
+try:
     from .gym_adapter import Gym, make
-else:
-    Gym = dummy_type("gymnasium")
+
+    HAS_GYM = True
+except ImportError:
+    HAS_GYM = False
     make = dummy_function("gymnasium")
 
-HAS_PETTINGZOO = find_spec("pettingzoo") is not None
-if HAS_PETTINGZOO:
+try:
     from .pettingzoo_adapter import PettingZoo
-else:
-    PettingZoo = dummy_type("pettingzoo")
 
-HAS_SMAC = find_spec("smac") is not None
-if HAS_SMAC:
+    HAS_PETTINGZOO = True
+except ImportError:
+    HAS_PETTINGZOO = False
+
+try:
     from .smac_adapter import SMAC
-else:
-    SMAC = dummy_type("smac", "https://github.com/oxwhirl/smac.git")
+
+    HAS_SMAC = True
+except ImportError:
+    HAS_SMAC = False
+
 
 __all__ = [
     "PymarlAdapter",
marlenv/adapters/pettingzoo_adapter.py CHANGED
@@ -3,7 +3,7 @@ from typing import Sequence
 
 import numpy as np
 import numpy.typing as npt
-from gymnasium import spaces  # pettingzoo uses gymnasium spaces
+from gymnasium import spaces
 from pettingzoo import ParallelEnv
 
 from marlenv.models import MARLEnv, Observation, State, Step, DiscreteSpace, ContinuousSpace, Space
marlenv/adapters/smac_adapter.py CHANGED
@@ -181,20 +181,7 @@ class SMAC(MARLEnv[MultiDiscreteSpace]):
 
     def step(self, action):
         reward, done, info = self._env.step(action)
-        obs = Observation(
-            self._env.get_obs(),  # type: ignore
-            self.available_actions(),
-        )
-        state = self.get_state()
-        step = Step(
-            obs,
-            state,
-            reward,
-            done,
-            False,
-            info,
-        )
-        return step
+        return Step(self.get_observation(), self.get_state(), reward, done, False, info)
 
     def available_actions(self) -> npt.NDArray[np.bool]:
         return np.array(self._env.get_avail_actions()) == 1
marlenv/catalog/__init__.py CHANGED
@@ -1,10 +1,15 @@
-from marlenv.adapters import SMAC
 from .deepsea import DeepSea
 from .matrix_game import MatrixGame
 from .coordinated_grid import CoordinatedGrid
 
 
-__all__ = ["SMAC", "DeepSea", "lle", "overcooked", "MatrixGame", "connect_n", "CoordinatedGrid"]
+__all__ = ["smac", "DeepSea", "lle", "overcooked", "MatrixGame", "connect_n", "CoordinatedGrid"]
+
+
+def smac():
+    from marlenv.adapters import SMAC
+
+    return SMAC
 
 
 def lle():
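Moving the SMAC import inside a smac() accessor means importing marlenv.catalog no longer pulls in the optional smac package eagerly; the dependency is resolved only when the accessor is called, in the same lazy style as the existing lle() accessor visible in the context above. A short usage sketch, using only names defined in this file:

from marlenv.catalog import smac

# Resolves marlenv.adapters.SMAC on first call; an ImportError for the missing
# optional dependency is raised here rather than when the catalog is imported.
SMAC = smac()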
marlenv/models/observation.py CHANGED
@@ -92,7 +92,7 @@ class Observation:
         """
         Convert the observation to a tuple of tensors of shape (1, n_agents, <dim>).
         """
-        import torch
+        import torch  # pyright: ignore[reportMissingImports]
 
         data = torch.from_numpy(self.data).unsqueeze(0).to(device, non_blocking=True)
         extras = torch.from_numpy(self.extras).unsqueeze(0).to(device, non_blocking=True)
marlenv/models/state.py CHANGED
@@ -55,7 +55,7 @@ class State(Generic[StateType]):
 
     def as_tensors(self, device=None):
         """Convert the state to a tuple of tensors of shape (1, <dim>)."""
-        import torch
+        import torch  # pyright: ignore[reportMissingImports]
 
         data = torch.from_numpy(self.data).unsqueeze(0).to(device, non_blocking=True)
         extras = torch.from_numpy(self.extras).unsqueeze(0).to(device, non_blocking=True)
marlenv/utils/import_placeholders.py CHANGED
@@ -3,6 +3,8 @@ from types import SimpleNamespace
 
 
 def _raise_error(module_name: str, package_name: Optional[str] = None):
+    if package_name is None:
+        package_name = module_name
     raise ImportError(
         f"The optional dependency `{module_name}` is not installed.\nInstall the `{package_name}` package (e.g. pip install {package_name})."
     )
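Before this fix, calling _raise_error without an explicit package_name interpolated None into the install hint; the new default falls back to the module name. A small illustration of the resulting message, assuming the function lives in marlenv.utils.import_placeholders (the path suggested by the RECORD entries below) and noting that library code normally reaches it indirectly through the dummy_function/dummy_type placeholders rather than calling it directly:

from marlenv.utils.import_placeholders import _raise_error

try:
    _raise_error("gymnasium")  # no package_name given: defaults to the module name
except ImportError as error:
    print(error)
    # The optional dependency `gymnasium` is not installed.
    # Install the `gymnasium` package (e.g. pip install gymnasium).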
multi_agent_rlenv-3.7.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: multi-agent-rlenv
-Version: 3.7.1
+Version: 3.7.2
 Summary: A strongly typed Multi-Agent Reinforcement Learning framework
 Project-URL: repository, https://github.com/yamoling/multi-agent-rlenv
 Author-email: Yannick Molinghen <yannick.molinghen@ulb.be>
multi_agent_rlenv-3.7.2.dist-info/RECORD CHANGED
@@ -4,12 +4,12 @@ marlenv/env_pool.py,sha256=mJhJUROX9k2A2njwnUOBl2EAuhotksQMugH_Zydg1IU,951
 marlenv/exceptions.py,sha256=gJUC_2rVAvOfK_ypVFc7Myh-pIfSU3To38VBVS_0rZA,1179
 marlenv/mock_env.py,sha256=rvl4QAn046HM79IMMiAj1Aoy3_GBSNBBR1_9fHPutR8,4682
 marlenv/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-marlenv/adapters/__init__.py,sha256=JsKWaeXvUjWEg3JC9TOtyjtZlTI9AAkLyab-jDa5yzM,783
+marlenv/adapters/__init__.py,sha256=G-PwyGDymdAMFWtCpnlHkHQuSE40Q8bv_-yQ7gVcbbM,600
 marlenv/adapters/gym_adapter.py,sha256=DXQ1czcvRoL9hTwcVzfMyXArZeVIHP1gAKqZJO87y7Y,3065
-marlenv/adapters/pettingzoo_adapter.py,sha256=A3dcwsQa7jlWc14TybXpBknl0FPK5YK9w-6tzMBHlDI,2913
+marlenv/adapters/pettingzoo_adapter.py,sha256=UzSUdP4EUJOt49AB7H45ToA8rUkGmPQgrJKegvK86og,2877
 marlenv/adapters/pymarl_adapter.py,sha256=2s7EY31s1hrml3q-BBaXo_eDMXTjkebozZPvzsgrb9c,3353
-marlenv/adapters/smac_adapter.py,sha256=nGWNRpn1F6ZFIoTcU0IJGApc_1GHaoBOVsoNljJ-PAg,8509
-marlenv/catalog/__init__.py,sha256=UCJGbmVzNtKvO3fZQWxR_EigGpXhAyIMevyXxghB2F8,535
+marlenv/adapters/smac_adapter.py,sha256=OIR0_do9KavLlZ2f1YQNJwhl_yLCa6SVvCrp78hwU20,8279
+marlenv/catalog/__init__.py,sha256=l9_lvqpV2wKKMYDrStbW93WGEBDhGw6KjgbZsOcLKx0,570
 marlenv/catalog/coordinated_grid.py,sha256=Kq5UzG9rr5gYRO0QWFCmKmO56JIzgIR19an9_pvypJU,4997
 marlenv/catalog/deepsea.py,sha256=yTyvskWZiAZem11L8cZwHedBIDQ4EAxE2IaUKrjKL2U,2413
 marlenv/catalog/matrix_game.py,sha256=zkErnh6ZIa1kBryYMVLw-jeMCd2AJ-BlP2yROxpbb0w,1519
@@ -20,14 +20,14 @@ marlenv/catalog/connectn/env.py,sha256=Ot5vfAbzS6eRe3-nLW_AkhEH7F1WVvv4_odoxZU7H
 marlenv/models/__init__.py,sha256=uihmRs71Gg5z7Bvau_xtaQVg7xEtX8sTzi74bIHL5P0,443
 marlenv/models/env.py,sha256=BG1iVHxGD_p827mF0ewyOBn6wU2gtFsHLW1b4UtW-V0,7841
 marlenv/models/episode.py,sha256=zsyxsW4LIioPKyY4DZKn64A31e5ZvlwOf3HIGuRUzhs,13531
-marlenv/models/observation.py,sha256=RhvKvmys4bu3UwwVsvu7fJ7TMKt2QkKnBD1e0hw2r7s,3528
+marlenv/models/observation.py,sha256=6uY2h0zHBm6g1ECzD8jZLXuSzuuX-U60QW0E_b4qPuc,3569
 marlenv/models/spaces.py,sha256=d_aIPWwPdaOWZeNRUUdzSiDxs9XQb9itPnrE_EyhhfQ,7810
-marlenv/models/state.py,sha256=LbP--JxBzRwMFpEAaZyxCX13xKQ27xPE2fabohaq9YI,2058
+marlenv/models/state.py,sha256=JvCXwf0l7L2UMHkvYp-WM_aDegJ-hePpQI2yiUw6X_g,2099
 marlenv/models/step.py,sha256=00PhD_ccdCIYAY1SVJdJU91weU0Y_tNIJwK16TN_53I,3056
 marlenv/models/transition.py,sha256=UkJVRNxZoyRkjE7YmKtUf_4xA7cOEh20O60dTldbvys,5070
 marlenv/utils/__init__.py,sha256=ky5mz_T7EF65YNaEN1UDCUYZVlz7hFyKResgIJlE_1Q,462
 marlenv/utils/cached_property_collector.py,sha256=IOjbr61f0DqLhcidXKrl7MhN1BOEGiTzCANIKQCxaF0,600
-marlenv/utils/import_placeholders.py,sha256=QN7gsfbFgSP2Lh-7YBC1RH-SNjbFacvRFmBgNs4Eb90,972
+marlenv/utils/import_placeholders.py,sha256=nNcOGHSsBVmcUXRALJD7YBg5WzX6vTQGTfsl8LnMQgA,1036
 marlenv/utils/schedule.py,sha256=4S0V0RyYHuReVafeHnpfvSLf3oF0buAzD09qMFfexa0,9133
 marlenv/wrappers/__init__.py,sha256=Z4_M-mxRNKQeu52tkmQ4B2m3-zrsmjfXXL5NsWQ4vu4,952
 marlenv/wrappers/action_randomizer.py,sha256=A1kejqGOTA0sc_RQL0EOd6sMSbcIdiV5zlscjKUlzdY,474
@@ -45,7 +45,7 @@ marlenv/wrappers/rlenv_wrapper.py,sha256=iFSQsDMkUUbQJKEO8l6SosNi-eOUVSh4pIJVu7a
 marlenv/wrappers/state_counter.py,sha256=QmEMb55vOnK-VJuvKsDIIBgcNRsHuovqgpK2pcCY7sA,1211
 marlenv/wrappers/time_limit.py,sha256=HctKeiepPQ2NAIa208SnvknioSkRIuUQ4X-Xhf_XTs0,3974
 marlenv/wrappers/video_recorder.py,sha256=mtWcqaYNCu-zjVXvpa8DJe3_062tpK_TChOu-Xyxs3s,2533
-multi_agent_rlenv-3.7.1.dist-info/METADATA,sha256=1yiAoMwqkzgpKby8KV09M-vVLRgJS5ZKZMqZCfHn80A,5751
-multi_agent_rlenv-3.7.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-multi_agent_rlenv-3.7.1.dist-info/licenses/LICENSE,sha256=_eeiGVoIJ7kYt6l1zbIvSBQppTnw0mjnYk1lQ4FxEjE,1074
-multi_agent_rlenv-3.7.1.dist-info/RECORD,,
+multi_agent_rlenv-3.7.2.dist-info/METADATA,sha256=loGAqI0-dvtGJEM6txWjaP00TImesivli2RI0Pd2OK0,5751
+multi_agent_rlenv-3.7.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+multi_agent_rlenv-3.7.2.dist-info/licenses/LICENSE,sha256=_eeiGVoIJ7kYt6l1zbIvSBQppTnw0mjnYk1lQ4FxEjE,1074
+multi_agent_rlenv-3.7.2.dist-info/RECORD,,