continual-foragax 0.33.2.tar.gz → 0.34.0.tar.gz

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
Files changed (143)
  1. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/PKG-INFO +1 -1
  2. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/pyproject.toml +2 -2
  3. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/continual_foragax.egg-info/PKG-INFO +1 -1
  4. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/env.py +286 -31
  5. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/tests/test_foragax.py +2 -2
  6. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/README.md +0 -0
  7. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/setup.cfg +0 -0
  8. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/continual_foragax.egg-info/SOURCES.txt +0 -0
  9. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/continual_foragax.egg-info/dependency_links.txt +0 -0
  10. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/continual_foragax.egg-info/entry_points.txt +0 -0
  11. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/continual_foragax.egg-info/requires.txt +0 -0
  12. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/continual_foragax.egg-info/top_level.txt +0 -0
  13. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/__init__.py +0 -0
  14. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/colors.py +0 -0
  15. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID100897.txt +0 -0
  16. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID100928.txt +0 -0
  17. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID100929.txt +0 -0
  18. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID100930.txt +0 -0
  19. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID100931.txt +0 -0
  20. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106714.txt +0 -0
  21. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106715.txt +0 -0
  22. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106716.txt +0 -0
  23. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106717.txt +0 -0
  24. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106718.txt +0 -0
  25. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106930.txt +0 -0
  26. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106931.txt +0 -0
  27. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106932.txt +0 -0
  28. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106933.txt +0 -0
  29. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106934.txt +0 -0
  30. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106935.txt +0 -0
  31. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106936.txt +0 -0
  32. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106937.txt +0 -0
  33. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106938.txt +0 -0
  34. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106939.txt +0 -0
  35. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106940.txt +0 -0
  36. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106941.txt +0 -0
  37. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106942.txt +0 -0
  38. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106943.txt +0 -0
  39. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106994.txt +0 -0
  40. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106995.txt +0 -0
  41. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106996.txt +0 -0
  42. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106997.txt +0 -0
  43. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106998.txt +0 -0
  44. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID106999.txt +0 -0
  45. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107000.txt +0 -0
  46. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107001.txt +0 -0
  47. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107002.txt +0 -0
  48. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107003.txt +0 -0
  49. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107004.txt +0 -0
  50. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107005.txt +0 -0
  51. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107006.txt +0 -0
  52. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107007.txt +0 -0
  53. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107008.txt +0 -0
  54. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107009.txt +0 -0
  55. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107010.txt +0 -0
  56. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107011.txt +0 -0
  57. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107012.txt +0 -0
  58. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107013.txt +0 -0
  59. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107014.txt +0 -0
  60. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107015.txt +0 -0
  61. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107016.txt +0 -0
  62. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107017.txt +0 -0
  63. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107018.txt +0 -0
  64. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107019.txt +0 -0
  65. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107020.txt +0 -0
  66. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107021.txt +0 -0
  67. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107022.txt +0 -0
  68. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107023.txt +0 -0
  69. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107024.txt +0 -0
  70. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107025.txt +0 -0
  71. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107026.txt +0 -0
  72. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107027.txt +0 -0
  73. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107028.txt +0 -0
  74. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107029.txt +0 -0
  75. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107030.txt +0 -0
  76. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107031.txt +0 -0
  77. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107032.txt +0 -0
  78. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107033.txt +0 -0
  79. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107034.txt +0 -0
  80. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107035.txt +0 -0
  81. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107036.txt +0 -0
  82. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107037.txt +0 -0
  83. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107038.txt +0 -0
  84. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107039.txt +0 -0
  85. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107040.txt +0 -0
  86. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107041.txt +0 -0
  87. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107042.txt +0 -0
  88. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107043.txt +0 -0
  89. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107044.txt +0 -0
  90. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107045.txt +0 -0
  91. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107046.txt +0 -0
  92. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107047.txt +0 -0
  93. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107048.txt +0 -0
  94. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107049.txt +0 -0
  95. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107050.txt +0 -0
  96. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107051.txt +0 -0
  97. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107052.txt +0 -0
  98. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107053.txt +0 -0
  99. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107054.txt +0 -0
  100. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107055.txt +0 -0
  101. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107056.txt +0 -0
  102. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107057.txt +0 -0
  103. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107058.txt +0 -0
  104. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107059.txt +0 -0
  105. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107060.txt +0 -0
  106. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107061.txt +0 -0
  107. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107062.txt +0 -0
  108. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107063.txt +0 -0
  109. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107064.txt +0 -0
  110. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107065.txt +0 -0
  111. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107066.txt +0 -0
  112. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107067.txt +0 -0
  113. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107068.txt +0 -0
  114. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107069.txt +0 -0
  115. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107070.txt +0 -0
  116. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID107071.txt +0 -0
  117. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID115808.txt +0 -0
  118. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID115812.txt +0 -0
  119. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID146811.txt +0 -0
  120. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156831.txt +0 -0
  121. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156835.txt +0 -0
  122. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156839.txt +0 -0
  123. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156843.txt +0 -0
  124. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156847.txt +0 -0
  125. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156851.txt +0 -0
  126. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156855.txt +0 -0
  127. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156859.txt +0 -0
  128. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156863.txt +0 -0
  129. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156867.txt +0 -0
  130. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156871.txt +0 -0
  131. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156875.txt +0 -0
  132. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156879.txt +0 -0
  133. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156883.txt +0 -0
  134. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/TG_SOUID156887.txt +0 -0
  135. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/elements.txt +0 -0
  136. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/metadata.txt +0 -0
  137. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/data/ECA_non-blended_custom/sources.txt +0 -0
  138. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/objects.py +0 -0
  139. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/registry.py +0 -0
  140. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/rendering.py +0 -0
  141. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/src/foragax/weather.py +0 -0
  142. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/tests/test_benchmark.py +0 -0
  143. {continual_foragax-0.33.2 → continual_foragax-0.34.0}/tests/test_registry.py +0 -0
--- continual_foragax-0.33.2/PKG-INFO
+++ continual_foragax-0.34.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: continual-foragax
-Version: 0.33.2
+Version: 0.34.0
 Summary: A continual reinforcement learning benchmark
 Author-email: Steven Tang <stang5@ualberta.ca>
 Requires-Python: >=3.8
--- continual_foragax-0.33.2/pyproject.toml
+++ continual_foragax-0.34.0/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "continual-foragax"
-version = "0.33.2"
+version = "0.34.0"
 description = "A continual reinforcement learning benchmark"
 readme = "README.md"
 authors = [
@@ -30,7 +30,7 @@ build-backend = "setuptools.build_meta"
 [tool]
 [tool.commitizen]
 name = "cz_conventional_commits"
-version = "0.33.2"
+version = "0.34.0"
 tag_format = "$version"
 version_files = ["pyproject.toml"]
 
--- continual_foragax-0.33.2/src/continual_foragax.egg-info/PKG-INFO
+++ continual_foragax-0.34.0/src/continual_foragax.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: continual-foragax
-Version: 0.33.2
+Version: 0.34.0
 Summary: A continual reinforcement learning benchmark
 Author-email: Steven Tang <stang5@ualberta.ca>
 Requires-Python: >=3.8
--- continual_foragax-0.33.2/src/foragax/env.py
+++ continual_foragax-0.34.0/src/foragax/env.py
@@ -1235,12 +1235,91 @@ class ForagaxEnv(environment.Environment):
 
         return spaces.Box(0, 1, obs_shape, float)
 
+    def _compute_reward_grid(self, state: EnvState) -> jax.Array:
+        """Compute rewards for all grid positions.
+
+        Returns:
+            Array of shape (H, W) with reward values for each cell
+        """
+        fixed_key = jax.random.key(0)  # Fixed key for deterministic reward computation
+
+        def compute_reward(obj_id, params):
+            return jax.lax.cond(
+                obj_id > 0,
+                lambda: jax.lax.switch(
+                    obj_id, self.reward_fns, state.time, fixed_key, params
+                ),
+                lambda: 0.0,
+            )
+
+        reward_grid = jax.vmap(jax.vmap(compute_reward))(
+            state.object_state.object_id, state.object_state.state_params
+        )
+        return reward_grid
+
+    def _reward_to_color(self, reward: jax.Array) -> jax.Array:
+        """Convert reward value to RGB color using diverging gradient.
+
+        Args:
+            reward: Reward value (typically -1 to +1)
+
+        Returns:
+            RGB color array with shape (..., 3) and dtype uint8
+        """
+        # Diverging gradient: +1 = green (0, 255, 0), 0 = white (255, 255, 255), -1 = magenta (255, 0, 255)
+        # Clamp reward to [-1, 1] range for color mapping
+        reward_clamped = jnp.clip(reward, -1.0, 1.0)
+
+        # For positive rewards: interpolate from white to green
+        # For negative rewards: interpolate from white to magenta
+        # At reward = 0: white (255, 255, 255)
+        # At reward = +1: green (0, 255, 0)
+        # At reward = -1: magenta (255, 0, 255)
+
+        red_component = jnp.where(
+            reward_clamped >= 0,
+            (1 - reward_clamped) * 255,  # Fade from white to green: 255 -> 0
+            255,  # Stay at 255 for all negative rewards
+        )
+
+        green_component = jnp.where(
+            reward_clamped >= 0,
+            255,  # Stay at 255 for all positive rewards
+            (1 + reward_clamped) * 255,  # Fade from white to magenta: 255 -> 0
+        )
+
+        blue_component = jnp.where(
+            reward_clamped >= 0,
+            (1 - reward_clamped) * 255,  # Fade from white to green: 255 -> 0
+            255,  # Stay at 255 for all negative rewards
+        )
+
+        return jnp.stack(
+            [red_component, green_component, blue_component], axis=-1
+        ).astype(jnp.uint8)
+
     @partial(jax.jit, static_argnames=("self", "render_mode"))
-    def render(self, state: EnvState, params: EnvParams, render_mode: str = "world"):
-        """Render the environment state."""
-        is_world_mode = render_mode in ("world", "world_true")
-        is_aperture_mode = render_mode in ("aperture", "aperture_true")
+    def render(
+        self,
+        state: EnvState,
+        params: EnvParams,
+        render_mode: str = "world",
+    ):
+        """Render the environment state.
+
+        Args:
+            state: Current environment state
+            params: Environment parameters
+            render_mode: One of "world", "world_true", "world_reward", "aperture", "aperture_true", "aperture_reward"
+        """
+        is_world_mode = render_mode in ("world", "world_true", "world_reward")
+        is_aperture_mode = render_mode in (
+            "aperture",
+            "aperture_true",
+            "aperture_reward",
+        )
         is_true_mode = render_mode in ("world_true", "aperture_true")
+        is_reward_mode = render_mode in ("world_reward", "aperture_reward")
 
         if is_world_mode:
             # Create an RGB image from the object grid
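For readers scanning the hunk above: `_reward_to_color` is a piecewise-linear diverging gradient (reward +1 maps to green, 0 to white, -1 to magenta). The following standalone sketch mirrors that same mapping outside the class on a few sample rewards; it is purely illustrative and not part of the package:

    import jax.numpy as jnp

    def reward_to_color(reward):
        # Mirrors the diverging gradient in the diff: +1 -> green, 0 -> white, -1 -> magenta.
        r = jnp.clip(reward, -1.0, 1.0)
        red = jnp.where(r >= 0, (1 - r) * 255, 255)
        green = jnp.where(r >= 0, 255, (1 + r) * 255)
        blue = jnp.where(r >= 0, (1 - r) * 255, 255)
        return jnp.stack([red, green, blue], axis=-1).astype(jnp.uint8)

    print(reward_to_color(jnp.array([-1.0, 0.0, 1.0])))
    # [[255   0 255]   magenta at reward -1
    #  [255 255 255]   white   at reward  0
    #  [  0 255   0]]  green   at reward +1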
@@ -1265,6 +1344,29 @@ class ForagaxEnv(environment.Environment):
 
             img = jax.lax.fori_loop(0, len(self.object_ids), update_image, img)
 
+            if is_reward_mode:
+                # Scale image by 3 to create space for reward visualization
+                img = jax.image.resize(
+                    img,
+                    (self.size[1] * 3, self.size[0] * 3, 3),
+                    jax.image.ResizeMethod.NEAREST,
+                )
+
+                # Compute rewards for all cells
+                reward_grid = self._compute_reward_grid(state)
+
+                # Convert rewards to colors
+                reward_colors = self._reward_to_color(reward_grid)
+
+                # Resize reward colors to match 3x scale and place in middle cells
+                # We need to place reward colors at positions (i*3+1, j*3+1) for each (i,j)
+                # Create index arrays for middle cells
+                i_indices = jnp.arange(self.size[1])[:, None] * 3 + 1
+                j_indices = jnp.arange(self.size[0])[None, :] * 3 + 1
+
+                # Broadcast and set middle cells
+                img = img.at[i_indices, j_indices].set(reward_colors)
+
             # Tint the agent's aperture
             y_coords, x_coords, y_coords_adj, x_coords_adj = (
                 self._compute_aperture_coordinates(state.pos)
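The index arithmetic in this hunk writes a reward color at position (i*3+1, j*3+1), i.e. the center pixel of each 3x3 block produced by the nearest-neighbour upscale. A minimal standalone sketch of that placement on a toy 2x2 grid (all names and sizes here are illustrative, not the environment's own):

    import jax
    import jax.numpy as jnp

    H, W = 2, 2
    base = jnp.full((H, W, 3), 200, dtype=jnp.uint8)            # toy cell colors
    img3 = jax.image.resize(base, (H * 3, W * 3, 3), jax.image.ResizeMethod.NEAREST)

    reward_colors = jnp.zeros((H, W, 3), dtype=jnp.uint8)       # toy per-cell reward colors
    i_idx = jnp.arange(H)[:, None] * 3 + 1                      # center row of each 3x3 block
    j_idx = jnp.arange(W)[None, :] * 3 + 1                      # center column of each 3x3 block
    img3 = img3.at[i_idx, j_idx].set(reward_colors)             # one marked pixel per original cell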
@@ -1273,27 +1375,127 @@ class ForagaxEnv(environment.Environment):
             alpha = 0.2
             agent_color = jnp.array(AGENT.color)
 
-            if self.nowrap:
-                # Create tint mask: any in-bounds original position maps to a cell makes it tinted
-                tint_mask = jnp.zeros((self.size[1], self.size[0]), dtype=int)
-                tint_mask = tint_mask.at[y_coords_adj, x_coords_adj].set(1)
-                # Apply tint to masked positions
-                original_colors = img
-                tinted_colors = (1 - alpha) * original_colors + alpha * agent_color
-                img = jnp.where(tint_mask[..., None], tinted_colors, img)
+            if is_reward_mode:
+                # For reward mode, we need to adjust coordinates for 3x scaled image
+                if self.nowrap:
+                    # Create tint mask for 3x scaled image
+                    tint_mask = jnp.zeros(
+                        (self.size[1] * 3, self.size[0] * 3), dtype=bool
+                    )
+
+                    # For each aperture cell, tint all 9 cells in its 3x3 block
+                    # Create meshgrid to get all aperture cell coordinates
+                    y_grid, x_grid = jnp.meshgrid(
+                        y_coords_adj.flatten(), x_coords_adj.flatten(), indexing="ij"
+                    )
+                    y_flat = y_grid.flatten()
+                    x_flat = x_grid.flatten()
+
+                    # Create offset arrays for 3x3 blocks
+                    offsets = jnp.array(
+                        [[dy, dx] for dy in range(3) for dx in range(3)]
+                    )
+
+                    # For each aperture cell, expand to 9 cells
+                    # We need to repeat each cell coordinate 9 times, then add offsets
+                    num_aperture_cells = y_flat.size
+                    y_base = jnp.repeat(
+                        y_flat * 3, 9
+                    )  # Repeat each y coord 9 times and scale by 3
+                    x_base = jnp.repeat(
+                        x_flat * 3, 9
+                    )  # Repeat each x coord 9 times and scale by 3
+                    y_offsets = jnp.tile(
+                        offsets[:, 0], num_aperture_cells
+                    )  # Tile all 9 offsets
+                    x_offsets = jnp.tile(
+                        offsets[:, 1], num_aperture_cells
+                    )  # Tile all 9 offsets
+                    y_expanded = y_base + y_offsets
+                    x_expanded = x_base + x_offsets
+
+                    tint_mask = tint_mask.at[y_expanded, x_expanded].set(True)
+
+                    original_colors = img
+                    tinted_colors = (1 - alpha) * original_colors + alpha * agent_color
+                    img = jnp.where(tint_mask[..., None], tinted_colors, img)
+                else:
+                    # Tint all 9 cells in each 3x3 block for aperture cells
+                    # Create meshgrid to get all aperture cell coordinates
+                    y_grid, x_grid = jnp.meshgrid(
+                        y_coords_adj.flatten(), x_coords_adj.flatten(), indexing="ij"
+                    )
+                    y_flat = y_grid.flatten()
+                    x_flat = x_grid.flatten()
+
+                    # Create offset arrays for 3x3 blocks
+                    offsets = jnp.array(
+                        [[dy, dx] for dy in range(3) for dx in range(3)]
+                    )
+
+                    # For each aperture cell, expand to 9 cells
+                    # We need to repeat each cell coordinate 9 times, then add offsets
+                    num_aperture_cells = y_flat.size
+                    y_base = jnp.repeat(
+                        y_flat * 3, 9
+                    )  # Repeat each y coord 9 times and scale by 3
+                    x_base = jnp.repeat(
+                        x_flat * 3, 9
+                    )  # Repeat each x coord 9 times and scale by 3
+                    y_offsets = jnp.tile(
+                        offsets[:, 0], num_aperture_cells
+                    )  # Tile all 9 offsets
+                    x_offsets = jnp.tile(
+                        offsets[:, 1], num_aperture_cells
+                    )  # Tile all 9 offsets
+                    y_expanded = y_base + y_offsets
+                    x_expanded = x_base + x_offsets
+
+                    # Get original colors and tint them
+                    original_colors = img[y_expanded, x_expanded]
+                    tinted_colors = (1 - alpha) * original_colors + alpha * agent_color
+                    img = img.at[y_expanded, x_expanded].set(tinted_colors)
+
+                # Agent color - set all 9 cells of the agent's 3x3 block
+                agent_y, agent_x = state.pos[1], state.pos[0]
+                agent_offsets = jnp.array(
+                    [[dy, dx] for dy in range(3) for dx in range(3)]
+                )
+                agent_y_cells = agent_y * 3 + agent_offsets[:, 0]
+                agent_x_cells = agent_x * 3 + agent_offsets[:, 1]
+                img = img.at[agent_y_cells, agent_x_cells].set(
+                    jnp.array(AGENT.color, dtype=jnp.uint8)
+                )
+
+                # Scale by 8 to final size
+                img = jax.image.resize(
+                    img,
+                    (self.size[1] * 24, self.size[0] * 24, 3),
+                    jax.image.ResizeMethod.NEAREST,
+                )
             else:
-                original_colors = img[y_coords_adj, x_coords_adj]
-                tinted_colors = (1 - alpha) * original_colors + alpha * agent_color
-                img = img.at[y_coords_adj, x_coords_adj].set(tinted_colors)
+                # Standard rendering without reward visualization
+                if self.nowrap:
+                    # Create tint mask: any in-bounds original position maps to a cell makes it tinted
+                    tint_mask = jnp.zeros((self.size[1], self.size[0]), dtype=int)
+                    tint_mask = tint_mask.at[y_coords_adj, x_coords_adj].set(1)
+                    # Apply tint to masked positions
+                    original_colors = img
+                    tinted_colors = (1 - alpha) * original_colors + alpha * agent_color
+                    img = jnp.where(tint_mask[..., None], tinted_colors, img)
+                else:
+                    original_colors = img[y_coords_adj, x_coords_adj]
+                    tinted_colors = (1 - alpha) * original_colors + alpha * agent_color
+                    img = img.at[y_coords_adj, x_coords_adj].set(tinted_colors)
 
-            # Agent color
-            img = img.at[state.pos[1], state.pos[0]].set(jnp.array(AGENT.color))
+                # Agent color
+                img = img.at[state.pos[1], state.pos[0]].set(jnp.array(AGENT.color))
 
-            img = jax.image.resize(
-                img,
-                (self.size[1] * 24, self.size[0] * 24, 3),
-                jax.image.ResizeMethod.NEAREST,
-            )
+                img = jax.image.resize(
+                    img,
+                    (self.size[1] * 24, self.size[0] * 24, 3),
+                    jax.image.ResizeMethod.NEAREST,
+                )
 
         if is_true_mode:
             # Apply true object borders by overlaying true colors on border pixels
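The repeat/tile pattern used for tinting above expands each tinted cell coordinate into the nine pixel coordinates of its 3x3 block in the upscaled image. A minimal sketch of just that index expansion, with made-up coordinates standing in for the aperture cells:

    import jax.numpy as jnp

    y_cells = jnp.array([0, 2])                                 # toy cell rows to tint
    x_cells = jnp.array([1, 1])                                 # toy cell columns to tint

    offsets = jnp.array([[dy, dx] for dy in range(3) for dx in range(3)])  # 9 offsets per block
    n = y_cells.size

    y_expanded = jnp.repeat(y_cells * 3, 9) + jnp.tile(offsets[:, 0], n)   # 9 pixel rows per cell
    x_expanded = jnp.repeat(x_cells * 3, 9) + jnp.tile(offsets[:, 1], n)   # 9 pixel cols per cell
    # y_expanded/x_expanded now index all 18 pixels covering the two 3x3 blocks,
    # the same shape of index arrays that the tinting code above scatters into.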
@@ -1340,16 +1542,69 @@ class ForagaxEnv(environment.Environment):
             aperture_one_hot = jax.nn.one_hot(aperture, len(self.object_ids))
             img = jnp.tensordot(aperture_one_hot, self.object_colors, axes=1)
 
-            # Draw agent in the center
-            center_y, center_x = self.aperture_size[1] // 2, self.aperture_size[0] // 2
-            img = img.at[center_y, center_x].set(jnp.array(AGENT.color))
+            if is_reward_mode:
+                # Scale image by 3 to create space for reward visualization
+                img = img.astype(jnp.uint8)
+                img = jax.image.resize(
+                    img,
+                    (self.aperture_size[0] * 3, self.aperture_size[1] * 3, 3),
+                    jax.image.ResizeMethod.NEAREST,
+                )
 
-            img = img.astype(jnp.uint8)
-            img = jax.image.resize(
-                img,
-                (self.aperture_size[0] * 24, self.aperture_size[1] * 24, 3),
-                jax.image.ResizeMethod.NEAREST,
-            )
+                # Compute rewards for aperture region
+                y_coords, x_coords, y_coords_adj, x_coords_adj = (
+                    self._compute_aperture_coordinates(state.pos)
+                )
+
+                # Get reward grid for the full world
+                full_reward_grid = self._compute_reward_grid(state)
+
+                # Extract aperture rewards
+                aperture_rewards = full_reward_grid[y_coords_adj, x_coords_adj]
+
+                # Convert rewards to colors
+                reward_colors = self._reward_to_color(aperture_rewards)
+
+                # Place reward colors in the middle cells (index 1 in each 3x3 block)
+                i_indices = jnp.arange(self.aperture_size[0])[:, None] * 3 + 1
+                j_indices = jnp.arange(self.aperture_size[1])[None, :] * 3 + 1
+                img = img.at[i_indices, j_indices].set(reward_colors)
+
+                # Draw agent in the center (all 9 cells of the 3x3 block)
+                center_y, center_x = (
+                    self.aperture_size[1] // 2,
+                    self.aperture_size[0] // 2,
+                )
+                agent_offsets = jnp.array(
+                    [[dy, dx] for dy in range(3) for dx in range(3)]
+                )
+                agent_y_cells = center_y * 3 + agent_offsets[:, 0]
+                agent_x_cells = center_x * 3 + agent_offsets[:, 1]
+                img = img.at[agent_y_cells, agent_x_cells].set(
+                    jnp.array(AGENT.color, dtype=jnp.uint8)
+                )
+
+                # Scale by 8 to final size
+                img = jax.image.resize(
+                    img,
+                    (self.aperture_size[0] * 24, self.aperture_size[1] * 24, 3),
+                    jax.image.ResizeMethod.NEAREST,
+                )
+            else:
+                # Standard rendering without reward visualization
+                # Draw agent in the center
+                center_y, center_x = (
+                    self.aperture_size[1] // 2,
+                    self.aperture_size[0] // 2,
+                )
+                img = img.at[center_y, center_x].set(jnp.array(AGENT.color))
+
+                img = img.astype(jnp.uint8)
+                img = jax.image.resize(
+                    img,
+                    (self.aperture_size[0] * 24, self.aperture_size[1] * 24, 3),
+                    jax.image.ResizeMethod.NEAREST,
+                )
 
         if is_true_mode:
             # Apply true object borders by overlaying true colors on border pixels
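Taken together, the reward modes plug into the existing `render` entry point; only the `render_mode` string changes. A fragment-style sketch of calling the new modes, where `env`, `params`, and `state` are assumed to come from the caller's usual ForagaxEnv setup (which this diff does not show in full):

    # Assumes `env` is a ForagaxEnv and `params`/`state` come from env.reset()/env.step(),
    # as in the test file below; only the render_mode strings are new in 0.34.0.
    world_img = env.render(state, params, render_mode="world")                      # existing behaviour
    reward_img = env.render(state, params, render_mode="world_reward")              # world view + per-cell reward overlay
    aperture_reward_img = env.render(state, params, render_mode="aperture_reward")  # aperture view + overlay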
--- continual_foragax-0.33.2/tests/test_foragax.py
+++ continual_foragax-0.34.0/tests/test_foragax.py
@@ -2800,7 +2800,7 @@ def test_sine_object():
 
 
 def test_info_rewards():
-    """Test that info contains rewards with reward values for each grid position."""
+    """Test that info contains rewards with next reward values for each grid position."""
    key = jax.random.key(0)
    env = ForagaxEnv(
        size=(5, 5),
@@ -2812,7 +2812,7 @@ def test_info_rewards():
     obs, state = env.reset(key, params)
 
     key, step_key = jax.random.split(key)
-    _, _, _, _, info = env.step(step_key, state, Actions.UP, params)
+    _, state, _, _, info = env.step(step_key, state, Actions.UP, params)
 
     # Check that rewards is in info
     assert "rewards" in info