buildstock-fetch 1.0.4__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (214)
  1. buildstock_fetch/data/buildstock_releases.json +166 -77
  2. buildstock_fetch/data/weather_station_map/weather_station_map.parquet +0 -0
  3. buildstock_fetch/main.py +850 -120
  4. buildstock_fetch/main_cli.py +169 -61
  5. {buildstock_fetch-1.0.4.dist-info → buildstock_fetch-1.2.0.dist-info}/METADATA +4 -2
  6. {buildstock_fetch-1.0.4.dist-info → buildstock_fetch-1.2.0.dist-info}/RECORD +214 -213
  7. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Alaska → state=AK}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  8. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Alaska → state=AK}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  9. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Alabama → state=AL}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  10. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Alabama → state=AL}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  11. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Arkansas → state=AR}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  12. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Arkansas → state=AR}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  13. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Arizona → state=AZ}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  14. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Arizona → state=AZ}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  15. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=California → state=CA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  16. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=California → state=CA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  17. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Colorado → state=CO}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  18. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Colorado → state=CO}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  19. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Connecticut → state=CT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  20. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Connecticut → state=CT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  21. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=District%20of%20Columbia → state=DC}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  22. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=District%20of%20Columbia → state=DC}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  23. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Delaware → state=DE}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  24. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Delaware → state=DE}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  25. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Florida → state=FL}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  26. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Florida → state=FL}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  27. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Georgia → state=GA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  28. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Georgia → state=GA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  29. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Hawaii → state=HI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  30. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Hawaii → state=HI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  31. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Iowa → state=IA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  32. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Iowa → state=IA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  33. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Idaho → state=ID}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  34. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Idaho → state=ID}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  35. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Illinois → state=IL}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  36. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Illinois → state=IL}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  37. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Indiana → state=IN}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  38. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Indiana → state=IN}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  39. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Kansas → state=KS}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  40. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Kansas → state=KS}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  41. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Kentucky → state=KY}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  42. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Kentucky → state=KY}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  43. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Louisiana → state=LA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  44. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Louisiana → state=LA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  45. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Massachusetts → state=MA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  46. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Massachusetts → state=MA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  47. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Maryland → state=MD}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  48. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Maryland → state=MD}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  49. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Maine → state=ME}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  50. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Maine → state=ME}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  51. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Michigan → state=MI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  52. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Michigan → state=MI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  53. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Minnesota → state=MN}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  54. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Minnesota → state=MN}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  55. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Missouri → state=MO}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  56. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Missouri → state=MO}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  57. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Mississippi → state=MS}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  58. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Mississippi → state=MS}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  59. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Montana → state=MT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  60. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Montana → state=MT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  61. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=North%20Carolina → state=NC}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  62. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=North%20Carolina → state=NC}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  63. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=North%20Dakota → state=ND}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  64. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=North%20Dakota → state=ND}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  65. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Nebraska → state=NE}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  66. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Nebraska → state=NE}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  67. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20Hampshire → state=NH}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  68. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20Hampshire → state=NH}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  69. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20Jersey → state=NJ}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  70. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20Jersey → state=NJ}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  71. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20Mexico → state=NM}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  72. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20Mexico → state=NM}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  73. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Nevada → state=NV}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  74. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Nevada → state=NV}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  75. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20York → state=NY}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  76. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=New%20York → state=NY}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  77. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Ohio → state=OH}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  78. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Ohio → state=OH}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  79. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Oklahoma → state=OK}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  80. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Oklahoma → state=OK}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  81. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Oregon → state=OR}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  82. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Oregon → state=OR}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  83. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Pennsylvania → state=PA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  84. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Pennsylvania → state=PA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  85. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Rhode%20Island → state=RI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  86. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Rhode%20Island → state=RI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  87. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=South%20Carolina → state=SC}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  88. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=South%20Carolina → state=SC}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  89. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=South%20Dakota → state=SD}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  90. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=South%20Dakota → state=SD}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  91. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Tennessee → state=TN}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  92. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Tennessee → state=TN}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  93. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Texas → state=TX}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  94. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Texas → state=TX}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  95. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Utah → state=UT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  96. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Utah → state=UT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  97. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Virginia → state=VA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  98. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Virginia → state=VA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  99. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Vermont → state=VT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  100. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Vermont → state=VT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  101. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Washington → state=WA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  102. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Washington → state=WA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  103. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Wisconsin → state=WI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  104. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Wisconsin → state=WI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  105. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=West%20Virginia → state=WV}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  106. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=West%20Virginia → state=WV}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  107. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Wyoming → state=WY}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  108. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=amy2018/release_version=1/{state=Wyoming → state=WY}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  109. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Alaska → state=AK}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  110. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Alaska → state=AK}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  111. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Alabama → state=AL}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  112. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Alabama → state=AL}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  113. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Arkansas → state=AR}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  114. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Arkansas → state=AR}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  115. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Arizona → state=AZ}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  116. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Arizona → state=AZ}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  117. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=California → state=CA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  118. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=California → state=CA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  119. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Colorado → state=CO}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  120. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Colorado → state=CO}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  121. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Connecticut → state=CT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  122. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Connecticut → state=CT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  123. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=District%20of%20Columbia → state=DC}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  124. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=District%20of%20Columbia → state=DC}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  125. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Delaware → state=DE}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  126. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Delaware → state=DE}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  127. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Florida → state=FL}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  128. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Florida → state=FL}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  129. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Georgia → state=GA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  130. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Georgia → state=GA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  131. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Hawaii → state=HI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  132. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Hawaii → state=HI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  133. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Iowa → state=IA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  134. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Iowa → state=IA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  135. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Idaho → state=ID}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  136. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Idaho → state=ID}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  137. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Illinois → state=IL}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  138. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Illinois → state=IL}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  139. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Indiana → state=IN}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  140. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Indiana → state=IN}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  141. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Kansas → state=KS}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  142. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Kansas → state=KS}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  143. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Kentucky → state=KY}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  144. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Kentucky → state=KY}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  145. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Louisiana → state=LA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  146. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Louisiana → state=LA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  147. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Massachusetts → state=MA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  148. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Massachusetts → state=MA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  149. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Maryland → state=MD}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  150. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Maryland → state=MD}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  151. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Maine → state=ME}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  152. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Maine → state=ME}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  153. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Michigan → state=MI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  154. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Michigan → state=MI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  155. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Minnesota → state=MN}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  156. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Minnesota → state=MN}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  157. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Missouri → state=MO}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  158. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Missouri → state=MO}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  159. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Mississippi → state=MS}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  160. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Mississippi → state=MS}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  161. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Montana → state=MT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  162. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Montana → state=MT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  163. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=North%20Carolina → state=NC}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  164. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=North%20Carolina → state=NC}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  165. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=North%20Dakota → state=ND}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  166. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=North%20Dakota → state=ND}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  167. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Nebraska → state=NE}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  168. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Nebraska → state=NE}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  169. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20Hampshire → state=NH}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  170. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20Hampshire → state=NH}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  171. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20Jersey → state=NJ}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  172. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20Jersey → state=NJ}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  173. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20Mexico → state=NM}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  174. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20Mexico → state=NM}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  175. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Nevada → state=NV}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  176. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Nevada → state=NV}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  177. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20York → state=NY}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  178. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=New%20York → state=NY}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  179. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Ohio → state=OH}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  180. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Ohio → state=OH}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  181. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Oklahoma → state=OK}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  182. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Oklahoma → state=OK}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  183. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Oregon → state=OR}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  184. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Oregon → state=OR}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  185. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Pennsylvania → state=PA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  186. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Pennsylvania → state=PA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  187. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Rhode%20Island → state=RI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  188. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Rhode%20Island → state=RI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  189. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=South%20Carolina → state=SC}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  190. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=South%20Carolina → state=SC}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  191. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=South%20Dakota → state=SD}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  192. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=South%20Dakota → state=SD}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  193. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Tennessee → state=TN}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  194. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Tennessee → state=TN}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  195. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Texas → state=TX}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  196. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Texas → state=TX}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  197. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Utah → state=UT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  198. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Utah → state=UT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  199. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Virginia → state=VA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  200. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Virginia → state=VA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  201. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Vermont → state=VT}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  202. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Vermont → state=VT}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  203. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Washington → state=WA}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  204. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Washington → state=WA}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  205. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Wisconsin → state=WI}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  206. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Wisconsin → state=WI}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  207. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=West%20Virginia → state=WV}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  208. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=West%20Virginia → state=WV}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  209. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Wyoming → state=WY}/d1454abff0d94c8090af7b3e923c473b-0.parquet +0 -0
  210. /buildstock_fetch/data/building_data/combined_metadata.parquet/product=comstock/release_year=2021/weather_file=tmy3/release_version=1/{state=Wyoming → state=WY}/dcd864cc169b4695be2b9775b1a054ae-0.parquet +0 -0
  211. {buildstock_fetch-1.0.4.dist-info → buildstock_fetch-1.2.0.dist-info}/WHEEL +0 -0
  212. {buildstock_fetch-1.0.4.dist-info → buildstock_fetch-1.2.0.dist-info}/entry_points.txt +0 -0
  213. {buildstock_fetch-1.0.4.dist-info → buildstock_fetch-1.2.0.dist-info}/licenses/LICENSE +0 -0
  214. {buildstock_fetch-1.0.4.dist-info → buildstock_fetch-1.2.0.dist-info}/top_level.txt +0 -0
buildstock_fetch/main.py CHANGED
@@ -1,5 +1,6 @@
 import concurrent.futures
 import json
+import tempfile
 import zipfile
 from dataclasses import asdict, dataclass
 from importlib.resources import files
@@ -58,10 +59,37 @@ class NoAnnualLoadCurveError(ValueError):
     pass


+class NoAggregateLoadCurveError(ValueError):
+    """Raised when no monthly load curve is available for a given release."""
+
+    pass
+
+
+class UnknownAggregationFunctionError(ValueError):
+    """Raised when an unknown aggregation function is provided."""
+
+    pass
+
+
+class NoWeatherFileError(ValueError):
+    """Raised when weather file is not available for a release."""
+
+    pass
+
+
 METADATA_DIR = Path(
     str(files("buildstock_fetch").joinpath("data").joinpath("building_data").joinpath("combined_metadata.parquet"))
 )
 RELEASE_JSON_FILE = Path(str(files("buildstock_fetch").joinpath("data").joinpath("buildstock_releases.json")))
+LOAD_CURVE_COLUMN_AGGREGATION = Path(
+    str(
+        files("buildstock_fetch")
+        .joinpath("data")
+        .joinpath("load_curve_column_map")
+        .joinpath("2024_resstock_load_curve_columns.csv")
+    )
+)
+WEATHER_FILE_DIR = Path(str(files("buildstock_fetch").joinpath("data").joinpath("weather_station_map")))


 @dataclass
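
For context, a minimal sketch of how downstream code might guard against the new error types. Only the exception names come from this diff; the fetch_building_data wrapper is hypothetical.

# Hedged sketch: catching the exception classes added in this release.
# `fetch_building_data` is a hypothetical caller-side function, not part of this diff.
from buildstock_fetch.main import (
    NoAggregateLoadCurveError,
    NoWeatherFileError,
    UnknownAggregationFunctionError,
)

def fetch_building_data(bldg_id: int) -> None:
    ...  # placeholder for whatever download logic the caller runs

try:
    fetch_building_data(bldg_id=7)
except NoAggregateLoadCurveError:
    print("This release has no aggregate load curve; skipping.")
except NoWeatherFileError:
    print("No weather file is published for this release.")
except UnknownAggregationFunctionError:
    print("The aggregation rules CSV contains an unsupported function.")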
@@ -74,6 +102,7 @@ class RequestedFileTypes:
     load_curve_daily: bool = False
     load_curve_monthly: bool = False
     load_curve_annual: bool = False
+    weather: bool = False


 @dataclass
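
A minimal usage sketch of the extended dataclass; the field names come from the diff, how the object is consumed is up to the caller.

# Hedged sketch: requesting weather files alongside annual load curves.
from buildstock_fetch.main import RequestedFileTypes

requested = RequestedFileTypes(load_curve_annual=True, weather=True)
print(requested)  # all other flags keep their False defaults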
@@ -234,6 +263,10 @@ class BuildingID:
         else:
             return ""

+    def get_aggregate_load_curve_url(self) -> str:
+        """Generate the S3 download URL for this building. The url is the same as the 15-minute load curve url."""
+        return self.get_15min_load_curve_url()
+
     def get_annual_load_curve_url(self) -> str:
         """Generate the S3 download URL for this building."""
         if not self._validate_requested_file_type_availability("load_curve_annual"):
@@ -249,6 +282,85 @@ class BuildingID:
         else:
             return ""

+    def get_weather_file_url(self) -> str:
+        """Generate the S3 download URL for this building."""
+        if self.get_weather_station_name() == "":
+            return ""
+        return self._build_weather_url()
+
+    def _build_weather_url(self) -> str:
+        """Build the weather file URL based on release year and weather type."""
+        if self.release_year == "2021":
+            return self._build_2021_weather_url()
+        elif self.release_year == "2022":
+            return self._build_2022_weather_url()
+        elif self.release_year == "2023":
+            return self._build_2023_weather_url()
+        elif self.release_year == "2024":
+            return self._build_2024_weather_url()
+        elif self.release_year == "2025":
+            return self._build_2025_weather_url()
+        else:
+            return ""
+
+    def _build_2021_weather_url(self) -> str:
+        """Build weather URL for 2021 release."""
+        if self.weather == "tmy3":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_tmy3.csv"
+        elif self.weather == "amy2018":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2018.csv"
+        elif self.weather == "amy2012":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2012.csv"
+        else:
+            return ""
+
+    def _build_2022_weather_url(self) -> str:
+        """Build weather URL for 2022 release."""
+        if self.weather == "tmy3":
+            return f"{self.base_url}weather/state={self.state}/{self.get_weather_station_name()}_TMY3.csv"
+        elif self.weather == "amy2018":
+            return f"{self.base_url}weather/state={self.state}/{self.get_weather_station_name()}_2018.csv"
+        elif self.weather == "amy2012":
+            return f"{self.base_url}weather/state={self.state}/{self.get_weather_station_name()}_2012.csv"
+        else:
+            return ""
+
+    def _build_2023_weather_url(self) -> str:
+        """Build weather URL for 2023 release."""
+        if self.weather == "tmy3":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_TMY3.csv"
+        elif self.weather == "amy2018":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2018.csv"
+        elif self.weather == "amy2012":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2012.csv"
+        else:
+            return ""
+
+    def _build_2024_weather_url(self) -> str:
+        """Build weather URL for 2024 release."""
+        if self.res_com == "comstock" and self.weather == "amy2018":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2018.csv"
+        else:
+            if self.weather == "tmy3":
+                return f"{self.base_url}weather/state={self.state}/{self.get_weather_station_name()}_TMY3.csv"
+            elif self.weather == "amy2018":
+                return f"{self.base_url}weather/state={self.state}/{self.get_weather_station_name()}_2018.csv"
+            elif self.weather == "amy2012":
+                return f"{self.base_url}weather/state={self.state}/{self.get_weather_station_name()}_2012.csv"
+            else:
+                return ""
+
+    def _build_2025_weather_url(self) -> str:
+        """Build weather URL for 2025 release."""
+        if self.weather == "tmy3":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_TMY3.csv"
+        elif self.weather == "amy2018":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2018.csv"
+        elif self.weather == "amy2012":
+            return f"{self.base_url}weather/{self.weather}/{self.get_weather_station_name()}_2012.csv"
+        else:
+            return ""
+
     def get_annual_load_curve_filename(self) -> str:
         """Generate the filename for the annual load curve."""
         if self.release_year == "2021":
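
To illustrate how the year-specific builders are reached, a hedged sketch follows. fetch_bldg_ids and get_weather_file_url appear in this diff; the argument values are illustrative and may not match a real release.

# Hedged sketch: fetch building IDs for one state and resolve their weather-file URLs.
from buildstock_fetch.main import fetch_bldg_ids

bldg_ids = fetch_bldg_ids(
    product="resstock",       # illustrative values; check the release JSON for valid combinations
    release_year="2022",      # routes URL building through _build_2022_weather_url()
    weather_file="amy2018",
    release_version="1",
    state="CO",
    upgrade_id="0",
)
for bldg in bldg_ids[:3]:
    url = bldg.get_weather_file_url()  # empty string when no station mapping exists
    print(bldg.bldg_id, url or "no weather file available")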
@@ -278,6 +390,28 @@ class BuildingID:
         else:
             return ""

+    def get_weather_station_name(self) -> str:
+        """Get the weather station name for this building."""
+        weather_map_df = pl.read_parquet(WEATHER_FILE_DIR)
+
+        # Filter by multiple fields for a more specific match
+        weather_station_map = weather_map_df.filter(
+            (pl.col("product") == self.res_com)
+            & (pl.col("release_year") == self.release_year)
+            & (pl.col("weather_file") == self.weather)
+            & (pl.col("release_version") == self.release_number)
+            & (pl.col("bldg_id") == self.bldg_id)
+        )
+
+        # Check if we found a match
+        if weather_station_map.height > 0:
+            # Return the weather station name from the first (and should be only) match
+            weather_station_name = weather_station_map.select("weather_station_name").item()
+            return str(weather_station_name) if weather_station_name is not None else ""
+        else:
+            # No match found, return empty string
+            return ""
+
     def _build_annual_load_state_url(self) -> str:
         """Build the state-level URL for annual load curve data.

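
The lookup above is a plain predicate filter over the bundled weather_station_map parquet. A self-contained sketch of the same Polars pattern against an in-memory frame; the column names mirror the diff, the rows are invented sample data.

# Self-contained sketch of the station lookup pattern used in get_weather_station_name().
import polars as pl

weather_map_df = pl.DataFrame(
    {
        "product": ["resstock", "comstock"],
        "release_year": ["2022", "2021"],
        "weather_file": ["amy2018", "tmy3"],
        "release_version": ["1", "1"],
        "bldg_id": [12345, 67890],
        "weather_station_name": ["USA_CO_Denver.Example.Station", "USA_AK_Anchorage.Example.Station"],
    }
)

match = weather_map_df.filter(
    (pl.col("product") == "resstock")
    & (pl.col("release_year") == "2022")
    & (pl.col("weather_file") == "amy2018")
    & (pl.col("release_version") == "1")
    & (pl.col("bldg_id") == 12345)
)
station = match.select("weather_station_name").item() if match.height > 0 else ""
print(station)  # USA_CO_Denver.Example.Station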
@@ -405,6 +539,11 @@ def _validate_release_name(release_name: str) -> bool:
     return release_name in valid_release_names


+def _resolve_unique_metadata_urls(bldg_ids: list[BuildingID]) -> list[str]:
+    """Resolve the unique metadata URLs for a list of building IDs."""
+    return list({bldg_id.get_metadata_url() for bldg_id in bldg_ids})
+
+
 def fetch_bldg_ids(
     product: str, release_year: str, weather_file: str, release_version: str, state: str, upgrade_id: str
 ) -> list[BuildingID]:
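
The new helper deduplicates with a set comprehension, so buildings that share a metadata file collapse to one URL and the returned order is not guaranteed. A tiny illustrative sketch; MockBuilding is invented for the example.

# Tiny sketch of the dedup behaviour behind _resolve_unique_metadata_urls().
class MockBuilding:
    def __init__(self, url: str) -> None:
        self._url = url

    def get_metadata_url(self) -> str:
        return self._url

bldgs = [
    MockBuilding("s3://bucket/a/metadata.parquet"),
    MockBuilding("s3://bucket/a/metadata.parquet"),
    MockBuilding("s3://bucket/b/metadata.parquet"),
]
unique_urls = list({b.get_metadata_url() for b in bldgs})
print(sorted(unique_urls))  # ['s3://bucket/a/metadata.parquet', 's3://bucket/b/metadata.parquet']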
@@ -474,13 +613,13 @@ def fetch_bldg_ids(
 def _download_with_progress(url: str, output_file: Path, progress: Progress, task_id: TaskID) -> int:
     """Download a file with progress tracking."""
     # Get file size first
-    response = requests.head(url, timeout=30)
+    response = requests.head(url, timeout=30, verify=True)
     response.raise_for_status()
     total_size = int(response.headers.get("content-length", 0))
     progress.update(task_id, total=total_size)

     # Download with streaming
-    response = requests.get(url, stream=True, timeout=30)
+    response = requests.get(url, stream=True, timeout=30, verify=True)
     response.raise_for_status()

     downloaded_size = 0
@@ -497,6 +636,205 @@ def _download_with_progress(url: str, output_file: Path, progress: Progress, tas
497
636
  return downloaded_size
498
637
 
499
638
 
639
+ def _download_with_progress_metadata(url: str, output_file: Path, progress: Progress, task_id: TaskID) -> int:
640
+ """Download a metadata file with progress tracking and append to existing file if it exists."""
641
+ # Get file size first
642
+ response = requests.head(url, timeout=30, verify=True)
643
+ response.raise_for_status()
644
+ total_size = int(response.headers.get("content-length", 0))
645
+ progress.update(task_id, total=total_size)
646
+
647
+ # Download with streaming
648
+ response = requests.get(url, stream=True, timeout=30, verify=True)
649
+ response.raise_for_status()
650
+
651
+ downloaded_size = 0
652
+
653
+ # Check if output file already exists
654
+ if output_file.exists():
655
+ # Read existing parquet file
656
+ existing_df = pl.read_parquet(output_file)
657
+
658
+ # Download new data to temporary file
659
+ with tempfile.NamedTemporaryFile(delete=False, suffix=".parquet") as temp_file:
660
+ temp_path = Path(temp_file.name)
661
+
662
+ try:
663
+ # Download to temp file
664
+ with open(temp_path, "wb") as file:
665
+ for chunk in response.iter_content(chunk_size=8192):
666
+ if chunk:
667
+ file.write(chunk)
668
+ downloaded_size += len(chunk)
669
+ if total_size > 0:
670
+ progress.update(task_id, completed=downloaded_size)
671
+
672
+ # Read new data
673
+ new_df = pl.read_parquet(temp_path)
674
+
675
+ # Concatenate existing and new data, removing duplicates
676
+ combined_df = pl.concat([existing_df, new_df]).unique()
677
+
678
+ # Write combined data back to original file
679
+ combined_df.write_parquet(output_file)
680
+
681
+ finally:
682
+ # Clean up temp file
683
+ if temp_path.exists():
684
+ temp_path.unlink()
685
+ else:
686
+ # File doesn't exist, download normally
687
+ with open(str(output_file), "wb") as file:
688
+ for chunk in response.iter_content(chunk_size=8192):
689
+ if chunk:
690
+ file.write(chunk)
691
+ downloaded_size += len(chunk)
692
+ if total_size > 0:
693
+ progress.update(task_id, completed=downloaded_size)
694
+
695
+ return downloaded_size
696
+
697
+
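
The merge path above reduces to: read the existing parquet, concatenate the newly downloaded rows, drop exact duplicates, and rewrite the file. A self-contained polars sketch with made-up file names and columns:

from pathlib import Path

import polars as pl

existing_path = Path("metadata.parquet")  # hypothetical target file

# Rows already on disk, then rows just downloaded (one overlapping row).
pl.DataFrame({"bldg_id": [1, 2], "state": ["AL", "AL"]}).write_parquet(existing_path)
new_rows = pl.DataFrame({"bldg_id": [2, 3], "state": ["AL", "AL"]})

combined = pl.concat([pl.read_parquet(existing_path), new_rows]).unique()
combined.write_parquet(existing_path)
print(pl.read_parquet(existing_path).sort("bldg_id"))  # bldg_id 1, 2, 3

Since .unique() only drops rows that are identical across every column, re-downloading the same state/upgrade slice is effectively idempotent.
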
698
+ def _get_time_step_grouping_key(aggregate_time_step: str) -> tuple[str, str]:
699
+ """Get the grouping key and format string for a given time step.
700
+
701
+ Args:
702
+ aggregate_time_step: The time step to aggregate to ("monthly", "hourly", "daily")
703
+
704
+ Returns:
705
+ A tuple of (grouping_key_name, format_string)
706
+ """
707
+ time_step_configs = {
708
+ "monthly": ("year_month", "%Y-%m"),
709
+ "hourly": ("year_month_day_hour", "%Y-%m-%d-%H"),
710
+ "daily": ("year_month_day", "%Y-%m-%d"),
711
+ }
712
+
713
+ if aggregate_time_step not in time_step_configs:
714
+ msg = f"Unknown aggregate time step: {aggregate_time_step}"
715
+ raise ValueError(msg)
716
+
717
+ return time_step_configs[aggregate_time_step]
718
+
719
+
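
The grouping key is simply a strftime rendering of the timestamp: rows that format to the same string land in the same bucket. A quick check of the three formats, assuming the same mapping as above:

from datetime import datetime

time_step_configs = {
    "monthly": ("year_month", "%Y-%m"),
    "hourly": ("year_month_day_hour", "%Y-%m-%d-%H"),
    "daily": ("year_month_day", "%Y-%m-%d"),
}

ts = datetime(2018, 7, 4, 13, 45)
for step, (key_name, fmt) in time_step_configs.items():
    print(step, key_name, ts.strftime(fmt))
# monthly year_month 2018-07
# hourly year_month_day_hour 2018-07-04-13
# daily year_month_day 2018-07-04
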
720
+ def _create_aggregation_expressions(load_curve: pl.DataFrame, column_aggregations: dict[str, str]) -> list[pl.Expr]:
721
+ """Create aggregation expressions for each column based on the aggregation rules.
722
+
723
+ Args:
724
+ load_curve: The DataFrame to create expressions for
725
+ column_aggregations: Dictionary mapping column names to aggregation functions
726
+
727
+ Returns:
728
+ List of Polars expressions for aggregation
729
+ """
730
+ agg_exprs = []
731
+
732
+ for col in load_curve.columns:
733
+ if col in ["timestamp", "year_month", "year_month_day_hour", "year_month_day"]:
734
+ continue
735
+
736
+ if col in column_aggregations:
737
+ agg_func = column_aggregations[col]
738
+ if agg_func == "sum":
739
+ agg_exprs.append(pl.col(col).sum().alias(col))
740
+ elif agg_func == "mean":
741
+ agg_exprs.append(pl.col(col).mean().alias(col))
742
+ elif agg_func == "first":
743
+ agg_exprs.append(pl.col(col).first().alias(col))
744
+ else:
745
+ raise UnknownAggregationFunctionError()
746
+ else:
747
+ raise UnknownAggregationFunctionError()
748
+
749
+ # Add timestamp aggregation (take the first timestamp of each group)
750
+ agg_exprs.append(pl.col("timestamp").first().alias("timestamp"))
751
+
752
+ return agg_exprs
753
+
754
+
755
+ def _aggregate_load_curve_aggregate(load_curve: pl.DataFrame, aggregate_time_step: str) -> pl.DataFrame:
756
+ """Aggregate the 15-minute load curve to specified time step based on aggregation rules."""
757
+ # Read the aggregation rules from CSV
758
+ aggregation_rules = pl.read_csv(LOAD_CURVE_COLUMN_AGGREGATION)
759
+
760
+ # Create a dictionary mapping column names to their aggregation functions
761
+ column_aggregations = dict(zip(aggregation_rules["name"], aggregation_rules["Aggregate_function"]))
762
+
763
+ # Ensure timestamp column exists and convert to datetime if needed
764
+ if "timestamp" not in load_curve.columns:
765
+ msg = "DataFrame must contain a 'timestamp' column"
766
+ raise ValueError(msg)
767
+
768
+ # Convert timestamp to datetime if it's not already
769
+ load_curve = load_curve.with_columns(pl.col("timestamp").cast(pl.Datetime))
770
+
771
+ # Get the grouping key configuration
772
+ grouping_key, format_string = _get_time_step_grouping_key(aggregate_time_step)
773
+
774
+ # Create grouping key
775
+ load_curve = load_curve.with_columns(pl.col("timestamp").dt.strftime(format_string).alias(grouping_key))
776
+
777
+ # Create aggregation expressions
778
+ agg_exprs = _create_aggregation_expressions(load_curve, column_aggregations)
779
+
780
+ # Perform the aggregation
781
+ aggregate_data = load_curve.group_by(grouping_key).agg(agg_exprs)
782
+
783
+ # Sort by timestamp and drop the grouping column
784
+ aggregate_data = aggregate_data.sort("timestamp").drop(grouping_key)
785
+
786
+ return aggregate_data
787
+
788
+
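
Putting the grouping key and the aggregation expressions together, an hourly roll-up of a tiny 15-minute series looks roughly like the sketch below. The column name and the sum rule are illustrative; the real rules come from the LOAD_CURVE_COLUMN_AGGREGATION CSV.

from datetime import datetime, timedelta

import polars as pl

# Eight 15-minute intervals spanning two hours, 1 kWh each.
start = datetime(2018, 1, 1, 0, 0)
load_curve = pl.DataFrame({
    "timestamp": [start + timedelta(minutes=15 * i) for i in range(8)],
    "electricity_kwh": [1.0] * 8,
})

grouping_key, fmt = "year_month_day_hour", "%Y-%m-%d-%H"
load_curve = load_curve.with_columns(pl.col("timestamp").dt.strftime(fmt).alias(grouping_key))

hourly = (
    load_curve.group_by(grouping_key)
    .agg([
        pl.col("electricity_kwh").sum().alias("electricity_kwh"),
        pl.col("timestamp").first().alias("timestamp"),
    ])
    .sort("timestamp")
    .drop(grouping_key)
)
print(hourly)  # two rows of 4.0 kWh each
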
789
+ def _download_and_process_aggregate(
790
+ url: str, output_file: Path, progress: Progress, task_id: TaskID, aggregate_time_step: str
791
+ ) -> int:
792
+ """Download aggregate time step load curve to temporary file, process with Polars, and save result."""
793
+ # Get file size first for progress tracking
794
+ response = requests.head(url, timeout=30, verify=True)
795
+ response.raise_for_status()
796
+ total_size = int(response.headers.get("content-length", 0))
797
+ progress.update(task_id, total=total_size)
798
+
799
+ # Download to temporary file
800
+ with tempfile.NamedTemporaryFile(delete=False, suffix=".parquet") as temp_file:
801
+ temp_path = Path(temp_file.name)
802
+
803
+ try:
804
+ # Create session with retry logic
805
+ session = requests.Session()
806
+ retry_strategy = requests.adapters.HTTPAdapter(max_retries=15)
807
+ session.mount("http://", retry_strategy)
808
+ session.mount("https://", retry_strategy)
809
+
810
+ # Download with streaming to temp file
811
+ response = session.get(url, stream=True, timeout=60, verify=True)
812
+ response.raise_for_status()
813
+
814
+ downloaded_size = 0
815
+ with open(temp_path, "wb") as file:
816
+ for chunk in response.iter_content(chunk_size=8192):
817
+ if chunk:
818
+ file.write(chunk)
819
+ downloaded_size += len(chunk)
820
+ if total_size > 0:
821
+ progress.update(task_id, completed=downloaded_size)
822
+
823
+ # Process with Polars
824
+ load_curve_15min = pl.read_parquet(temp_path)
825
+ load_curve_aggregate = _aggregate_load_curve_aggregate(load_curve_15min, aggregate_time_step)
826
+
827
+ # Save processed file to final destination
828
+ load_curve_aggregate.write_parquet(output_file)
829
+
830
+ return downloaded_size
831
+
832
+ finally:
833
+ # Clean up temporary file
834
+ if temp_path.exists():
835
+ temp_path.unlink()
836
+
837
+
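
For reference, the retry-and-stream pattern used above in isolation. The URL is a placeholder and the retry count is arbitrary; note that HTTPAdapter(max_retries=N) only retries failed connections, not bad HTTP responses.

import tempfile
from pathlib import Path

import requests
from requests.adapters import HTTPAdapter

url = "https://example.com/large_file.parquet"  # placeholder

session = requests.Session()
adapter = HTTPAdapter(max_retries=5)
session.mount("http://", adapter)
session.mount("https://", adapter)

with tempfile.NamedTemporaryFile(delete=False, suffix=".parquet") as temp_file:
    temp_path = Path(temp_file.name)

try:
    response = session.get(url, stream=True, timeout=60)
    response.raise_for_status()
    with open(temp_path, "wb") as fh:
        for chunk in response.iter_content(chunk_size=8192):
            if chunk:
                fh.write(chunk)
    # ... read and process temp_path here ...
finally:
    if temp_path.exists():
        temp_path.unlink()
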
500
838
  def download_bldg_data(
501
839
  bldg_id: BuildingID,
502
840
  file_type: RequestedFileTypes,
@@ -541,7 +879,7 @@ def download_bldg_data(
541
879
  if progress and task_id is not None:
542
880
  _download_with_progress(download_url, output_file, progress, task_id)
543
881
  else:
544
- response = requests.get(download_url, timeout=30)
882
+ response = requests.get(download_url, timeout=30, verify=True)
545
883
  response.raise_for_status()
546
884
  output_file.write_bytes(response.content)
547
885
 
@@ -563,7 +901,7 @@ def download_bldg_data(
563
901
  / bldg_id.get_release_name()
564
902
  / "hpxml"
565
903
  / bldg_id.state
566
- / f"up{str(int(bldg_id.upgrade_id)).zfill(2)}"
904
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
567
905
  / new_name
568
906
  )
569
907
  new_path.parent.mkdir(parents=True, exist_ok=True)
@@ -584,7 +922,7 @@ def download_bldg_data(
584
922
  / bldg_id.get_release_name()
585
923
  / "schedule"
586
924
  / bldg_id.state
587
- / f"up{str(int(bldg_id.upgrade_id)).zfill(2)}"
925
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
588
926
  / new_name
589
927
  )
590
928
  new_path.parent.mkdir(parents=True, exist_ok=True)
@@ -598,33 +936,6 @@ def download_bldg_data(
598
936
  return downloaded_paths
599
937
 
600
938
 
601
- def download_metadata(bldg_id: BuildingID, output_dir: Path) -> Path:
602
- """Download the metadata for a given building.
603
-
604
- Args:
605
- bldg_id: A BuildingID object to download metadata for.
606
- output_dir: Directory to save the downloaded metadata.
607
- """
608
-
609
- download_url = bldg_id.get_metadata_url()
610
- if download_url == "":
611
- message = f"Metadata is not available for {bldg_id.get_release_name()}"
612
- raise NoMetadataError(message)
613
- response = requests.get(download_url, timeout=30)
614
- response.raise_for_status()
615
- output_file = (
616
- output_dir
617
- / bldg_id.get_release_name()
618
- / "metadata"
619
- / bldg_id.state
620
- / f"up{str(int(bldg_id.upgrade_id)).zfill(2)}"
621
- / "metadata.parquet"
622
- )
623
- output_file.parent.mkdir(parents=True, exist_ok=True)
624
- output_file.write_bytes(response.content)
625
- return output_file
626
-
627
-
628
939
  def download_15min_load_curve(bldg_id: BuildingID, output_dir: Path) -> Path:
629
940
  """Download the 15 min load profile timeseries for a given building.
630
941
 
@@ -637,15 +948,15 @@ def download_15min_load_curve(bldg_id: BuildingID, output_dir: Path) -> Path:
637
948
  if download_url == "":
638
949
  message = f"15 min load profile timeseries is not available for {bldg_id.get_release_name()}"
639
950
  raise No15minLoadCurveError(message)
640
- response = requests.get(download_url, timeout=30)
951
+ response = requests.get(download_url, timeout=30, verify=True)
641
952
  response.raise_for_status()
642
953
  output_file = (
643
954
  output_dir
644
955
  / bldg_id.get_release_name()
645
956
  / "load_curve_15min"
646
- / bldg_id.state
647
- / f"up{str(int(bldg_id.upgrade_id)).zfill(2)}"
648
- / f"bldg{str(bldg_id.bldg_id).zfill(7)}-up{str(int(bldg_id.upgrade_id)).zfill(2)}_load_curve_15min.parquet"
957
+ / f"state={bldg_id.state}"
958
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
959
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_15min.parquet"
649
960
  )
650
961
  output_file.parent.mkdir(parents=True, exist_ok=True)
651
962
  output_file.write_bytes(response.content)
@@ -675,9 +986,9 @@ def download_15min_load_curve_with_progress(
675
986
  output_dir
676
987
  / bldg_id.get_release_name()
677
988
  / "load_curve_15min"
678
- / bldg_id.state
679
- / f"up{str(int(bldg_id.upgrade_id)).zfill(2)}"
680
- / f"bldg{str(bldg_id.bldg_id).zfill(7)}-up{str(int(bldg_id.upgrade_id)).zfill(2)}_load_curve_15min.parquet"
989
+ / f"state={bldg_id.state}"
990
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
991
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_15min.parquet"
681
992
  )
682
993
  output_file.parent.mkdir(parents=True, exist_ok=True)
683
994
 
@@ -685,13 +996,73 @@ def download_15min_load_curve_with_progress(
685
996
  if progress and task_id is not None:
686
997
  _download_with_progress(download_url, output_file, progress, task_id)
687
998
  else:
688
- response = requests.get(download_url, timeout=30)
999
+ response = requests.get(download_url, timeout=30, verify=True)
689
1000
  response.raise_for_status()
690
1001
  output_file.write_bytes(response.content)
691
1002
 
692
1003
  return output_file
693
1004
 
694
1005
 
1006
+ def download_aggregate_time_step_load_curve_with_progress(
1007
+ bldg_id: BuildingID,
1008
+ output_dir: Path,
1009
+ progress: Optional[Progress],
1010
+ task_id: Optional[TaskID],
1011
+ aggregate_time_step: str,
1012
+ ) -> Path:
1013
+ """Download the aggregate time step load profile timeseries for a given building with progress tracking."""
1014
+
1015
+ download_url = bldg_id.get_aggregate_load_curve_url()
1016
+ if download_url == "":
1017
+ message = f"Aggregate load profile timeseries is not available for {bldg_id.get_release_name()}"
1018
+ raise NoAggregateLoadCurveError(message)
1019
+
1020
+ if aggregate_time_step == "monthly":
1021
+ load_curve_dir = "load_curve_monthly"
1022
+ elif aggregate_time_step == "hourly":
1023
+ load_curve_dir = "load_curve_hourly"
1024
+ elif aggregate_time_step == "daily":
1025
+ load_curve_dir = "load_curve_daily"
1026
+ else:
1027
+ message = f"Unknown aggregate time step: {aggregate_time_step}"
1028
+ raise ValueError(message)
1029
+
1030
+ output_file = (
1031
+ output_dir
1032
+ / bldg_id.get_release_name()
1033
+ / load_curve_dir
1034
+ / f"state={bldg_id.state}"
1035
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1036
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_{aggregate_time_step}.parquet"
1037
+ )
1038
+
1039
+ output_file.parent.mkdir(parents=True, exist_ok=True)
1040
+
1041
+ # Download with progress tracking if progress object is provided
1042
+ if progress and task_id is not None:
1043
+ _download_and_process_aggregate(download_url, output_file, progress, task_id, aggregate_time_step)
1044
+ else:
1045
+ # For non-progress downloads, still use temp file approach for consistency
1046
+ with tempfile.NamedTemporaryFile(delete=False, suffix=".parquet") as temp_file:
1047
+ temp_path = Path(temp_file.name)
1048
+ try:
1049
+ response = requests.get(download_url, timeout=30, verify=True)
1050
+ response.raise_for_status()
1051
+ temp_path.write_bytes(response.content)
1052
+
1053
+ # Process with Polars
1054
+ load_curve_15min = pl.read_parquet(temp_path)
1055
+ load_curve_aggregate = _aggregate_load_curve_aggregate(load_curve_15min, aggregate_time_step)
1056
+
1057
+ # Save processed file to final destination
1058
+ load_curve_aggregate.write_parquet(output_file)
1059
+ finally:
1060
+ if temp_path.exists():
1061
+ temp_path.unlink()
1062
+
1063
+ return output_file
1064
+
1065
+
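
A hedged usage sketch of the function above without progress tracking. The BuildingID arguments mirror the example at the bottom of this module; the import path is an assumption based on the package layout, and the call will raise NoAggregateLoadCurveError if the release has no aggregate load curve URL.

from pathlib import Path

from buildstock_fetch.main import (  # import path assumed
    BuildingID,
    download_aggregate_time_step_load_curve_with_progress,
)

bldg = BuildingID(
    bldg_id=67, release_year="2024", res_com="comstock", weather="tmy3", upgrade_id="0", release_number="2"
)

# Passing None for progress and task_id takes the non-progress branch shown above.
output_path = download_aggregate_time_step_load_curve_with_progress(bldg, Path("data"), None, None, "monthly")
print(output_path)
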
695
1066
  def _parse_requested_file_type(file_type: tuple[str, ...]) -> RequestedFileTypes:
696
1067
  """Parse the file type string into a RequestedFileTypes object."""
697
1068
  file_type_obj = RequestedFileTypes()
@@ -711,9 +1082,38 @@ def _parse_requested_file_type(file_type: tuple[str, ...]) -> RequestedFileTypes
711
1082
  file_type_obj.load_curve_monthly = True
712
1083
  if "load_curve_annual" in file_type:
713
1084
  file_type_obj.load_curve_annual = True
1085
+ if "weather" in file_type:
1086
+ file_type_obj.weather = True
714
1087
  return file_type_obj
715
1088
 
716
1089
 
1090
+ def _process_metadata_results(bldg_ids: list[BuildingID], output_dir: Path, downloaded_paths: list[Path]) -> None:
1091
+ """Process the results of a completed metadata download."""
1092
+ metadata_to_bldg_id_mapping: dict[Path, list[int]] = {}
1093
+ for bldg_id in bldg_ids:
1094
+ output_file = (
1095
+ output_dir
1096
+ / bldg_id.get_release_name()
1097
+ / "metadata"
1098
+ / f"state={bldg_id.state}"
1099
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1100
+ / "metadata.parquet"
1101
+ )
1102
+ if output_file in downloaded_paths:
1103
+ if output_file in metadata_to_bldg_id_mapping:
1104
+ metadata_to_bldg_id_mapping[output_file].append(bldg_id.bldg_id)
1105
+ else:
1106
+ metadata_to_bldg_id_mapping[output_file] = [bldg_id.bldg_id]
1107
+
1108
+ for metadata_file, bldg_id_list in metadata_to_bldg_id_mapping.items():
1109
+ # Use scan_parquet for lazy evaluation and better memory efficiency
1110
+ metadata_df_filtered = pl.scan_parquet(metadata_file).filter(pl.col("bldg_id").is_in(bldg_id_list)).collect()
1111
+ # Write the filtered dataframe back to the same file
1112
+ metadata_df_filtered.write_parquet(metadata_file)
1113
+
1114
+ return
1115
+
1116
+
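
The filtering step above in isolation: lazily scan the downloaded parquet, keep only the requested building IDs, and overwrite the file. Paths, columns, and IDs here are illustrative.

from pathlib import Path

import polars as pl

metadata_file = Path("metadata.parquet")  # hypothetical downloaded file
pl.DataFrame({"bldg_id": [1, 2, 3, 4], "state": ["AL"] * 4}).write_parquet(metadata_file)

requested_ids = [2, 4]
filtered = pl.scan_parquet(metadata_file).filter(pl.col("bldg_id").is_in(requested_ids)).collect()
filtered.write_parquet(metadata_file)
print(pl.read_parquet(metadata_file))  # only bldg_id 2 and 4 remain
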
717
1117
  def _process_download_results(
718
1118
  future: concurrent.futures.Future,
719
1119
  bldg_id: BuildingID,
@@ -740,37 +1140,73 @@ def _process_download_results(
740
1140
  console.print(f"[red]Download failed for bldg_id {bldg_id}: {e}[/red]")
741
1141
 
742
1142
 
743
- def _download_metadata_with_progress(bldg: BuildingID, output_dir: Path, progress: Progress) -> Path:
1143
+ def _download_metadata_with_progress(
1144
+ bldg_ids: list[BuildingID],
1145
+ output_dir: Path,
1146
+ progress: Progress,
1147
+ downloaded_paths: list[Path],
1148
+ failed_downloads: list[str],
1149
+ console: Console,
1150
+ ) -> tuple[list[Path], list[str]]:
744
1151
  """Download metadata file with progress tracking."""
745
- download_url = bldg.get_metadata_url()
746
- if download_url == "":
747
- message = f"Metadata is not available for {bldg.get_release_name()}"
748
- raise NoMetadataError(message)
1152
+ metadata_urls = _resolve_unique_metadata_urls(bldg_ids)
1153
+ downloaded_urls: list[str] = []
1154
+ for bldg_id in bldg_ids:
1155
+ output_file = (
1156
+ output_dir
1157
+ / bldg_id.get_release_name()
1158
+ / "metadata"
1159
+ / f"state={bldg_id.state}"
1160
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1161
+ / "metadata.parquet"
1162
+ )
1163
+ download_url = bldg_id.get_metadata_url()
1164
+ if download_url == "":
1165
+ failed_downloads.append(str(output_file))
1166
+ continue
1167
+ if download_url in downloaded_urls:
1168
+ continue
1169
+ downloaded_urls.append(download_url)
1170
+ if download_url in metadata_urls:
1171
+ metadata_urls.remove(download_url)
1172
+ metadata_task = progress.add_task(
1173
+ f"[yellow]Downloading metadata: {download_url}",
1174
+ total=0, # Will be updated when we get the file size
1175
+ )
1176
+ # Get file size first
1177
+ response = requests.head(download_url, timeout=30)
1178
+ response.raise_for_status()
1179
+ total_size = int(response.headers.get("content-length", 0))
1180
+ progress.update(metadata_task, total=total_size)
749
1181
 
750
- # Create metadata task with progress tracking
751
- metadata_task = progress.add_task(
752
- "[yellow]Downloading metadata",
753
- total=0, # Will be updated when we get the file size
754
- )
1182
+ output_file.parent.mkdir(parents=True, exist_ok=True)
1183
+ try:
1184
+ _download_with_progress_metadata(download_url, output_file, progress, metadata_task)
1185
+ downloaded_paths.append(output_file)
1186
+ except Exception as e:
1187
+ failed_downloads.append(str(output_file))
1188
+ console.print(f"[red]Download failed for metadata {bldg_id.bldg_id}: {e}[/red]")
1189
+
1190
+ return downloaded_paths, failed_downloads
755
1191
 
756
- # Get file size first
757
- response = requests.head(download_url, timeout=30)
758
- response.raise_for_status()
759
- total_size = int(response.headers.get("content-length", 0))
760
- progress.update(metadata_task, total=total_size)
761
1192
 
762
- # Download with progress
1193
+ def download_weather_file_with_progress(
1194
+ bldg_id: BuildingID, output_dir: Path, progress: Progress, task_id: TaskID
1195
+ ) -> Path:
1196
+ """Download weather file with progress tracking."""
1197
+ download_url = bldg_id.get_weather_file_url()
1198
+ if download_url == "":
1199
+ raise NoWeatherFileError()
763
1200
  output_file = (
764
1201
  output_dir
765
- / bldg.get_release_name()
766
- / "metadata"
767
- / bldg.state
768
- / f"up{str(int(bldg.upgrade_id)).zfill(2)}"
769
- / "metadata.parquet"
1202
+ / bldg_id.get_release_name()
1203
+ / "weather"
1204
+ / f"state={bldg_id.state}"
1205
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1206
+ / f"{bldg_id.get_weather_station_name()}.csv"
770
1207
  )
771
1208
  output_file.parent.mkdir(parents=True, exist_ok=True)
772
- _download_with_progress(download_url, output_file, progress, metadata_task)
773
-
1209
+ _download_with_progress(download_url, output_file, progress, task_id)
774
1210
  return output_file
775
1211
 
776
1212
 
@@ -851,8 +1287,9 @@ def _download_15min_load_curves_parallel(
851
1287
  output_dir
852
1288
  / bldg_id.get_release_name()
853
1289
  / "load_curve_15min"
854
- / bldg_id.state
855
- / f"bldg{str(bldg_id.bldg_id).zfill(7)}-up{str(int(bldg_id.upgrade_id)).zfill(2)}_load_curve_15min.parquet"
1290
+ / f"state={bldg_id.state}"
1291
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1292
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_15min.parquet"
856
1293
  )
857
1294
  failed_downloads.append(str(output_file))
858
1295
  console.print(f"[red]15 min load curve not available for {bldg_id.get_release_name()}[/red]")
@@ -862,26 +1299,175 @@ def _download_15min_load_curves_parallel(
862
1299
  output_dir
863
1300
  / bldg_id.get_release_name()
864
1301
  / "load_curve_15min"
865
- / bldg_id.state
866
- / f"bldg{str(bldg_id.bldg_id).zfill(7)}-up{str(int(bldg_id.upgrade_id)).zfill(2)}_load_curve_15min.parquet"
1302
+ / f"state={bldg_id.state}"
1303
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1304
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_15min.parquet"
867
1305
  )
868
1306
  failed_downloads.append(str(output_file))
869
1307
  console.print(f"[red]Download failed for 15 min load curve {bldg_id.bldg_id}: {e}[/red]")
870
1308
 
871
1309
 
872
- def _download_metadata_single(
1310
+ def _create_batch_progress_tasks(
1311
+ bldg_ids: list[BuildingID], aggregate_time_step: str, progress: Progress, console: Console
1312
+ ) -> dict[int, TaskID]:
1313
+ """Create progress tasks for batch processing."""
1314
+ batch_size = 100
1315
+ num_batches = (len(bldg_ids) + batch_size - 1) // batch_size
1316
+ console.print(f"[blue]Using batch processing: {len(bldg_ids)} buildings split into {num_batches} batches[/blue]")
1317
+
1318
+ load_curve_tasks = {}
1319
+ for i in range(num_batches):
1320
+ # Calculate how many buildings are in this batch
1321
+ start_idx = i * batch_size
1322
+ end_idx = min(start_idx + batch_size, len(bldg_ids))
1323
+ batch_count = end_idx - start_idx
1324
+
1325
+ console.print(f"[blue]Batch {i + 1}/{num_batches}: {batch_count} buildings[/blue]")
1326
+
1327
+ task_id = progress.add_task(
1328
+ f"[magenta]Batch {i + 1}/{num_batches} ({aggregate_time_step})",
1329
+ total=batch_count, # Set total to the number of buildings in this batch
1330
+ )
1331
+ load_curve_tasks[i] = task_id
1332
+
1333
+ return load_curve_tasks
1334
+
1335
+
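
The batch arithmetic above in a compact, runnable form: ceil-divide the building count by the batch size and give each batch a task whose total equals the number of buildings it contains. The integer list stands in for BuildingID objects.

from rich.console import Console
from rich.progress import Progress

bldg_ids = list(range(250))  # stand-in for 250 buildings
batch_size = 100
num_batches = (len(bldg_ids) + batch_size - 1) // batch_size  # ceil division -> 3

console = Console()
with Progress(console=console) as progress:
    tasks = {}
    for i in range(num_batches):
        start, end = i * batch_size, min((i + 1) * batch_size, len(bldg_ids))
        tasks[i] = progress.add_task(f"Batch {i + 1}/{num_batches}", total=end - start)
    # A completed download would then advance its batch task by one:
    for task_id in tasks.values():
        progress.update(task_id, advance=1)
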
1336
+ def _create_individual_progress_tasks(bldg_ids: list[BuildingID], progress: Progress) -> dict[int, TaskID]:
1337
+ """Create progress tasks for individual building processing."""
1338
+ load_curve_tasks = {}
1339
+ for i, bldg_id in enumerate(bldg_ids):
1340
+ task_id = progress.add_task(
1341
+ f"[magenta]Load curve {bldg_id.bldg_id} (upgrade {bldg_id.upgrade_id})",
1342
+ total=0, # Will be updated when we get the file size
1343
+ )
1344
+ load_curve_tasks[i] = task_id
1345
+ return load_curve_tasks
1346
+
1347
+
1348
+ def _download_aggregate_with_batch_progress(
1349
+ bldg_id: BuildingID, output_dir: Path, task_id: TaskID, aggregate_time_step: str, progress: Progress
1350
+ ) -> Path:
1351
+ """Download with batch progress tracking."""
1352
+ # Download the file without individual progress tracking
1353
+ result = download_aggregate_time_step_load_curve_with_progress(bldg_id, output_dir, None, None, aggregate_time_step)
1354
+ # Update batch progress by 1
1355
+ progress.update(task_id, advance=1)
1356
+ return result
1357
+
1358
+
1359
+ def _process_download_future(
1360
+ future: concurrent.futures.Future,
1361
+ bldg_id: BuildingID,
1362
+ output_dir: Path,
1363
+ aggregate_time_step: str,
1364
+ downloaded_paths: list[Path],
1365
+ failed_downloads: list[str],
1366
+ console: Console,
1367
+ ) -> None:
1368
+ """Process a completed download future."""
1369
+ try:
1370
+ output_file = future.result()
1371
+ downloaded_paths.append(output_file)
1372
+ except NoAggregateLoadCurveError:
1373
+ output_file = (
1374
+ output_dir
1375
+ / bldg_id.get_release_name()
1376
+ / "load_curve_monthly"
1377
+ / f"state={bldg_id.state}"
1378
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_monthly.parquet"
1379
+ )
1380
+ failed_downloads.append(str(output_file))
1381
+ console.print(f"[red]Monthly load curve not available for {bldg_id.get_release_name()}[/red]")
1382
+ raise
1383
+ except Exception as e:
1384
+ output_file = (
1385
+ output_dir
1386
+ / bldg_id.get_release_name()
1387
+ / "load_curve_monthly"
1388
+ / f"state={bldg_id.state}"
1389
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_monthly.parquet"
1390
+ )
1391
+ failed_downloads.append(str(output_file))
1392
+ console.print(f"[red]Download failed for monthly load curve {bldg_id.bldg_id}: {e}[/red]")
1393
+
1394
+
1395
+ def _download_aggregate_load_curves_parallel(
873
1396
  bldg_ids: list[BuildingID],
874
1397
  output_dir: Path,
1398
+ aggregate_time_step: str,
1399
+ max_workers: int,
875
1400
  progress: Progress,
876
1401
  downloaded_paths: list[Path],
1402
+ failed_downloads: list[str],
1403
+ console: Console,
1404
+ ) -> None:
1405
+ """Download monthly load curves in parallel with progress tracking."""
1406
+
1407
+ # Create progress tasks based on dataset size
1408
+ if len(bldg_ids) > 500:
1409
+ load_curve_tasks = _create_batch_progress_tasks(bldg_ids, aggregate_time_step, progress, console)
1410
+ else:
1411
+ load_curve_tasks = _create_individual_progress_tasks(bldg_ids, progress)
1412
+
1413
+ # Create download functions
1414
+ def download_aggregate_with_task_id(
1415
+ bldg_id: BuildingID, output_dir: Path, task_id: TaskID, aggregate_time_step: str
1416
+ ) -> Path:
1417
+ return download_aggregate_time_step_load_curve_with_progress(
1418
+ bldg_id, output_dir, progress, task_id, aggregate_time_step
1419
+ )
1420
+
1421
+ with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
1422
+ if len(bldg_ids) > 500:
1423
+ # Process in batches for large datasets
1424
+ batch_size = 100
1425
+ future_to_bldg = {}
1426
+
1427
+ for batch_idx in range(0, len(bldg_ids), batch_size):
1428
+ batch = bldg_ids[batch_idx : batch_idx + batch_size]
1429
+ task_id = load_curve_tasks[batch_idx // batch_size]
1430
+
1431
+ for bldg_id in batch:
1432
+ future = executor.submit(
1433
+ _download_aggregate_with_batch_progress,
1434
+ bldg_id,
1435
+ output_dir,
1436
+ task_id,
1437
+ aggregate_time_step,
1438
+ progress,
1439
+ )
1440
+ future_to_bldg[future] = bldg_id
1441
+ else:
1442
+ # Original behavior for smaller datasets
1443
+ future_to_bldg = {
1444
+ executor.submit(
1445
+ download_aggregate_with_task_id, bldg_id, output_dir, load_curve_tasks[i], aggregate_time_step
1446
+ ): bldg_id
1447
+ for i, bldg_id in enumerate(bldg_ids)
1448
+ }
1449
+
1450
+ # Process completed futures
1451
+ for future in concurrent.futures.as_completed(future_to_bldg):
1452
+ bldg_id = future_to_bldg[future]
1453
+ _process_download_future(
1454
+ future, bldg_id, output_dir, aggregate_time_step, downloaded_paths, failed_downloads, console
1455
+ )
1456
+
1457
+
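
The submit/as_completed bookkeeping used by the parallel download helpers, reduced to a generic skeleton; the work function and inputs are placeholders.

import concurrent.futures


def fake_download(item: int) -> str:
    # Placeholder for a per-building download; returns the "path" it wrote.
    return f"output/{item}.parquet"


items = [1, 2, 3]
downloaded: list[str] = []
failed: list[str] = []

with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
    future_to_item = {executor.submit(fake_download, item): item for item in items}
    for future in concurrent.futures.as_completed(future_to_item):
        item = future_to_item[future]
        try:
            downloaded.append(future.result())
        except Exception as e:  # mirrors the broad error handling above
            failed.append(f"item {item}: {e}")

print(downloaded, failed)
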
1458
+ def _download_metadata(
1459
+ bldg_ids: list[BuildingID],
1460
+ output_dir: Path,
1461
+ progress: Progress,
1462
+ downloaded_paths: list[Path],
1463
+ failed_downloads: list[str],
1464
+ console: Console,
877
1465
  ) -> None:
878
1466
  """Download metadata file (only one needed per release)."""
879
1467
  if not bldg_ids:
880
1468
  return
881
-
882
- bldg = bldg_ids[0]
883
- metadata_file = _download_metadata_with_progress(bldg, output_dir, progress)
884
- downloaded_paths.append(metadata_file)
1469
+ _download_metadata_with_progress(bldg_ids, output_dir, progress, downloaded_paths, failed_downloads, console)
1470
+ _process_metadata_results(bldg_ids, output_dir, downloaded_paths)
885
1471
 
886
1472
 
887
1473
  def download_annual_load_curve_with_progress(
@@ -912,8 +1498,8 @@ def download_annual_load_curve_with_progress(
912
1498
  output_dir
913
1499
  / bldg_id.get_release_name()
914
1500
  / "load_curve_annual"
915
- / bldg_id.state
916
- / f"up{str(int(bldg_id.upgrade_id)).zfill(2)}"
1501
+ / f"state={bldg_id.state}"
1502
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
917
1503
  / output_filename
918
1504
  )
919
1505
 
@@ -927,7 +1513,7 @@ def download_annual_load_curve_with_progress(
927
1513
  if progress and task_id is not None:
928
1514
  _download_with_progress(download_url, output_file, progress, task_id)
929
1515
  else:
930
- response = requests.get(download_url, timeout=30)
1516
+ response = requests.get(download_url, timeout=30, verify=True)
931
1517
  response.raise_for_status()
932
1518
  with open(output_file, "wb") as file:
933
1519
  file.write(response.content)
@@ -974,8 +1560,9 @@ def _download_annual_load_curves_parallel(
974
1560
  output_dir
975
1561
  / bldg_id.get_release_name()
976
1562
  / "load_curve_annual"
977
- / bldg_id.state
978
- / f"bldg{str(bldg_id.bldg_id).zfill(7)}-up{str(int(bldg_id.upgrade_id)).zfill(2)}_load_curve_annual.parquet"
1563
+ / f"state={bldg_id.state}"
1564
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1565
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_annual.parquet"
979
1566
  )
980
1567
  failed_downloads.append(str(output_file))
981
1568
  console.print(f"[red]Annual load curve not available for {bldg_id.get_release_name()}[/red]")
@@ -985,13 +1572,105 @@ def _download_annual_load_curves_parallel(
985
1572
  output_dir
986
1573
  / bldg_id.get_release_name()
987
1574
  / "load_curve_annual"
988
- / bldg_id.state
989
- / f"bldg{str(bldg_id.bldg_id).zfill(7)}-up{str(int(bldg_id.upgrade_id)).zfill(2)}_load_curve_annual.parquet"
1575
+ / f"state={bldg_id.state}"
1576
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1577
+ / f"bldg{str(bldg_id.bldg_id).zfill(7)}_load_curve_annual.parquet"
990
1578
  )
991
1579
  failed_downloads.append(str(output_file))
992
1580
  console.print(f"[red]Download failed for annual load curve {bldg_id.bldg_id}: {e}[/red]")
993
1581
 
994
1582
 
1583
+ def _download_weather_files_parallel(
1584
+ bldg_ids: list[BuildingID],
1585
+ output_dir: Path,
1586
+ max_workers: int,
1587
+ progress: Progress,
1588
+ downloaded_paths: list[Path],
1589
+ failed_downloads: list[str],
1590
+ console: Console,
1591
+ weather_states: Union[list[str], None] = None,
1592
+ ) -> None:
1593
+ """Download weather files in parallel with progress tracking."""
1594
+ # Initialize weather_states to empty list if None
1595
+ if weather_states is None:
1596
+ weather_states = []
1597
+
1598
+ # Return early if weather_states is empty
1599
+ if len(weather_states) == 0:
1600
+ for bldg_id in bldg_ids:
1601
+ output_file = (
1602
+ output_dir
1603
+ / bldg_id.get_release_name()
1604
+ / "weather"
1605
+ / f"state={bldg_id.state}"
1606
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1607
+ / f"{bldg_id.get_weather_station_name()}.csv"
1608
+ )
1609
+ failed_downloads.append(str(output_file))
1610
+ console.print(f"[red]Weather file not available for {bldg_id.get_release_name()}[/red]")
1611
+ return
1612
+ # Create progress tasks for weather file downloads
1613
+ weather_file_tasks = {}
1614
+ for i, bldg_id in enumerate(bldg_ids):
1615
+ if bldg_id.state in weather_states:
1616
+ task_id = progress.add_task(
1617
+ f"[magenta]Weather file {bldg_id.bldg_id} (upgrade {bldg_id.upgrade_id})",
1618
+ total=0, # Will be updated when we get the file size
1619
+ )
1620
+ weather_file_tasks[i] = task_id
1621
+ else:
1622
+ output_file = (
1623
+ output_dir
1624
+ / bldg_id.get_release_name()
1625
+ / "weather"
1626
+ / f"state={bldg_id.state}"
1627
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1628
+ / f"{bldg_id.get_weather_station_name()}.csv"
1629
+ )
1630
+ failed_downloads.append(str(output_file))
1631
+ console.print(f"[red]Weather file not available for {bldg_id.get_release_name()}[/red]")
1632
+
1633
+ with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
1634
+ # Create a modified version of the download function that uses the specific task IDs
1635
+ def download_weather_file_with_task_id(bldg_id: BuildingID, output_dir: Path, task_id: TaskID) -> Path:
1636
+ return download_weather_file_with_progress(bldg_id, output_dir, progress, task_id)
1637
+
1638
+ future_to_bldg = {
1639
+ executor.submit(download_weather_file_with_task_id, bldg_id, output_dir, weather_file_tasks[i]): bldg_id
1640
+ for i, bldg_id in enumerate(bldg_ids)
1641
+ }
1642
+
1643
+ for future in concurrent.futures.as_completed(future_to_bldg):
1644
+ bldg_id = future_to_bldg[future]
1645
+ try:
1646
+ output_file = future.result()
1647
+ downloaded_paths.append(output_file)
1648
+ except NoWeatherFileError:
1649
+ output_file = (
1650
+ output_dir
1651
+ / bldg_id.get_release_name()
1652
+ / "weather"
1653
+ / f"state={bldg_id.state}"
1654
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1655
+ / f"{bldg_id.get_weather_station_name()}.csv"
1656
+ )
1657
+ failed_downloads.append(str(output_file))
1658
+ console.print(f"[red]Weather file not available for {bldg_id.get_release_name()}[/red]")
1659
+ raise
1660
+ except Exception as e:
1661
+ output_file = (
1662
+ output_dir
1663
+ / bldg_id.get_release_name()
1664
+ / "weather"
1665
+ / f"state={bldg_id.state}"
1666
+ / f"upgrade={str(int(bldg_id.upgrade_id)).zfill(2)}"
1667
+ / f"{bldg_id.get_weather_station_name()}.csv"
1668
+ )
1669
+ failed_downloads.append(str(output_file))
1670
+ console.print(f"[red]Download failed for weather file {bldg_id.bldg_id}: {e}[/red]")
1671
+ raise
1672
+
1673
+
995
1674
  def _print_download_summary(downloaded_paths: list[Path], failed_downloads: list[str], console: Console) -> None:
996
1675
  """Print a summary of the download results."""
997
1676
  console.print("\n[bold green]Download complete![/bold green]")
@@ -1003,7 +1682,11 @@ def _print_download_summary(downloaded_paths: list[Path], failed_downloads: list
1003
1682
 
1004
1683
 
1005
1684
  def fetch_bldg_data(
1006
- bldg_ids: list[BuildingID], file_type: tuple[str, ...], output_dir: Path, max_workers: int = 5
1685
+ bldg_ids: list[BuildingID],
1686
+ file_type: tuple[str, ...],
1687
+ output_dir: Path,
1688
+ max_workers: int = 5,
1689
+ weather_states: Union[list[str], None] = None,
1007
1690
  ) -> tuple[list[Path], list[str]]:
1008
1691
  """Download building data for a given list of building ids
1009
1692
 
@@ -1018,17 +1701,27 @@ def fetch_bldg_data(
1018
1701
  file_type_obj = _parse_requested_file_type(file_type)
1019
1702
  console = Console()
1020
1703
 
1704
+ # Initialize weather_states to empty list if None
1705
+ if weather_states is None:
1706
+ weather_states = []
1707
+
1021
1708
  downloaded_paths: list[Path] = []
1022
1709
  failed_downloads: list[str] = []
1023
1710
 
1024
1711
  # Calculate total files to download
1025
- total_files = len(bldg_ids)
1712
+ total_files = 0
1026
1713
  if file_type_obj.metadata:
1027
- total_files += 1 # Add metadata file
1714
+ unique_metadata_urls = _resolve_unique_metadata_urls(bldg_ids)
1715
+ total_files += len(unique_metadata_urls) # Add metadata file
1028
1716
  if file_type_obj.load_curve_15min:
1029
1717
  total_files += len(bldg_ids) # Add 15-minute load curve files
1718
+ if file_type_obj.load_curve_monthly:
1719
+ total_files += len(bldg_ids) # Add monthly load curve files
1030
1720
  if file_type_obj.load_curve_annual:
1031
1721
  total_files += len(bldg_ids) # Add annual load curve files
1722
+ if file_type_obj.weather:
1723
+ available_bldg_ids = [bldg_id for bldg_id in bldg_ids if bldg_id.state in weather_states]
1724
+ total_files += len(available_bldg_ids) * len(weather_states) # Add weather files
1032
1725
 
1033
1726
  console.print(f"\n[bold blue]Starting download of {total_files} files...[/bold blue]")
1034
1727
 
@@ -1046,53 +1739,90 @@ def fetch_bldg_data(
1046
1739
  console=console,
1047
1740
  transient=False,
1048
1741
  ) as progress:
1049
- # Download building data if requested.
1050
- if file_type_obj.hpxml or file_type_obj.schedule:
1051
- _download_building_data_parallel(
1052
- bldg_ids, file_type_obj, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console
1053
- )
1742
+ _execute_downloads(
1743
+ file_type_obj,
1744
+ bldg_ids,
1745
+ output_dir,
1746
+ max_workers,
1747
+ progress,
1748
+ downloaded_paths,
1749
+ failed_downloads,
1750
+ console,
1751
+ weather_states,
1752
+ )
1054
1753
 
1055
- # Get metadata if requested. Only one building is needed to get the metadata.
1056
- if file_type_obj.metadata:
1057
- _download_metadata_single(bldg_ids, output_dir, progress, downloaded_paths)
1754
+ _print_download_summary(downloaded_paths, failed_downloads, console)
1058
1755
 
1059
- # Get 15 min load profile timeseries if requested.
1060
- if file_type_obj.load_curve_15min:
1061
- _download_15min_load_curves_parallel(
1062
- bldg_ids, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console
1063
- )
1756
+ return downloaded_paths, failed_downloads
1064
1757
 
1065
- # Get annual load curve if requested.
1066
- if file_type_obj.load_curve_annual:
1067
- _download_annual_load_curves_parallel(
1068
- bldg_ids, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console
1069
- )
1070
1758
 
1071
- _print_download_summary(downloaded_paths, failed_downloads, console)
1759
+ def _execute_downloads(
1760
+ file_type_obj: RequestedFileTypes,
1761
+ bldg_ids: list[BuildingID],
1762
+ output_dir: Path,
1763
+ max_workers: int,
1764
+ progress: Progress,
1765
+ downloaded_paths: list[Path],
1766
+ failed_downloads: list[str],
1767
+ console: Console,
1768
+ weather_states: Union[list[str], None] = None,
1769
+ ) -> None:
1770
+ """Execute all requested downloads based on file type configuration."""
1771
+ # Initialize weather_states to empty list if None
1772
+ if weather_states is None:
1773
+ weather_states = []
1774
+
1775
+ # Download building data if requested.
1776
+ if file_type_obj.hpxml or file_type_obj.schedule:
1777
+ _download_building_data_parallel(
1778
+ bldg_ids, file_type_obj, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console
1779
+ )
1072
1780
 
1073
- return downloaded_paths, failed_downloads
1781
+ # Get metadata if requested. Only one download is needed per unique metadata URL.
1782
+ if file_type_obj.metadata:
1783
+ _download_metadata(bldg_ids, output_dir, progress, downloaded_paths, failed_downloads, console)
1784
+
1785
+ # Get 15 min load profile timeseries if requested.
1786
+ if file_type_obj.load_curve_15min:
1787
+ _download_15min_load_curves_parallel(
1788
+ bldg_ids, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console
1789
+ )
1790
+
1791
+ if file_type_obj.load_curve_monthly:
1792
+ aggregate_time_step = "monthly"
1793
+ _download_aggregate_load_curves_parallel(
1794
+ bldg_ids,
1795
+ output_dir,
1796
+ aggregate_time_step,
1797
+ max_workers,
1798
+ progress,
1799
+ downloaded_paths,
1800
+ failed_downloads,
1801
+ console,
1802
+ )
1803
+
1804
+ # Get annual load curve if requested.
1805
+ if file_type_obj.load_curve_annual:
1806
+ _download_annual_load_curves_parallel(
1807
+ bldg_ids, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console
1808
+ )
1809
+
1810
+ # Get weather files if requested.
1811
+ if file_type_obj.weather:
1812
+ _download_weather_files_parallel(
1813
+ bldg_ids, output_dir, max_workers, progress, downloaded_paths, failed_downloads, console, weather_states
1814
+ )
1074
1815
 
1075
1816
 
1076
1817
  if __name__ == "__main__": # pragma: no cover
1077
1818
  bldg_ids = [
1078
1819
  BuildingID(
1079
- bldg_id=5634,
1080
- release_year="2024",
1081
- res_com="comstock",
1082
- weather="amy2018",
1083
- upgrade_id="1",
1084
- release_number="2",
1085
- state="AL",
1086
- ),
1087
- BuildingID(
1088
- bldg_id=78270,
1089
- release_year="2024",
1090
- res_com="comstock",
1091
- weather="amy2018",
1092
- upgrade_id="1",
1093
- release_number="2",
1094
- state="DE",
1820
+ bldg_id=67, release_year="2024", res_com="comstock", weather="tmy3", upgrade_id="0", release_number="2"
1095
1821
  ),
1096
1822
  ]
1097
- for bldg_id in bldg_ids:
1098
- print(bldg_id._get_county_name())
1823
+ file_type = ("weather",)
1824
+ output_dir = Path("data")
1825
+ weather_states: list[str] = []
1826
+ downloaded_paths, failed_downloads = fetch_bldg_data(bldg_ids, file_type, output_dir, weather_states=weather_states)
1827
+ print(downloaded_paths)
1828
+ print(failed_downloads)