buildz 0.6.59__tar.gz → 0.6.60__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (354)
  1. {buildz-0.6.59/buildz.egg-info → buildz-0.6.60}/PKG-INFO +1 -1
  2. {buildz-0.6.59 → buildz-0.6.60}/buildz/__init__.py +1 -1
  3. buildz-0.6.60/buildz/gpuz/test/report.txt +50 -0
  4. buildz-0.6.60/buildz/gpuz/test/test_middle.py +118 -0
  5. buildz-0.6.60/buildz/gpuz/test/test_middle_conv.py +118 -0
  6. buildz-0.6.60/buildz/gpuz/test/test_middle_conv1.py +120 -0
  7. buildz-0.6.60/buildz/gpuz/torch/__init__.py +8 -0
  8. buildz-0.6.60/buildz/gpuz/torch/middlez.py +181 -0
  9. {buildz-0.6.59 → buildz-0.6.60/buildz.egg-info}/PKG-INFO +1 -1
  10. {buildz-0.6.59 → buildz-0.6.60}/buildz.egg-info/SOURCES.txt +6 -0
  11. {buildz-0.6.59 → buildz-0.6.60}/setup.py +1 -1
  12. {buildz-0.6.59 → buildz-0.6.60}/LICENSE +0 -0
  13. {buildz-0.6.59 → buildz-0.6.60}/MANIFEST.in +0 -0
  14. {buildz-0.6.59 → buildz-0.6.60}/README.md +0 -0
  15. {buildz-0.6.59 → buildz-0.6.60}/buildz/__main__.py +0 -0
  16. {buildz-0.6.59 → buildz-0.6.60}/buildz/argx.py +0 -0
  17. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/__init__.py +0 -0
  18. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/argz.py +0 -0
  19. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/callz.py +0 -0
  20. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/conf_argz.py +0 -0
  21. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/conf_callz.py +0 -0
  22. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/evalx.py +0 -0
  23. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/init.py +0 -0
  24. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/test.py +0 -0
  25. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz/test_call.py +0 -0
  26. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/__init__.py +0 -0
  27. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/argz.py +0 -0
  28. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/build.py +0 -0
  29. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/callz.py +0 -0
  30. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/evalx.py +0 -0
  31. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/test.py +0 -0
  32. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/test_obj.py +0 -0
  33. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/tests/conf.js +0 -0
  34. {buildz-0.6.59 → buildz-0.6.60}/buildz/argz_bk/testx.py +0 -0
  35. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/__init__.py +0 -0
  36. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/cache.py +0 -0
  37. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/config.py +0 -0
  38. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/dbs.py +0 -0
  39. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/deal.py +0 -0
  40. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/deal_list.py +0 -0
  41. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/deal_type.py +0 -0
  42. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/defs.py +0 -0
  43. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/factory.py +0 -0
  44. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/init.py +0 -0
  45. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/log.py +0 -0
  46. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/request.py +0 -0
  47. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/run.py +0 -0
  48. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/save.py +0 -0
  49. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/cache/cache.js +0 -0
  50. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/config/base.js +0 -0
  51. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/config/config.js +0 -0
  52. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/data/fp.js +0 -0
  53. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/data/item1.js +0 -0
  54. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/data/item2.js +0 -0
  55. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/res/data/test.js +0 -0
  56. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/test.py +0 -0
  57. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/test/xtest.py +0 -0
  58. {buildz-0.6.59 → buildz-0.6.60}/buildz/auto/verify.py +0 -0
  59. {buildz-0.6.59 → buildz-0.6.60}/buildz/base.py +0 -0
  60. {buildz-0.6.59 → buildz-0.6.60}/buildz/cachez/__init__.py +0 -0
  61. {buildz-0.6.59 → buildz-0.6.60}/buildz/cachez/cache.py +0 -0
  62. {buildz-0.6.59 → buildz-0.6.60}/buildz/cmd.py +0 -0
  63. {buildz-0.6.59 → buildz-0.6.60}/buildz/confz.py +0 -0
  64. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/__init__.py +0 -0
  65. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/__main__.py +0 -0
  66. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/__init__.py +0 -0
  67. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/basez.py +0 -0
  68. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/clickhousez.py +0 -0
  69. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/lib/readme +0 -0
  70. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/mysqlz.py +0 -0
  71. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/oraclez.py +0 -0
  72. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/orm.py +0 -0
  73. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/postgresqlz.py +0 -0
  74. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/sqlite3z.py +0 -0
  75. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/dv/structz.py +0 -0
  76. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/install.txt +0 -0
  77. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/run.conf +0 -0
  78. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/runz.py +0 -0
  79. {buildz-0.6.59 → buildz-0.6.60}/buildz/db/tls.py +0 -0
  80. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/ioc/deal.py +0 -0
  81. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/ioc/help.py +0 -0
  82. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/myers/deal.py +0 -0
  83. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/myers/help.py +0 -0
  84. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/conf/ioc.js +0 -0
  85. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/conf/main.js +0 -0
  86. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/conf/myers.js +0 -0
  87. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/conf/search.js +0 -0
  88. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/conf/xf.js +0 -0
  89. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/help/default.js +0 -0
  90. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/help/ioc.js +0 -0
  91. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/help/myers.js +0 -0
  92. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/help/search.js +0 -0
  93. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/help/xf.js +0 -0
  94. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/res/test.js +0 -0
  95. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/search/deal.py +0 -0
  96. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/search/help.py +0 -0
  97. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/test.py +0 -0
  98. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/xf/deal.py +0 -0
  99. {buildz-0.6.59 → buildz-0.6.60}/buildz/demo/xf/help.py +0 -0
  100. {buildz-0.6.59 → buildz-0.6.60}/buildz/dz/__init__.py +0 -0
  101. {buildz-0.6.59 → buildz-0.6.60}/buildz/dz/mapz.py +0 -0
  102. {buildz-0.6.59 → buildz-0.6.60}/buildz/evalz/__init__.py +0 -0
  103. {buildz-0.6.59 → buildz-0.6.60}/buildz/evalz/evalz.py +0 -0
  104. {buildz-0.6.59 → buildz-0.6.60}/buildz/evalz/res/default.js +0 -0
  105. {buildz-0.6.59 → buildz-0.6.60}/buildz/evalz/test.py +0 -0
  106. {buildz-0.6.59 → buildz-0.6.60}/buildz/fz/__init__.py +0 -0
  107. {buildz-0.6.59 → buildz-0.6.60}/buildz/fz/dirz.py +0 -0
  108. {buildz-0.6.59 → buildz-0.6.60}/buildz/fz/fhs.py +0 -0
  109. {buildz-0.6.59 → buildz-0.6.60}/buildz/fz/fio.py +0 -0
  110. {buildz-0.6.59 → buildz-0.6.60}/buildz/fz/lsf.py +0 -0
  111. {buildz-0.6.59 → buildz-0.6.60}/buildz/html/__init__.py +0 -0
  112. {buildz-0.6.59 → buildz-0.6.60}/buildz/html/test/demo.html +0 -0
  113. {buildz-0.6.59 → buildz-0.6.60}/buildz/html/test/test.py +0 -0
  114. {buildz-0.6.59 → buildz-0.6.60}/buildz/html/xml.py +0 -0
  115. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/__init__.py +0 -0
  116. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/base.py +0 -0
  117. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/init.py +0 -0
  118. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc/base.py +0 -0
  119. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc/conf.py +0 -0
  120. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc/confs.py +0 -0
  121. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc/decorator.py +0 -0
  122. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc/loads.py +0 -0
  123. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc/single.py +0 -0
  124. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/base.py +0 -0
  125. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/branch.py +0 -0
  126. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/call.py +0 -0
  127. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/calls.py +0 -0
  128. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/branch_lists.js +0 -0
  129. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/call_defaults.js +0 -0
  130. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/call_lists.js +0 -0
  131. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/calls_defaults.js +0 -0
  132. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/calls_lists.js +0 -0
  133. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/deal_lists.js +0 -0
  134. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/deals.js +0 -0
  135. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/env_lists.js +0 -0
  136. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/ioc_lists.js +0 -0
  137. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/iocf_lists.js +0 -0
  138. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/join_lists.js +0 -0
  139. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/list_lists.js +0 -0
  140. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/map_lists.js +0 -0
  141. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/mcall_defaults.js +0 -0
  142. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/mcall_lists.js +0 -0
  143. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/obj_cst_lists.js +0 -0
  144. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/obj_defaults.js +0 -0
  145. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/obj_lists.js +0 -0
  146. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/obj_set_lists.js +0 -0
  147. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/ovar_lists.js +0 -0
  148. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/ref_lists.js +0 -0
  149. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/refs_lists.js +0 -0
  150. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/var_lists.js +0 -0
  151. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/xfile_defaults.js +0 -0
  152. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/conf/xfile_lists.js +0 -0
  153. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/deal.py +0 -0
  154. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/demo.py +0 -0
  155. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/env.py +0 -0
  156. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/init.py +0 -0
  157. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/ioc.py +0 -0
  158. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/iocf.py +0 -0
  159. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/join.py +0 -0
  160. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/list.py +0 -0
  161. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/map.py +0 -0
  162. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/mcall.py +0 -0
  163. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/obj.py +0 -0
  164. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/ovar.py +0 -0
  165. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/ref.py +0 -0
  166. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/refs.py +0 -0
  167. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/val.py +0 -0
  168. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/var.py +0 -0
  169. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/ioc_deal/xfile.py +0 -0
  170. {buildz-0.6.59 → buildz-0.6.60}/buildz/ioc/wrap.py +0 -0
  171. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/__init__.py +0 -0
  172. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/base.py +0 -0
  173. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf/base.py +0 -0
  174. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf/conf.py +0 -0
  175. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf/mg.py +0 -0
  176. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf/unit.py +0 -0
  177. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf/up.py +0 -0
  178. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/attr.py +0 -0
  179. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/base.py +0 -0
  180. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/call.py +0 -0
  181. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/cvar.py +0 -0
  182. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/deal.py +0 -0
  183. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/env.py +0 -0
  184. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/ioc.py +0 -0
  185. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/method.py +0 -0
  186. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/obj.py +0 -0
  187. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/ref.py +0 -0
  188. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/conf_deal/val.py +0 -0
  189. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/init.py +0 -0
  190. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/__init__.py +0 -0
  191. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/base.py +0 -0
  192. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/builds.py +0 -0
  193. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/confs.py +0 -0
  194. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/datas.py +0 -0
  195. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/dataset.py +0 -0
  196. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/encapes.py +0 -0
  197. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/envs.py +0 -0
  198. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/ids.py +0 -0
  199. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/init.py +0 -0
  200. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/mg.py +0 -0
  201. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/single.py +0 -0
  202. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/sys_envs.py +0 -0
  203. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/tdata.py +0 -0
  204. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/tdict.py +0 -0
  205. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/unit.py +0 -0
  206. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc/vars.py +0 -0
  207. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc_deal/base.py +0 -0
  208. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc_deal/deal.py +0 -0
  209. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc_deal/ioc.py +0 -0
  210. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc_deal/obj.py +0 -0
  211. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc_deal/ref.py +0 -0
  212. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/ioc_deal/val.py +0 -0
  213. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/test/test.py +0 -0
  214. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/wrap/base.py +0 -0
  215. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/wrap/default_wraps.py +0 -0
  216. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/wrap/env.py +0 -0
  217. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/wrap/obj.py +0 -0
  218. {buildz-0.6.59 → buildz-0.6.60}/buildz/iocz/wrap/wraps.py +0 -0
  219. {buildz-0.6.59 → buildz-0.6.60}/buildz/logz.py +0 -0
  220. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/__init__.py +0 -0
  221. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/__init__.py +0 -0
  222. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/caps.py +0 -0
  223. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/gateway.py +0 -0
  224. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/mhttp.py +0 -0
  225. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/mhttps.py +0 -0
  226. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/proxy.py +0 -0
  227. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/mhttp/record.py +0 -0
  228. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/sslz/__init__.py +0 -0
  229. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/sslz/gen.py +0 -0
  230. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/sslz/gen.pyi +0 -0
  231. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/test/__main__.py +0 -0
  232. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/test/test.py +0 -0
  233. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/test/test_cli.py +0 -0
  234. {buildz-0.6.59 → buildz-0.6.60}/buildz/netz/test/test_gw.py +0 -0
  235. {buildz-0.6.59 → buildz-0.6.60}/buildz/pathz.py +0 -0
  236. {buildz-0.6.59 → buildz-0.6.60}/buildz/pyz.py +0 -0
  237. {buildz-0.6.59 → buildz-0.6.60}/buildz/tls.py +0 -0
  238. {buildz-0.6.59 → buildz-0.6.60}/buildz/tools.py +0 -0
  239. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/__init__.py +0 -0
  240. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/myers_diff.py +0 -0
  241. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/test_xfind.py +0 -0
  242. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/time/__init__.py +0 -0
  243. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/time/timez.py +0 -0
  244. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/tio/__init__.py +0 -0
  245. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/tio/base.py +0 -0
  246. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/tio/getch.py +0 -0
  247. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/tio/lx.py +0 -0
  248. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/tio/test.py +0 -0
  249. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/tio/win.py +0 -0
  250. {buildz-0.6.59 → buildz-0.6.60}/buildz/tz/xfind.py +0 -0
  251. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/__init__.py +0 -0
  252. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/__main__.py +0 -0
  253. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/base.py +0 -0
  254. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/code.py +0 -0
  255. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/code_modify.py +0 -0
  256. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/copy_old.py +0 -0
  257. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/file.py +0 -0
  258. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/base.py +0 -0
  259. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/buffer.py +0 -0
  260. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/listz.py +0 -0
  261. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/lr.py +0 -0
  262. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/lrval.py +0 -0
  263. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/mapz.py +0 -0
  264. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/nextz.py +0 -0
  265. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/reval.py +0 -0
  266. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/setz.py +0 -0
  267. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/spc.py +0 -0
  268. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/spt.py +0 -0
  269. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/deal/strz.py +0 -0
  270. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/exp.py +0 -0
  271. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/item.py +0 -0
  272. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/mg.py +0 -0
  273. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loader/pos.py +0 -0
  274. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/base.py +0 -0
  275. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/buffer.py +0 -0
  276. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/listmapz.py +0 -0
  277. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/listz.py +0 -0
  278. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/lr.py +0 -0
  279. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/lrval.py +0 -0
  280. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/mapz.py +0 -0
  281. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/nextz.py +0 -0
  282. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/reval.py +0 -0
  283. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/setz.py +0 -0
  284. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/spc.py +0 -0
  285. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/spt.py +0 -0
  286. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/strz.py +0 -0
  287. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/strz_new.py +0 -0
  288. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/deal/strz_old.py +0 -0
  289. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/exp.py +0 -0
  290. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/item.py +0 -0
  291. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/mg.py +0 -0
  292. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/pos.py +0 -0
  293. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/test.py +0 -0
  294. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz/test1.py +0 -0
  295. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/base.py +0 -0
  296. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/buffer.py +0 -0
  297. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/listmapz.py +0 -0
  298. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/listz.py +0 -0
  299. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/lr.py +0 -0
  300. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/lrval.py +0 -0
  301. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/mapz.py +0 -0
  302. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/nextz.py +0 -0
  303. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/reval.py +0 -0
  304. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/setz.py +0 -0
  305. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/spc.py +0 -0
  306. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/spt.py +0 -0
  307. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/strz.py +0 -0
  308. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/strz_new.py +0 -0
  309. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/deal/strz_old.py +0 -0
  310. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/exp.py +0 -0
  311. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/item.py +0 -0
  312. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/mg.py +0 -0
  313. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/pos.py +0 -0
  314. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/test.py +0 -0
  315. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/loaderz_nexp/test1.py +0 -0
  316. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/mapz.py +0 -0
  317. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/read.py +0 -0
  318. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/readz.py +0 -0
  319. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/readz_nexp.py +0 -0
  320. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/stack.py +0 -0
  321. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/write.py +0 -0
  322. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/base.py +0 -0
  323. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/conf.py +0 -0
  324. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/deal/jsonval.py +0 -0
  325. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/deal/listmapz.py +0 -0
  326. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/deal/listz.py +0 -0
  327. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/deal/mapz.py +0 -0
  328. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/deal/reval.py +0 -0
  329. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/deal/strz.py +0 -0
  330. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/itemz.py +0 -0
  331. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writer/mg.py +0 -0
  332. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/base.py +0 -0
  333. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/conf.py +0 -0
  334. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/deal/jsonval.py +0 -0
  335. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/deal/listmapz.py +0 -0
  336. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/deal/listz.py +0 -0
  337. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/deal/mapz.py +0 -0
  338. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/deal/reval.py +0 -0
  339. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/deal/strz.py +0 -0
  340. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/itemz.py +0 -0
  341. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/mg.py +0 -0
  342. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/test.py +0 -0
  343. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writerz/testx.py +0 -0
  344. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/writez.py +0 -0
  345. {buildz-0.6.59 → buildz-0.6.60}/buildz/xf/xargs.py +0 -0
  346. {buildz-0.6.59 → buildz-0.6.60}/buildz/xz/__init__.py +0 -0
  347. {buildz-0.6.59 → buildz-0.6.60}/buildz/xz/conf.js +0 -0
  348. {buildz-0.6.59 → buildz-0.6.60}/buildz/xz/data.js +0 -0
  349. {buildz-0.6.59 → buildz-0.6.60}/buildz/xz/test.py +0 -0
  350. {buildz-0.6.59 → buildz-0.6.60}/buildz/xz/trs.py +0 -0
  351. {buildz-0.6.59 → buildz-0.6.60}/buildz.egg-info/dependency_links.txt +0 -0
  352. {buildz-0.6.59 → buildz-0.6.60}/buildz.egg-info/top_level.txt +0 -0
  353. {buildz-0.6.59 → buildz-0.6.60}/notes/notes.txt +0 -0
  354. {buildz-0.6.59 → buildz-0.6.60}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: buildz
- Version: 0.6.59
+ Version: 0.6.60
  Summary: 配置读写(基于json格式进行简化)、ioc、以及其他工具代码。a json-base file format's read and write code by python, and codes to read and product object from configure file in such format(ioc), and other tool codes
  Home-page: https://github.com/buildCodeZ/buildz
  Author: Zzz
@@ -1,6 +1,6 @@
  #coding=utf-8

- __version__="0.6.59"
+ __version__="0.6.60"

  # alt accounts
  __author__ = "Zzz, emails: 1174534295@qq.com, 1309458652@qq.com"
@@ -0,0 +1,50 @@
+
+ Test results:
+ Conclusion: roughly one half to one third of the performance of running purely on the GPU (a 75W card; higher-power cards were not tested). Useful when VRAM is insufficient, mainly for convolutional layers: convolutions are very slow on the CPU while their parameter counts are relatively small. For linear layers the CPU and GPU are not that far apart to begin with.
+
+ Environment:
+ Laptop (gaming notebook)
+ GPU: RTX4060 8GB, power-limited 75W variant
+ CPU: i7-13700H
+ RAM: 16GB
+
+ 1) Linear-layer test:
+ Model:
+ float32, 120 layers, each layer with 2000 inputs and 2000 outputs, 1.789GB of parameters, a LeakyReLU activation after every linear layer
+ Data:
+ 60 samples, 2000 dimensions each, split into two batches (30 samples per batch), 0.000447GB of data
+
+ Average time for one training pass (both batches)
+ Pure GPU: 4.13s
+ Pure CPU: 14.7s
+ Model split into chunks, swapped onto the GPU in turn: 3.67s (VRAM was sufficient, so effectively the same as pure GPU)
+ Model split into chunks, with 2.3GB of VRAM already in use: 6.5s
+ Model split into chunks, with 3.9GB of VRAM already in use: 9.5s
+
+ 2) Convolution test:
+ Model:
+ float32, 60 layers, every kernel has 3 input and 3 output channels, kernel size 5, 0.00005GB of parameters, a LeakyReLU activation after every layer
+ Data:
+ 20 samples, each 3 channels * 512 wide * 512 high, split into two batches (10 samples per batch), 0.059GB of data
+
+ Average time for one training pass (both batches)
+ Pure GPU: 1.52s
+ Pure CPU: 41s
+ Model split into chunks, swapped onto the GPU in turn: 3.36s (VRAM was sufficient, so effectively the same as pure GPU)
+ Pure GPU, with 3.9GB of VRAM already in use: 23s (this is where cuda+torch gets strange; the guess is that when VRAM runs short it drops some of the activations cached for the backward pass and recomputes them during backward, which increases the time a lot)
+ Model split into chunks, with 3.9GB of VRAM already in use: 3.3s
+
+
+
+ 3) Convolution test 1 (test_middle_conv1.py):
+ Model:
+ float32, 22-layer network, kernel size 5, each layer is two convolutions (one from 30 channels to 60, one from 60 back to 30) followed by a LeakyReLU activation, 0.0074GB of parameters
+ Data:
+ 6 samples, each 30 channels * 512 wide * 512 high, split into three batches (2 samples per batch), 0.059GB of data
+
+ Average time for one training pass (all batches)
+ Pure GPU: 3.73s
+ Pure CPU: 44s
+ Model split into chunks, swapped onto the GPU in turn: 7.5s (VRAM was sufficient, so effectively the same as pure GPU)
+ Pure GPU, with 3.9GB of VRAM already in use: 116s
+ Model split into chunks, with 3.9GB of VRAM already in use: 7.7s
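Editor's note: the parameter and data sizes quoted in report.txt follow directly from the layer shapes. As a sanity check (not part of the package), the arithmetic can be redone with the same bytes = element_size * nelement accounting that sz() uses in the test scripts below:

# Sanity check of the sizes quoted above (float32, reported in GiB).
FP32 = 4          # bytes per float32 value
GIB = 1024 ** 3

# 1) linear test: 120 x Linear(2000, 2000) -> 2000*2000 weights + 2000 biases per layer
print(120 * (2000 * 2000 + 2000) * FP32 / GIB)    # ~1.789 GB of parameters
print(60 * 2000 * FP32 / GIB)                     # ~0.000447 GB of data (60 samples x 2000 dims)

# 2) conv test: 60 x Conv2d(3, 3, 5) -> 3*3*5*5 weights + 3 biases per layer
print(60 * (3 * 3 * 5 * 5 + 3) * FP32 / GIB)      # ~0.00005 GB of parameters
print(20 * 3 * 512 * 512 * FP32 / GIB)            # ~0.059 GB of data (20 samples of 3x512x512)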
@@ -0,0 +1,118 @@
+ #
+
+ from buildz.gpuz.torch import CacheModel
+ import sys
+ import torch,time
+ from torch import nn,optim
+ from torch.utils.data import DataLoader, Dataset
+ cpu = torch.device('cpu')
+ cuda = cpu
+ if torch.cuda.is_available():
+     cuda = torch.device('cuda')
+ def sz(tensor):
+     return tensor.element_size()*tensor.nelement()
+ class Model(nn.Module):
+     def __init__(self, dims, num):
+         super().__init__()
+         #nets = [nn.Conv2d(3, 3, 5, padding=2) for i in range(num)]
+         nets=[]
+         for i in range(num):
+             nets.append(nn.Linear(dims,dims))
+             nets.append(nn.LeakyReLU())
+         #nets = [nn.Linear(dims,dims) for i in range(num)]
+         print(f"nets:{len(nets)}")
+         self.nets = nn.Sequential(*nets)
+     def forward(self, inputs):
+         return self.nets(inputs)
+     def size(self):
+         total = 0.0
+         for net in self.nets:
+             if not hasattr(net, "weight"):
+                 continue
+             w = net.weight
+             b = net.bias
+             total+=sz(w)+sz(b)
+         return total
+
+ pass
+ class TestDataset(Dataset):
+     def __init__(self, n, dims):
+         self.n = n
+         self.dims = dims
+         self.datas = torch.rand(n, dims)
+         print(f"data size: {sz(self.datas)/1024/1024/1024} GB")
+     def __len__(self):
+         return self.n
+     def __getitem__(self, i):
+         return self.datas[i]
+         return torch.rand(self.dims)
+
+ pass
+ def fc_opt(net, opt):
+     #torch.nn.utils.clip_grad_norm_(net.parameters(), max_norm=1.0)
+     opt.step()
+ def test():
+     nets=10
+     dims=2000
+     trains = 5
+     datas = 60
+     batch=30
+     lr=0.0001
+     win_size=3
+     num = 12
+     mds = [Model(dims, nets) for i in range(num)]
+     mds_sz = [md.size() for md in mds]
+     print(f"Model Size: {sum(mds_sz)/1024/1024/1024} GB")
+     opts =[optim.Adam(md.parameters(), lr=lr) for md in mds]
+     cuda=cpu
+     ds = TestDataset(datas, dims)
+     dl = DataLoader(ds, batch)
+     #return
+     loss_fn = torch.nn.MSELoss()
+     print("start train")
+     nets= []
+     for md in mds:
+         nets+=md.nets
+     gmodel = nn.Sequential(*nets)
+     gmodel = gmodel.to(cuda)
+     gopt = optim.Adam(gmodel.parameters(), lr=lr)
+     gmodel.train()
+     #with torch.no_grad():
+     for i in range(trains):
+         total_loss = 0
+         curr=time.time()
+         for dt in dl:
+             dt=dt.to(cuda)
+             gopt.zero_grad()
+             out = gmodel(dt)
+             loss = loss_fn(out, dt)
+             print(f"loss: {loss, type(loss)}")
+             loss.backward()
+             gopt.step()
+             total_loss+=loss.item()
+         sec = time.time()-curr
+         print("train:", i, "loss:", total_loss/len(dl), "time:", sec)
+     del gmodel,gopt
+     torch.cuda.empty_cache()
+     input("start middle:")
+     md = CacheModel(cuda, cpu, mds, opts, win_size, fc_opt)
+     md.nfc("train")
+     #with torch.no_grad():
+     for i in range(trains):
+         total_loss = 0
+         curr=time.time()
+         for dt in dl:
+             dt=dt.to(cuda)
+             [opt.zero_grad() for opt in opts]
+             out = md.do_forward(dt)
+             loss = loss_fn(out, dt)
+             print(f"loss: {loss, type(loss)}")
+             md.do_backward(lambda : loss.backward())
+             total_loss+=loss.item()
+         sec = time.time()-curr
+         print("train:", i, "loss:", total_loss/len(dl), "time:", sec)
+
+
+
+ pass
+ test()
@@ -0,0 +1,118 @@
+ #
+
+ from buildz.gpuz.torch import CacheModel
+ import sys
+ import torch,time
+ from torch import nn,optim
+ from torch.utils.data import DataLoader, Dataset
+ cpu = torch.device('cpu')
+ cuda = cpu
+ if torch.cuda.is_available():
+     cuda = torch.device('cuda')
+ def sz(tensor):
+     return tensor.element_size()*tensor.nelement()
+ class Model(nn.Module):
+     def __init__(self, dims, num):
+         super().__init__()
+         #nets = [nn.Conv2d(3, 3, 5, padding=2) for i in range(num)]
+         nets=[]
+         for i in range(num):
+             nets.append(nn.Conv2d(3, 3, 5, padding=2))
+             nets.append(nn.LeakyReLU())
+         #nets = [nn.Linear(dims,dims) for i in range(num)]
+         print(f"nets:{len(nets)}")
+         self.nets = nn.Sequential(*nets)
+     def forward(self, inputs):
+         return self.nets(inputs)
+     def size(self):
+         total = 0.0
+         for net in self.nets:
+             if not hasattr(net, "weight"):
+                 continue
+             w = net.weight
+             b = net.bias
+             total+=sz(w)+sz(b)
+         return total
+
+ pass
+ class TestDataset(Dataset):
+     def __init__(self, n, dims):
+         self.n = n
+         self.dims = dims
+         self.datas = torch.rand(n, 3,dims,dims)
+         print(f"data size: {sz(self.datas)/1024/1024/1024} GB")
+     def __len__(self):
+         return self.n
+     def __getitem__(self, i):
+         return self.datas[i]
+         return torch.rand(self.dims)
+
+ pass
+ def fc_opt(net, opt):
+     #torch.nn.utils.clip_grad_norm_(net.parameters(), max_norm=1.0)
+     opt.step()
+ def test():
+     nets=5
+     dims=512
+     trains = 5
+     datas = 20
+     batch=10
+     lr=0.0001
+     win_size=3
+     num = 12
+     mds = [Model(dims, nets) for i in range(num)]
+     mds_sz = [md.size() for md in mds]
+     print(f"Model Size: {sum(mds_sz)/1024/1024/1024} GB")
+     opts =[optim.Adam(md.parameters(), lr=lr) for md in mds]
+     #cuda=cpu
+     ds = TestDataset(datas, dims)
+     dl = DataLoader(ds, batch)
+     #return
+     loss_fn = torch.nn.MSELoss()
+     print("start train")
+     nets= []
+     for md in mds:
+         nets+=md.nets
+     gmodel = nn.Sequential(*nets)
+     gmodel = gmodel.to(cuda)
+     gopt = optim.Adam(gmodel.parameters(), lr=lr)
+     gmodel.train()
+     #with torch.no_grad():
+     for i in range(trains):
+         total_loss = 0
+         curr=time.time()
+         for dt in dl:
+             dt=dt.to(cuda)
+             gopt.zero_grad()
+             out = gmodel(dt)
+             loss = loss_fn(out, dt)
+             print(f"loss: {loss, type(loss)}")
+             loss.backward()
+             gopt.step()
+             total_loss+=loss.item()
+         sec = time.time()-curr
+         print("train:", i, "loss:", total_loss/len(dl), "time:", sec)
+     del gmodel,gopt
+     torch.cuda.empty_cache()
+     input("start middle:")
+     md = CacheModel(cuda, cpu, mds, opts, win_size, fc_opt)
+     md.nfc("train")
+     #with torch.no_grad():
+     for i in range(trains):
+         total_loss = 0
+         curr=time.time()
+         for dt in dl:
+             dt=dt.to(cuda)
+             [opt.zero_grad() for opt in opts]
+             out = md.do_forward(dt)
+             loss = loss_fn(out, dt)
+             print(f"loss: {loss, type(loss)}")
+             md.do_backward(lambda : loss.backward())
+             total_loss+=loss.item()
+         sec = time.time()-curr
+         print("train:", i, "loss:", total_loss/len(dl), "time:", sec)
+
+
+
+ pass
+ test()
@@ -0,0 +1,120 @@
+ #
+
+ import sys
+ from buildz.gpuz.torch import CacheModel
+ import torch,time
+ from torch import nn,optim
+ from torch.utils.data import DataLoader, Dataset
+ cpu = torch.device('cpu')
+ cuda = cpu
+ if torch.cuda.is_available():
+     cuda = torch.device('cuda')
+ def sz(tensor):
+     return tensor.element_size()*tensor.nelement()
+ class ConvModel(nn.Module):
+     def __init__(self, dims, num, ins_channels, middle_channels):
+         super().__init__()
+         nets=[]
+         for i in range(num):
+             nets.append(nn.Conv2d(ins_channels, middle_channels, 5, padding=2))
+             nets.append(nn.Conv2d(middle_channels, ins_channels, 5, padding=2))
+             nets.append(nn.LeakyReLU())
+         print(f"nets:{len(nets)}")
+         self.nets = nn.Sequential(*nets)
+     def forward(self, inputs):
+         return self.nets(inputs)
+     def size(self):
+         total = 0.0
+         for net in self.nets:
+             if not hasattr(net, "weight"):
+                 continue
+             w = net.weight
+             b = net.bias
+             total+=sz(w)+sz(b)
+         return total
+
+ pass
+ class TestDataset(Dataset):
+     def __init__(self, n, dims, channels):
+         self.n = n
+         self.dims = dims
+         self.datas = torch.rand(n, channels, dims,dims)
+         print(f"data size: {sz(self.datas)/1024/1024/1024} GB")
+     def __len__(self):
+         return self.n
+     def __getitem__(self, i):
+         return self.datas[i]
+         return torch.rand(self.dims)
+
+ pass
+ def fc_opt(net, opt):
+     torch.nn.utils.clip_grad_norm_(net.parameters(), max_norm=1.0)
+     opt.step()
+ def test():
+     nets=2
+     channels=30
+     middle_channels = 60
+     dims=512
+     trains = 5
+     datas = 6
+     batch=2
+     lr=0.0001
+     win_size=3
+     num_conv = 11
+     num_ln = 3
+     mds = [ConvModel(dims, nets, channels, middle_channels) for i in range(num_conv)]
+     mds_sz = [md.size() for md in mds]
+     print(f"Model Size: {sum(mds_sz)/1024/1024/1024} GB")
+     opts =[optim.Adam(md.parameters(), lr=lr) for md in mds]
+     #cuda=cpu
+     ds = TestDataset(datas, dims, channels)
+     dl = DataLoader(ds, batch)
+     #return
+     loss_fn = torch.nn.MSELoss()
+     print("start train")
+     # nets= []
+     # for md in mds:
+     #     nets+=md.nets
+     # gmodel = nn.Sequential(*nets)
+     # gmodel = gmodel.to(cuda)
+     # gopt = optim.Adam(gmodel.parameters(), lr=lr)
+     # gmodel.train()
+     # #with torch.no_grad():
+     # for i in range(trains):
+     #     total_loss = 0
+     #     curr=time.time()
+     #     for dt in dl:
+     #         dt=dt.to(cuda)
+     #         gopt.zero_grad()
+     #         out = gmodel(dt)
+     #         loss = loss_fn(out, dt)
+     #         print(f"loss: {loss, type(loss)}")
+     #         loss.backward()
+     #         gopt.step()
+     #         total_loss+=loss.item()
+     #     sec = time.time()-curr
+     #     print("train:", i, "loss:", total_loss/len(dl), "time:", sec)
+     # del gmodel,gopt
+     # torch.cuda.empty_cache()
+     # input("start middle:")
+     md = CacheModel(cuda, cpu, mds, opts, win_size, fc_opt)
+     md.nfc("train")
+     #with torch.no_grad():
+     for i in range(trains):
+         total_loss = 0
+         curr=time.time()
+         for dt in dl:
+             dt=dt.to(cuda)
+             [opt.zero_grad() for opt in opts] # no need to push this step onto the gpu as well; just written like this directly
+             out = md.do_forward(dt)
+             loss = loss_fn(out, dt)
+             print(f"loss: {loss, type(loss)}")
+             md.do_backward(lambda : loss.backward())
+             total_loss+=loss.item()
+         sec = time.time()-curr
+         print("train:", i, "loss:", total_loss/len(dl), "time:", sec)
+
+
+
+ pass
+ test()
@@ -0,0 +1,8 @@
+ #coding=utf-8
+
+ __version__="0.0.1"
+
+ # alt accounts
+ __author__ = "Zzz, emails: 1174534295@qq.com, 1309458652@qq.com"
+
+ from .middlez import CacheModel
@@ -0,0 +1,181 @@
+ #
+ import torch
+ from torch import nn
+ import threading as th
+ class CacheModel:
+     '''
+     Purpose: when VRAM is insufficient and the model can be split into several smaller models connected in sequence, this code can be used; during forward and backward it automatically moves the small multi-layer networks onto the gpu in turn for computation and back to the cpu when done.
+     The caller needs to split the multi-layer network into several smaller multi-layer networks by hand.
+     See test_middle_conv1.py for test code.
+     Roughly one half to one third of pure-GPU performance, but at least better than the CPU, especially for convolutions, where it is far faster than the CPU.
+     Example:
+
+     from buildz.gpuz.torch import CacheModel
+     from torch import nn,optim
+     model1 = nn.Sequential(*[nn.Linear(1024,1024) for i in range(10)])
+     model2 = nn.Sequential(*[nn.Linear(1024,1024) for i in range(10)])
+     model3 = nn.Sequential(*[nn.Linear(1024,1024) for i in range(10)])
+     opt1 = optim.Adam(model1.parameters(), lr=0.001)
+     opt2 = optim.Adam(model2.parameters(), lr=0.001)
+     opt3 = optim.Adam(model3.parameters(), lr=0.001)
+     models = [model1,model2,model3]
+     opts = [opt1,opt2,opt3]
+     loss_fn = torch.nn.MSELoss()
+     def opt_step(net, opt):
+         # if the model is only used for inference, not training, this function can be omitted and opts can be passed in empty
+         # other per-model tweaks are optional; the main job is calling opt.step() to update the current sub-model
+         # torch.nn.utils.clip_grad_norm_(net.parameters(), max_norm=1.0)
+         opt.step()
+     cmodel = CacheModel(torch.device('cuda'), torch.device('cpu'),models,opts,3,opt_step)
+
+     # training:
+     [md.train() for md in models]
+     for inputs,targets in dataloader: # batched dataset, implement this yourself
+         [opt.zero_grad() for opt in opts]
+         outs = cmodel.do_forward(inputs)
+         loss = loss_fn(outs, targets)
+         cmodel.do_backward(lambda: loss.backward())
+         # opt.step() is called automatically inside do_backward
+         print(loss.item())
+
+     # inference:
+     with torch.no_grad():
+         outputs = cmodel.do_forward(inputs)
+         print(outputs)
+     '''
+     def __init__(self, gdv, cdv, nets, opts, win_size = 1, backward_deal = None):
+         '''
+         gdv: GPU device, should be torch.device('cuda')
+         cdv: CPU device, should be torch.device('cpu')
+         If both gdv and cdv are torch.device('cpu'), storage and computation are entirely on the CPU.
+         If both are torch.device('cuda'), storage and computation are entirely on the GPU.
+         '''
+         self.gdv = gdv
+         self.cdv = cdv
+         # event and condition are unused; the plan was to make this multi-threaded, but only one python thread runs at a time and contention is fairly heavy, to be revised
+         self.event = th.Event()
+         self.condition=th.Condition()
+         [net.register_full_backward_hook(self.hook_backward) for net in nets]
+         self.nets = nets
+         self.size = len(nets)
+         self.ctxs = [[] for i in range(self.size)]
+         self.size_1 = self.size-1
+         self.opts = opts
+         self.win_size = win_size
+         self.backward_deal = backward_deal
+         self.base = -1
+         self.curr = 0
+         self.last = -1
+         self.running = False
+     def hook_pack(self, dt):
+         # activations saved during forward for the later gradient computation, kept in a list so they are easy to move between cpu and gpu
+         self.ctxs[self.curr].append(dt)
+         return len(self.ctxs[self.curr])-1
+     def hook_unpack(self, x):
+         dt = self.ctxs[self.curr][x]
+         return dt
+     def nfc(self, fc, *a,**b):
+         [getattr(net, fc)(*a,**b) for net in self.nets]
+     def reset(self):
+         for i in range(self.base,self.last+1):
+             self.nets[i].to(self.cdv)
+             self.ctxs_to(i, self.cdv)
+         self.base,self.last=-1,-1
+     def ctxs_to(self, i, dv):
+         if dv is None:
+             self.ctxs[i] = []
+         else:
+             self.ctxs[i] = [k.to(dv) for k in self.ctxs[i]]
+     def copy_backward(self):
+         if self.last<self.curr:
+             self.reset()
+         if self.base==0:
+             return False
+         if self.last<0:
+             self.nets[self.size_1].to(self.gdv)
+             self.last = self.size_1
+             self.base = self.size_1
+         diff = self.win_size-(self.last-self.base+1)
+         diff = min(diff, self.base)
+         for i in range(diff):
+             self.nets[self.base-1].to(self.gdv)
+             self.ctxs_to(self.base-1, self.gdv)
+             self.base-=1
+         rels = self.last-self.curr
+         for i in range(rels):
+             self.nets[self.last].to(self.cdv)
+             self.ctxs_to(self.last, None)
+             self.last-=1
+         return True
+     def copy_forward(self):
+         if self.base>self.curr:
+             self.reset()
+         if self.last==self.size_1:
+             return False
+         if self.base<0:
+             self.nets[0].to(self.gdv)
+             self.base=0
+             self.last=0
+         diff = self.win_size-(self.last-self.base+1)
+         diff = min(diff, self.size_1-self.last)
+         for i in range(diff):
+             self.nets[self.last+1].to(self.gdv)
+             self.last+=1
+         rels = self.curr-self.base
+         for i in range(rels):
+             self.nets[self.base].to(self.cdv)
+             self.ctxs_to(self.base, self.cdv)
+             self.base+=1
+         return True
+     def th_copy_forward(self):
+         while self.copy_forward():
+             self.event.set()
+         self.running = False
+     def th_copy_backward(self):
+         while self.copy_backward():
+             self.event.set()
+     def wait(self):
+         with self.condition:
+             self.condition.notify()
+         self.event.wait()
+     def do_forward(self, inputs):
+         # while self.running:
+         #     import time
+         #     time.sleep(0.01)
+         # t = th.Thread(target=self.th_copy_forward, daemon=True)
+         # self.running = True
+         # t.start()
+         self.ctxs = [[] for i in range(self.size)]
+         with torch.autograd.graph.saved_tensors_hooks(self.hook_pack, self.hook_unpack):
+             rst = self.forward(inputs)
+         return rst
+     def forward(self, inputs):
+         for self.curr in range(len(self.nets)):
+             while not (self.base<=self.curr<=self.last):
+                 self.copy_forward()
+                 #self.wait()
+             inputs = self.nets[self.curr](inputs)
+         return inputs
+     def wrap_backward_deal(self, i):
+         if self.backward_deal is None:
+             return
+         try:
+             self.backward_deal(self.nets[i], self.opts[i])
+         finally:
+             pass
+     def hook_backward(self, model, ins, outs):
+         if self.backward_curr<self.size_1:
+             self.wrap_backward_deal(self.backward_curr+1)
+         self.curr = self.backward_curr
+         while not (self.base<=self.backward_curr<=self.last):
+             #self.wait()
+             self.copy_backward()
+         self.backward_curr-=1
+     def do_backward(self, fc):
+         self.backward_curr=self.curr
+         # t = th.Thread(target=self.th_copy_backward,daemon=True)
+         # t.start()
+         fc()
+         self.wrap_backward_deal(0)
+
+ pass
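Editor's note: the offloading in middlez.py is built on torch.autograd.graph.saved_tensors_hooks: the pack hook fires whenever autograd saves a tensor for the backward pass and the unpack hook fires when backward needs it again, which is how hook_pack/hook_unpack keep those activations in self.ctxs and move them between devices together with each sub-network. A standalone sketch of just that mechanism, independent of CacheModel (assumes a recent PyTorch; not part of the package):

import torch

dev = "cuda" if torch.cuda.is_available() else "cpu"

def pack_to_cpu(t):
    # called by autograd when it saves a tensor for backward: park it in host memory
    return t.to("cpu")

def unpack_from_cpu(t):
    # called during backward when the saved tensor is needed: bring it back
    return t.to(dev)

x = torch.randn(4, 8, device=dev, requires_grad=True)
w = torch.randn(8, 8, device=dev, requires_grad=True)
with torch.autograd.graph.saved_tensors_hooks(pack_to_cpu, unpack_from_cpu):
    loss = torch.relu(x @ w).sum()   # relu's saved output goes through the hooks
loss.backward()
print(x.grad.shape, w.grad.shape)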
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: buildz
- Version: 0.6.59
+ Version: 0.6.60
  Summary: 配置读写(基于json格式进行简化)、ioc、以及其他工具代码。a json-base file format's read and write code by python, and codes to read and product object from configure file in such format(ioc), and other tool codes
  Home-page: https://github.com/buildCodeZ/buildz
  Author: Zzz
@@ -108,6 +108,12 @@ buildz/fz/dirz.py
  buildz/fz/fhs.py
  buildz/fz/fio.py
  buildz/fz/lsf.py
+ buildz/gpuz/test/report.txt
+ buildz/gpuz/test/test_middle.py
+ buildz/gpuz/test/test_middle_conv.py
+ buildz/gpuz/test/test_middle_conv1.py
+ buildz/gpuz/torch/__init__.py
+ buildz/gpuz/torch/middlez.py
  buildz/html/__init__.py
  buildz/html/xml.py
  buildz/html/test/demo.html
@@ -7,7 +7,7 @@ from setuptools import setup, find_packages

  setup(
      name = 'buildz',
-     version = '0.6.59',
+     version = '0.6.60',
      keywords='buildz',
      long_description=open('README.md', 'r', encoding="utf-8").read(),
      long_description_content_type='text/markdown',