pyxllib 0.3.96-py3-none-any.whl → 0.3.197-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (306)
  1. pyxllib/algo/geo.py +12 -0
  2. pyxllib/algo/intervals.py +1 -1
  3. pyxllib/algo/matcher.py +78 -0
  4. pyxllib/algo/pupil.py +187 -19
  5. pyxllib/algo/specialist.py +2 -1
  6. pyxllib/algo/stat.py +38 -2
  7. {pyxlpr → pyxllib/autogui}/__init__.py +1 -1
  8. pyxllib/autogui/activewin.py +246 -0
  9. pyxllib/autogui/all.py +9 -0
  10. pyxllib/{ext/autogui → autogui}/autogui.py +40 -11
  11. pyxllib/autogui/uiautolib.py +362 -0
  12. pyxllib/autogui/wechat.py +827 -0
  13. pyxllib/autogui/wechat_msg.py +421 -0
  14. pyxllib/autogui/wxautolib.py +84 -0
  15. pyxllib/cv/slidercaptcha.py +137 -0
  16. pyxllib/data/echarts.py +123 -12
  17. pyxllib/data/jsonlib.py +89 -0
  18. pyxllib/data/pglib.py +514 -30
  19. pyxllib/data/sqlite.py +231 -4
  20. pyxllib/ext/JLineViewer.py +14 -1
  21. pyxllib/ext/drissionlib.py +277 -0
  22. pyxllib/ext/kq5034lib.py +0 -1594
  23. pyxllib/ext/robustprocfile.py +497 -0
  24. pyxllib/ext/unixlib.py +6 -5
  25. pyxllib/ext/utools.py +108 -95
  26. pyxllib/ext/webhook.py +32 -14
  27. pyxllib/ext/wjxlib.py +88 -0
  28. pyxllib/ext/wpsapi.py +124 -0
  29. pyxllib/ext/xlwork.py +9 -0
  30. pyxllib/ext/yuquelib.py +1003 -71
  31. pyxllib/file/docxlib.py +1 -1
  32. pyxllib/file/libreoffice.py +165 -0
  33. pyxllib/file/movielib.py +9 -0
  34. pyxllib/file/packlib/__init__.py +112 -75
  35. pyxllib/file/pdflib.py +1 -1
  36. pyxllib/file/pupil.py +1 -1
  37. pyxllib/file/specialist/dirlib.py +1 -1
  38. pyxllib/file/specialist/download.py +10 -3
  39. pyxllib/file/specialist/filelib.py +266 -55
  40. pyxllib/file/xlsxlib.py +205 -50
  41. pyxllib/file/xlsyncfile.py +341 -0
  42. pyxllib/prog/cachetools.py +64 -0
  43. pyxllib/prog/filelock.py +42 -0
  44. pyxllib/prog/multiprogs.py +940 -0
  45. pyxllib/prog/newbie.py +9 -2
  46. pyxllib/prog/pupil.py +129 -60
  47. pyxllib/prog/specialist/__init__.py +176 -2
  48. pyxllib/prog/specialist/bc.py +5 -2
  49. pyxllib/prog/specialist/browser.py +11 -2
  50. pyxllib/prog/specialist/datetime.py +68 -0
  51. pyxllib/prog/specialist/tictoc.py +12 -13
  52. pyxllib/prog/specialist/xllog.py +5 -5
  53. pyxllib/prog/xlosenv.py +7 -0
  54. pyxllib/text/airscript.js +744 -0
  55. pyxllib/text/charclasslib.py +17 -5
  56. pyxllib/text/jiebalib.py +6 -3
  57. pyxllib/text/jinjalib.py +32 -0
  58. pyxllib/text/jsa_ai_prompt.md +271 -0
  59. pyxllib/text/jscode.py +159 -4
  60. pyxllib/text/nestenv.py +1 -1
  61. pyxllib/text/newbie.py +12 -0
  62. pyxllib/text/pupil/common.py +26 -0
  63. pyxllib/text/specialist/ptag.py +2 -2
  64. pyxllib/text/templates/echart_base.html +11 -0
  65. pyxllib/text/templates/highlight_code.html +17 -0
  66. pyxllib/text/templates/latex_editor.html +103 -0
  67. pyxllib/text/xmllib.py +76 -14
  68. pyxllib/xl.py +2 -1
  69. pyxllib-0.3.197.dist-info/METADATA +48 -0
  70. pyxllib-0.3.197.dist-info/RECORD +126 -0
  71. {pyxllib-0.3.96.dist-info → pyxllib-0.3.197.dist-info}/WHEEL +1 -2
  72. pyxllib/ext/autogui/__init__.py +0 -8
  73. pyxllib-0.3.96.dist-info/METADATA +0 -51
  74. pyxllib-0.3.96.dist-info/RECORD +0 -333
  75. pyxllib-0.3.96.dist-info/top_level.txt +0 -2
  76. pyxlpr/ai/__init__.py +0 -5
  77. pyxlpr/ai/clientlib.py +0 -1281
  78. pyxlpr/ai/specialist.py +0 -286
  79. pyxlpr/ai/torch_app.py +0 -172
  80. pyxlpr/ai/xlpaddle.py +0 -655
  81. pyxlpr/ai/xltorch.py +0 -705
  82. pyxlpr/data/__init__.py +0 -11
  83. pyxlpr/data/coco.py +0 -1325
  84. pyxlpr/data/datacls.py +0 -365
  85. pyxlpr/data/datasets.py +0 -200
  86. pyxlpr/data/gptlib.py +0 -1291
  87. pyxlpr/data/icdar/__init__.py +0 -96
  88. pyxlpr/data/icdar/deteval.py +0 -377
  89. pyxlpr/data/icdar/icdar2013.py +0 -341
  90. pyxlpr/data/icdar/iou.py +0 -340
  91. pyxlpr/data/icdar/rrc_evaluation_funcs_1_1.py +0 -463
  92. pyxlpr/data/imtextline.py +0 -473
  93. pyxlpr/data/labelme.py +0 -866
  94. pyxlpr/data/removeline.py +0 -179
  95. pyxlpr/data/specialist.py +0 -57
  96. pyxlpr/eval/__init__.py +0 -85
  97. pyxlpr/paddleocr.py +0 -776
  98. pyxlpr/ppocr/__init__.py +0 -15
  99. pyxlpr/ppocr/configs/rec/multi_language/generate_multi_language_configs.py +0 -226
  100. pyxlpr/ppocr/data/__init__.py +0 -135
  101. pyxlpr/ppocr/data/imaug/ColorJitter.py +0 -26
  102. pyxlpr/ppocr/data/imaug/__init__.py +0 -67
  103. pyxlpr/ppocr/data/imaug/copy_paste.py +0 -170
  104. pyxlpr/ppocr/data/imaug/east_process.py +0 -437
  105. pyxlpr/ppocr/data/imaug/gen_table_mask.py +0 -244
  106. pyxlpr/ppocr/data/imaug/iaa_augment.py +0 -114
  107. pyxlpr/ppocr/data/imaug/label_ops.py +0 -789
  108. pyxlpr/ppocr/data/imaug/make_border_map.py +0 -184
  109. pyxlpr/ppocr/data/imaug/make_pse_gt.py +0 -106
  110. pyxlpr/ppocr/data/imaug/make_shrink_map.py +0 -126
  111. pyxlpr/ppocr/data/imaug/operators.py +0 -433
  112. pyxlpr/ppocr/data/imaug/pg_process.py +0 -906
  113. pyxlpr/ppocr/data/imaug/randaugment.py +0 -143
  114. pyxlpr/ppocr/data/imaug/random_crop_data.py +0 -239
  115. pyxlpr/ppocr/data/imaug/rec_img_aug.py +0 -533
  116. pyxlpr/ppocr/data/imaug/sast_process.py +0 -777
  117. pyxlpr/ppocr/data/imaug/text_image_aug/__init__.py +0 -17
  118. pyxlpr/ppocr/data/imaug/text_image_aug/augment.py +0 -120
  119. pyxlpr/ppocr/data/imaug/text_image_aug/warp_mls.py +0 -168
  120. pyxlpr/ppocr/data/lmdb_dataset.py +0 -115
  121. pyxlpr/ppocr/data/pgnet_dataset.py +0 -104
  122. pyxlpr/ppocr/data/pubtab_dataset.py +0 -107
  123. pyxlpr/ppocr/data/simple_dataset.py +0 -372
  124. pyxlpr/ppocr/losses/__init__.py +0 -61
  125. pyxlpr/ppocr/losses/ace_loss.py +0 -52
  126. pyxlpr/ppocr/losses/basic_loss.py +0 -135
  127. pyxlpr/ppocr/losses/center_loss.py +0 -88
  128. pyxlpr/ppocr/losses/cls_loss.py +0 -30
  129. pyxlpr/ppocr/losses/combined_loss.py +0 -67
  130. pyxlpr/ppocr/losses/det_basic_loss.py +0 -208
  131. pyxlpr/ppocr/losses/det_db_loss.py +0 -80
  132. pyxlpr/ppocr/losses/det_east_loss.py +0 -63
  133. pyxlpr/ppocr/losses/det_pse_loss.py +0 -149
  134. pyxlpr/ppocr/losses/det_sast_loss.py +0 -121
  135. pyxlpr/ppocr/losses/distillation_loss.py +0 -272
  136. pyxlpr/ppocr/losses/e2e_pg_loss.py +0 -140
  137. pyxlpr/ppocr/losses/kie_sdmgr_loss.py +0 -113
  138. pyxlpr/ppocr/losses/rec_aster_loss.py +0 -99
  139. pyxlpr/ppocr/losses/rec_att_loss.py +0 -39
  140. pyxlpr/ppocr/losses/rec_ctc_loss.py +0 -44
  141. pyxlpr/ppocr/losses/rec_enhanced_ctc_loss.py +0 -70
  142. pyxlpr/ppocr/losses/rec_nrtr_loss.py +0 -30
  143. pyxlpr/ppocr/losses/rec_sar_loss.py +0 -28
  144. pyxlpr/ppocr/losses/rec_srn_loss.py +0 -47
  145. pyxlpr/ppocr/losses/table_att_loss.py +0 -109
  146. pyxlpr/ppocr/metrics/__init__.py +0 -44
  147. pyxlpr/ppocr/metrics/cls_metric.py +0 -45
  148. pyxlpr/ppocr/metrics/det_metric.py +0 -82
  149. pyxlpr/ppocr/metrics/distillation_metric.py +0 -73
  150. pyxlpr/ppocr/metrics/e2e_metric.py +0 -86
  151. pyxlpr/ppocr/metrics/eval_det_iou.py +0 -274
  152. pyxlpr/ppocr/metrics/kie_metric.py +0 -70
  153. pyxlpr/ppocr/metrics/rec_metric.py +0 -75
  154. pyxlpr/ppocr/metrics/table_metric.py +0 -50
  155. pyxlpr/ppocr/modeling/architectures/__init__.py +0 -32
  156. pyxlpr/ppocr/modeling/architectures/base_model.py +0 -88
  157. pyxlpr/ppocr/modeling/architectures/distillation_model.py +0 -60
  158. pyxlpr/ppocr/modeling/backbones/__init__.py +0 -54
  159. pyxlpr/ppocr/modeling/backbones/det_mobilenet_v3.py +0 -268
  160. pyxlpr/ppocr/modeling/backbones/det_resnet_vd.py +0 -246
  161. pyxlpr/ppocr/modeling/backbones/det_resnet_vd_sast.py +0 -285
  162. pyxlpr/ppocr/modeling/backbones/e2e_resnet_vd_pg.py +0 -265
  163. pyxlpr/ppocr/modeling/backbones/kie_unet_sdmgr.py +0 -186
  164. pyxlpr/ppocr/modeling/backbones/rec_mobilenet_v3.py +0 -138
  165. pyxlpr/ppocr/modeling/backbones/rec_mv1_enhance.py +0 -258
  166. pyxlpr/ppocr/modeling/backbones/rec_nrtr_mtb.py +0 -48
  167. pyxlpr/ppocr/modeling/backbones/rec_resnet_31.py +0 -210
  168. pyxlpr/ppocr/modeling/backbones/rec_resnet_aster.py +0 -143
  169. pyxlpr/ppocr/modeling/backbones/rec_resnet_fpn.py +0 -307
  170. pyxlpr/ppocr/modeling/backbones/rec_resnet_vd.py +0 -286
  171. pyxlpr/ppocr/modeling/heads/__init__.py +0 -54
  172. pyxlpr/ppocr/modeling/heads/cls_head.py +0 -52
  173. pyxlpr/ppocr/modeling/heads/det_db_head.py +0 -118
  174. pyxlpr/ppocr/modeling/heads/det_east_head.py +0 -121
  175. pyxlpr/ppocr/modeling/heads/det_pse_head.py +0 -37
  176. pyxlpr/ppocr/modeling/heads/det_sast_head.py +0 -128
  177. pyxlpr/ppocr/modeling/heads/e2e_pg_head.py +0 -253
  178. pyxlpr/ppocr/modeling/heads/kie_sdmgr_head.py +0 -206
  179. pyxlpr/ppocr/modeling/heads/multiheadAttention.py +0 -163
  180. pyxlpr/ppocr/modeling/heads/rec_aster_head.py +0 -393
  181. pyxlpr/ppocr/modeling/heads/rec_att_head.py +0 -202
  182. pyxlpr/ppocr/modeling/heads/rec_ctc_head.py +0 -88
  183. pyxlpr/ppocr/modeling/heads/rec_nrtr_head.py +0 -826
  184. pyxlpr/ppocr/modeling/heads/rec_sar_head.py +0 -402
  185. pyxlpr/ppocr/modeling/heads/rec_srn_head.py +0 -280
  186. pyxlpr/ppocr/modeling/heads/self_attention.py +0 -406
  187. pyxlpr/ppocr/modeling/heads/table_att_head.py +0 -246
  188. pyxlpr/ppocr/modeling/necks/__init__.py +0 -32
  189. pyxlpr/ppocr/modeling/necks/db_fpn.py +0 -111
  190. pyxlpr/ppocr/modeling/necks/east_fpn.py +0 -188
  191. pyxlpr/ppocr/modeling/necks/fpn.py +0 -138
  192. pyxlpr/ppocr/modeling/necks/pg_fpn.py +0 -314
  193. pyxlpr/ppocr/modeling/necks/rnn.py +0 -92
  194. pyxlpr/ppocr/modeling/necks/sast_fpn.py +0 -284
  195. pyxlpr/ppocr/modeling/necks/table_fpn.py +0 -110
  196. pyxlpr/ppocr/modeling/transforms/__init__.py +0 -28
  197. pyxlpr/ppocr/modeling/transforms/stn.py +0 -135
  198. pyxlpr/ppocr/modeling/transforms/tps.py +0 -308
  199. pyxlpr/ppocr/modeling/transforms/tps_spatial_transformer.py +0 -156
  200. pyxlpr/ppocr/optimizer/__init__.py +0 -61
  201. pyxlpr/ppocr/optimizer/learning_rate.py +0 -228
  202. pyxlpr/ppocr/optimizer/lr_scheduler.py +0 -49
  203. pyxlpr/ppocr/optimizer/optimizer.py +0 -160
  204. pyxlpr/ppocr/optimizer/regularizer.py +0 -52
  205. pyxlpr/ppocr/postprocess/__init__.py +0 -55
  206. pyxlpr/ppocr/postprocess/cls_postprocess.py +0 -33
  207. pyxlpr/ppocr/postprocess/db_postprocess.py +0 -234
  208. pyxlpr/ppocr/postprocess/east_postprocess.py +0 -143
  209. pyxlpr/ppocr/postprocess/locality_aware_nms.py +0 -200
  210. pyxlpr/ppocr/postprocess/pg_postprocess.py +0 -52
  211. pyxlpr/ppocr/postprocess/pse_postprocess/__init__.py +0 -15
  212. pyxlpr/ppocr/postprocess/pse_postprocess/pse/__init__.py +0 -29
  213. pyxlpr/ppocr/postprocess/pse_postprocess/pse/setup.py +0 -14
  214. pyxlpr/ppocr/postprocess/pse_postprocess/pse_postprocess.py +0 -118
  215. pyxlpr/ppocr/postprocess/rec_postprocess.py +0 -654
  216. pyxlpr/ppocr/postprocess/sast_postprocess.py +0 -355
  217. pyxlpr/ppocr/tools/__init__.py +0 -14
  218. pyxlpr/ppocr/tools/eval.py +0 -83
  219. pyxlpr/ppocr/tools/export_center.py +0 -77
  220. pyxlpr/ppocr/tools/export_model.py +0 -129
  221. pyxlpr/ppocr/tools/infer/predict_cls.py +0 -151
  222. pyxlpr/ppocr/tools/infer/predict_det.py +0 -300
  223. pyxlpr/ppocr/tools/infer/predict_e2e.py +0 -169
  224. pyxlpr/ppocr/tools/infer/predict_rec.py +0 -414
  225. pyxlpr/ppocr/tools/infer/predict_system.py +0 -204
  226. pyxlpr/ppocr/tools/infer/utility.py +0 -629
  227. pyxlpr/ppocr/tools/infer_cls.py +0 -83
  228. pyxlpr/ppocr/tools/infer_det.py +0 -134
  229. pyxlpr/ppocr/tools/infer_e2e.py +0 -122
  230. pyxlpr/ppocr/tools/infer_kie.py +0 -153
  231. pyxlpr/ppocr/tools/infer_rec.py +0 -146
  232. pyxlpr/ppocr/tools/infer_table.py +0 -107
  233. pyxlpr/ppocr/tools/program.py +0 -596
  234. pyxlpr/ppocr/tools/test_hubserving.py +0 -117
  235. pyxlpr/ppocr/tools/train.py +0 -163
  236. pyxlpr/ppocr/tools/xlprog.py +0 -748
  237. pyxlpr/ppocr/utils/EN_symbol_dict.txt +0 -94
  238. pyxlpr/ppocr/utils/__init__.py +0 -24
  239. pyxlpr/ppocr/utils/dict/ar_dict.txt +0 -117
  240. pyxlpr/ppocr/utils/dict/arabic_dict.txt +0 -162
  241. pyxlpr/ppocr/utils/dict/be_dict.txt +0 -145
  242. pyxlpr/ppocr/utils/dict/bg_dict.txt +0 -140
  243. pyxlpr/ppocr/utils/dict/chinese_cht_dict.txt +0 -8421
  244. pyxlpr/ppocr/utils/dict/cyrillic_dict.txt +0 -163
  245. pyxlpr/ppocr/utils/dict/devanagari_dict.txt +0 -167
  246. pyxlpr/ppocr/utils/dict/en_dict.txt +0 -63
  247. pyxlpr/ppocr/utils/dict/fa_dict.txt +0 -136
  248. pyxlpr/ppocr/utils/dict/french_dict.txt +0 -136
  249. pyxlpr/ppocr/utils/dict/german_dict.txt +0 -143
  250. pyxlpr/ppocr/utils/dict/hi_dict.txt +0 -162
  251. pyxlpr/ppocr/utils/dict/it_dict.txt +0 -118
  252. pyxlpr/ppocr/utils/dict/japan_dict.txt +0 -4399
  253. pyxlpr/ppocr/utils/dict/ka_dict.txt +0 -153
  254. pyxlpr/ppocr/utils/dict/korean_dict.txt +0 -3688
  255. pyxlpr/ppocr/utils/dict/latin_dict.txt +0 -185
  256. pyxlpr/ppocr/utils/dict/mr_dict.txt +0 -153
  257. pyxlpr/ppocr/utils/dict/ne_dict.txt +0 -153
  258. pyxlpr/ppocr/utils/dict/oc_dict.txt +0 -96
  259. pyxlpr/ppocr/utils/dict/pu_dict.txt +0 -130
  260. pyxlpr/ppocr/utils/dict/rs_dict.txt +0 -91
  261. pyxlpr/ppocr/utils/dict/rsc_dict.txt +0 -134
  262. pyxlpr/ppocr/utils/dict/ru_dict.txt +0 -125
  263. pyxlpr/ppocr/utils/dict/ta_dict.txt +0 -128
  264. pyxlpr/ppocr/utils/dict/table_dict.txt +0 -277
  265. pyxlpr/ppocr/utils/dict/table_structure_dict.txt +0 -2759
  266. pyxlpr/ppocr/utils/dict/te_dict.txt +0 -151
  267. pyxlpr/ppocr/utils/dict/ug_dict.txt +0 -114
  268. pyxlpr/ppocr/utils/dict/uk_dict.txt +0 -142
  269. pyxlpr/ppocr/utils/dict/ur_dict.txt +0 -137
  270. pyxlpr/ppocr/utils/dict/xi_dict.txt +0 -110
  271. pyxlpr/ppocr/utils/dict90.txt +0 -90
  272. pyxlpr/ppocr/utils/e2e_metric/Deteval.py +0 -574
  273. pyxlpr/ppocr/utils/e2e_metric/polygon_fast.py +0 -83
  274. pyxlpr/ppocr/utils/e2e_utils/extract_batchsize.py +0 -87
  275. pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_fast.py +0 -457
  276. pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_slow.py +0 -592
  277. pyxlpr/ppocr/utils/e2e_utils/pgnet_pp_utils.py +0 -162
  278. pyxlpr/ppocr/utils/e2e_utils/visual.py +0 -162
  279. pyxlpr/ppocr/utils/en_dict.txt +0 -95
  280. pyxlpr/ppocr/utils/gen_label.py +0 -81
  281. pyxlpr/ppocr/utils/ic15_dict.txt +0 -36
  282. pyxlpr/ppocr/utils/iou.py +0 -54
  283. pyxlpr/ppocr/utils/logging.py +0 -69
  284. pyxlpr/ppocr/utils/network.py +0 -84
  285. pyxlpr/ppocr/utils/ppocr_keys_v1.txt +0 -6623
  286. pyxlpr/ppocr/utils/profiler.py +0 -110
  287. pyxlpr/ppocr/utils/save_load.py +0 -150
  288. pyxlpr/ppocr/utils/stats.py +0 -72
  289. pyxlpr/ppocr/utils/utility.py +0 -80
  290. pyxlpr/ppstructure/__init__.py +0 -13
  291. pyxlpr/ppstructure/predict_system.py +0 -187
  292. pyxlpr/ppstructure/table/__init__.py +0 -13
  293. pyxlpr/ppstructure/table/eval_table.py +0 -72
  294. pyxlpr/ppstructure/table/matcher.py +0 -192
  295. pyxlpr/ppstructure/table/predict_structure.py +0 -136
  296. pyxlpr/ppstructure/table/predict_table.py +0 -221
  297. pyxlpr/ppstructure/table/table_metric/__init__.py +0 -16
  298. pyxlpr/ppstructure/table/table_metric/parallel.py +0 -51
  299. pyxlpr/ppstructure/table/table_metric/table_metric.py +0 -247
  300. pyxlpr/ppstructure/table/tablepyxl/__init__.py +0 -13
  301. pyxlpr/ppstructure/table/tablepyxl/style.py +0 -283
  302. pyxlpr/ppstructure/table/tablepyxl/tablepyxl.py +0 -118
  303. pyxlpr/ppstructure/utility.py +0 -71
  304. pyxlpr/xlai.py +0 -10
  305. /pyxllib/{ext/autogui → autogui}/virtualkey.py +0 -0
  306. {pyxllib-0.3.96.dist-info → pyxllib-0.3.197.dist-info/licenses}/LICENSE +0 -0
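Taken together, the list shows two structural changes: the bundled pyxlpr OCR/PaddleOCR tree is dropped from the wheel entirely, and the autogui helpers move out of pyxllib.ext.autogui into a new top-level pyxllib.autogui package. A minimal post-upgrade sanity check is sketched below; the module paths come from the file list above, while the script itself is only illustrative and not part of pyxllib.

    # Sketch: verify the installed layout after upgrading to 0.3.197.
    # Standard library only; the asserted module names are taken from the file list above.
    from importlib.util import find_spec

    # the pyxlpr subpackage (bundled PaddleOCR code) no longer ships with the wheel
    assert find_spec("pyxlpr") is None, "pyxlpr should be gone in 0.3.197"

    # the autogui modules are now under pyxllib.autogui instead of pyxllib.ext.autogui
    for name in ("pyxllib.autogui", "pyxllib.autogui.autogui", "pyxllib.autogui.virtualkey"):
        assert find_spec(name) is not None, f"{name} should be provided by 0.3.197"

    print("installed layout matches the 0.3.96 → 0.3.197 diff")

The hunks below show the contents of some of the deleted pyxlpr/ppocr files.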
pyxlpr/ppocr/losses/rec_aster_loss.py +0 -99
@@ -1,99 +0,0 @@
- # copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
-
-
- class CosineEmbeddingLoss(nn.Layer):
-     def __init__(self, margin=0.):
-         super(CosineEmbeddingLoss, self).__init__()
-         self.margin = margin
-         self.epsilon = 1e-12
-
-     def forward(self, x1, x2, target):
-         similarity = paddle.fluid.layers.reduce_sum(
-             x1 * x2, dim=-1) / (paddle.norm(
-                 x1, axis=-1) * paddle.norm(
-                     x2, axis=-1) + self.epsilon)
-         one_list = paddle.full_like(target, fill_value=1)
-         out = paddle.fluid.layers.reduce_mean(
-             paddle.where(
-                 paddle.equal(target, one_list), 1. - similarity,
-                 paddle.maximum(
-                     paddle.zeros_like(similarity), similarity - self.margin)))
-
-         return out
-
-
- class AsterLoss(nn.Layer):
-     def __init__(self,
-                  weight=None,
-                  size_average=True,
-                  ignore_index=-100,
-                  sequence_normalize=False,
-                  sample_normalize=True,
-                  **kwargs):
-         super(AsterLoss, self).__init__()
-         self.weight = weight
-         self.size_average = size_average
-         self.ignore_index = ignore_index
-         self.sequence_normalize = sequence_normalize
-         self.sample_normalize = sample_normalize
-         self.loss_sem = CosineEmbeddingLoss()
-         self.is_cosin_loss = True
-         self.loss_func_rec = nn.CrossEntropyLoss(weight=None, reduction='none')
-
-     def forward(self, predicts, batch):
-         targets = batch[1].astype("int64")
-         label_lengths = batch[2].astype('int64')
-         sem_target = batch[3].astype('float32')
-         embedding_vectors = predicts['embedding_vectors']
-         rec_pred = predicts['rec_pred']
-
-         if not self.is_cosin_loss:
-             sem_loss = paddle.sum(self.loss_sem(embedding_vectors, sem_target))
-         else:
-             label_target = paddle.ones([embedding_vectors.shape[0]])
-             sem_loss = paddle.sum(
-                 self.loss_sem(embedding_vectors, sem_target, label_target))
-
-         # rec loss
-         batch_size, def_max_length = targets.shape[0], targets.shape[1]
-
-         mask = paddle.zeros([batch_size, def_max_length])
-         for i in range(batch_size):
-             mask[i, :label_lengths[i]] = 1
-         mask = paddle.cast(mask, "float32")
-         max_length = max(label_lengths)
-         assert max_length == rec_pred.shape[1]
-         targets = targets[:, :max_length]
-         mask = mask[:, :max_length]
-         rec_pred = paddle.reshape(rec_pred, [-1, rec_pred.shape[2]])
-         input = nn.functional.log_softmax(rec_pred, axis=1)
-         targets = paddle.reshape(targets, [-1, 1])
-         mask = paddle.reshape(mask, [-1, 1])
-         output = -paddle.index_sample(input, index=targets) * mask
-         output = paddle.sum(output)
-         if self.sequence_normalize:
-             output = output / paddle.sum(mask)
-         if self.sample_normalize:
-             output = output / batch_size
-
-         loss = output + sem_loss * 0.1
-         return {'loss': loss}
pyxlpr/ppocr/losses/rec_att_loss.py +0 -39
@@ -1,39 +0,0 @@
- # copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
-
-
- class AttentionLoss(nn.Layer):
-     def __init__(self, **kwargs):
-         super(AttentionLoss, self).__init__()
-         self.loss_func = nn.CrossEntropyLoss(weight=None, reduction='none')
-
-     def forward(self, predicts, batch):
-         targets = batch[1].astype("int64")
-         label_lengths = batch[2].astype('int64')
-         batch_size, num_steps, num_classes = predicts.shape[0], predicts.shape[
-             1], predicts.shape[2]
-         assert len(targets.shape) == len(list(predicts.shape)) - 1, \
-             "The target's shape and inputs's shape is [N, d] and [N, num_steps]"
-
-         inputs = paddle.reshape(predicts, [-1, predicts.shape[-1]])
-         targets = paddle.reshape(targets, [-1])
-
-         return {'loss': paddle.sum(self.loss_func(inputs, targets))}
pyxlpr/ppocr/losses/rec_ctc_loss.py +0 -44
@@ -1,44 +0,0 @@
- # copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
-
-
- class CTCLoss(nn.Layer):
-     def __init__(self, use_focal_loss=False, **kwargs):
-         super(CTCLoss, self).__init__()
-         self.loss_func = nn.CTCLoss(blank=0, reduction='none')
-         self.use_focal_loss = use_focal_loss
-
-     def forward(self, predicts, batch):
-         if isinstance(predicts, (list, tuple)):
-             predicts = predicts[-1]
-         predicts = predicts.transpose((1, 0, 2))
-         N, B, _ = predicts.shape
-         preds_lengths = paddle.to_tensor([N] * B, dtype='int64')
-         labels = batch[1].astype("int32")
-         label_lengths = batch[2].astype('int64')
-         loss = self.loss_func(predicts, labels, preds_lengths, label_lengths)
-         if self.use_focal_loss:
-             weight = paddle.exp(-loss)
-             weight = paddle.subtract(paddle.to_tensor([1.0]), weight)
-             weight = paddle.square(weight)
-             loss = paddle.multiply(loss, weight)
-         loss = loss.mean()
-         return {'loss': loss}
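The use_focal_loss branch in the hunk above re-weights the per-sample CTC loss by w = (1 − e^(−loss))² before averaging, so well-fit samples contribute little. A small NumPy illustration of just that weighting (not the deleted paddle code; the loss values are made up):

    import numpy as np

    per_sample_ctc = np.array([0.05, 0.5, 3.0])    # hypothetical per-sample CTC losses
    weight = (1.0 - np.exp(-per_sample_ctc)) ** 2  # focal-style weight from the hunk above
    print(weight)                                  # ≈ [0.002, 0.155, 0.903]
    print((per_sample_ctc * weight).mean())        # what would be returned as {'loss': ...}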
pyxlpr/ppocr/losses/rec_enhanced_ctc_loss.py +0 -70
@@ -1,70 +0,0 @@
- # copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
- from .ace_loss import ACELoss
- from .center_loss import CenterLoss
- from .rec_ctc_loss import CTCLoss
-
-
- class EnhancedCTCLoss(nn.Layer):
-     def __init__(self,
-                  use_focal_loss=False,
-                  use_ace_loss=False,
-                  ace_loss_weight=0.1,
-                  use_center_loss=False,
-                  center_loss_weight=0.05,
-                  num_classes=6625,
-                  feat_dim=96,
-                  init_center=False,
-                  center_file_path=None,
-                  **kwargs):
-         super(EnhancedCTCLoss, self).__init__()
-         self.ctc_loss_func = CTCLoss(use_focal_loss=use_focal_loss)
-
-         self.use_ace_loss = False
-         if use_ace_loss:
-             self.use_ace_loss = use_ace_loss
-             self.ace_loss_func = ACELoss()
-             self.ace_loss_weight = ace_loss_weight
-
-         self.use_center_loss = False
-         if use_center_loss:
-             self.use_center_loss = use_center_loss
-             self.center_loss_func = CenterLoss(
-                 num_classes=num_classes,
-                 feat_dim=feat_dim,
-                 init_center=init_center,
-                 center_file_path=center_file_path)
-             self.center_loss_weight = center_loss_weight
-
-     def __call__(self, predicts, batch):
-         loss = self.ctc_loss_func(predicts, batch)["loss"]
-
-         if self.use_center_loss:
-             center_loss = self.center_loss_func(
-                 predicts, batch)["loss_center"] * self.center_loss_weight
-             loss = loss + center_loss
-
-         if self.use_ace_loss:
-             ace_loss = self.ace_loss_func(
-                 predicts, batch)["loss_ace"] * self.ace_loss_weight
-             loss = loss + ace_loss
-
-         return {'enhanced_ctc_loss': loss}
pyxlpr/ppocr/losses/rec_nrtr_loss.py +0 -30
@@ -1,30 +0,0 @@
- import paddle
- from paddle import nn
- import paddle.nn.functional as F
-
-
- class NRTRLoss(nn.Layer):
-     def __init__(self, smoothing=True, **kwargs):
-         super(NRTRLoss, self).__init__()
-         self.loss_func = nn.CrossEntropyLoss(reduction='mean', ignore_index=0)
-         self.smoothing = smoothing
-
-     def forward(self, pred, batch):
-         pred = pred.reshape([-1, pred.shape[2]])
-         max_len = batch[2].max()
-         tgt = batch[1][:, 1:2 + max_len]
-         tgt = tgt.reshape([-1])
-         if self.smoothing:
-             eps = 0.1
-             n_class = pred.shape[1]
-             one_hot = F.one_hot(tgt, pred.shape[1])
-             one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)
-             log_prb = F.log_softmax(pred, axis=1)
-             non_pad_mask = paddle.not_equal(
-                 tgt, paddle.zeros(
-                     tgt.shape, dtype=tgt.dtype))
-             loss = -(one_hot * log_prb).sum(axis=1)
-             loss = loss.masked_select(non_pad_mask).mean()
-         else:
-             loss = self.loss_func(pred, tgt)
-         return {'loss': loss}
pyxlpr/ppocr/losses/rec_sar_loss.py +0 -28
@@ -1,28 +0,0 @@
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
-
-
- class SARLoss(nn.Layer):
-     def __init__(self, **kwargs):
-         super(SARLoss, self).__init__()
-         self.loss_func = paddle.nn.loss.CrossEntropyLoss(
-             reduction="mean", ignore_index=92)
-
-     def forward(self, predicts, batch):
-         predict = predicts[:, :
-                            -1, :]  # ignore last index of outputs to be in same seq_len with targets
-         label = batch[1].astype(
-             "int64")[:, 1:]  # ignore first index of target in loss calculation
-         batch_size, num_steps, num_classes = predict.shape[0], predict.shape[
-             1], predict.shape[2]
-         assert len(label.shape) == len(list(predict.shape)) - 1, \
-             "The target's shape and inputs's shape is [N, d] and [N, num_steps]"
-
-         inputs = paddle.reshape(predict, [-1, num_classes])
-         targets = paddle.reshape(label, [-1])
-         loss = self.loss_func(inputs, targets)
-         return {'loss': loss}
pyxlpr/ppocr/losses/rec_srn_loss.py +0 -47
@@ -1,47 +0,0 @@
- # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
-
-
- class SRNLoss(nn.Layer):
-     def __init__(self, **kwargs):
-         super(SRNLoss, self).__init__()
-         self.loss_func = paddle.nn.loss.CrossEntropyLoss(reduction="sum")
-
-     def forward(self, predicts, batch):
-         predict = predicts['predict']
-         word_predict = predicts['word_out']
-         gsrm_predict = predicts['gsrm_out']
-         label = batch[1]
-
-         casted_label = paddle.cast(x=label, dtype='int64')
-         casted_label = paddle.reshape(x=casted_label, shape=[-1, 1])
-
-         cost_word = self.loss_func(word_predict, label=casted_label)
-         cost_gsrm = self.loss_func(gsrm_predict, label=casted_label)
-         cost_vsfd = self.loss_func(predict, label=casted_label)
-
-         cost_word = paddle.reshape(x=paddle.sum(cost_word), shape=[1])
-         cost_gsrm = paddle.reshape(x=paddle.sum(cost_gsrm), shape=[1])
-         cost_vsfd = paddle.reshape(x=paddle.sum(cost_vsfd), shape=[1])
-
-         sum_cost = cost_word * 3.0 + cost_vsfd + cost_gsrm * 0.15
-
-         return {'loss': sum_cost, 'word_loss': cost_word, 'img_loss': cost_vsfd}
pyxlpr/ppocr/losses/table_att_loss.py +0 -109
@@ -1,109 +0,0 @@
- # copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import paddle
- from paddle import nn
- from paddle.nn import functional as F
- from paddle import fluid
-
- class TableAttentionLoss(nn.Layer):
-     def __init__(self, structure_weight, loc_weight, use_giou=False, giou_weight=1.0, **kwargs):
-         super(TableAttentionLoss, self).__init__()
-         self.loss_func = nn.CrossEntropyLoss(weight=None, reduction='none')
-         self.structure_weight = structure_weight
-         self.loc_weight = loc_weight
-         self.use_giou = use_giou
-         self.giou_weight = giou_weight
-
-     def giou_loss(self, preds, bbox, eps=1e-7, reduction='mean'):
-         '''
-         :param preds:[[x1,y1,x2,y2], [x1,y1,x2,y2],,,]
-         :param bbox:[[x1,y1,x2,y2], [x1,y1,x2,y2],,,]
-         :return: loss
-         '''
-         ix1 = fluid.layers.elementwise_max(preds[:, 0], bbox[:, 0])
-         iy1 = fluid.layers.elementwise_max(preds[:, 1], bbox[:, 1])
-         ix2 = fluid.layers.elementwise_min(preds[:, 2], bbox[:, 2])
-         iy2 = fluid.layers.elementwise_min(preds[:, 3], bbox[:, 3])
-
-         iw = fluid.layers.clip(ix2 - ix1 + 1e-3, 0., 1e10)
-         ih = fluid.layers.clip(iy2 - iy1 + 1e-3, 0., 1e10)
-
-         # overlap
-         inters = iw * ih
-
-         # union
-         uni = (preds[:, 2] - preds[:, 0] + 1e-3) * (preds[:, 3] - preds[:, 1] + 1e-3
-               ) + (bbox[:, 2] - bbox[:, 0] + 1e-3) * (
-                   bbox[:, 3] - bbox[:, 1] + 1e-3) - inters + eps
-
-         # ious
-         ious = inters / uni
-
-         ex1 = fluid.layers.elementwise_min(preds[:, 0], bbox[:, 0])
-         ey1 = fluid.layers.elementwise_min(preds[:, 1], bbox[:, 1])
-         ex2 = fluid.layers.elementwise_max(preds[:, 2], bbox[:, 2])
-         ey2 = fluid.layers.elementwise_max(preds[:, 3], bbox[:, 3])
-         ew = fluid.layers.clip(ex2 - ex1 + 1e-3, 0., 1e10)
-         eh = fluid.layers.clip(ey2 - ey1 + 1e-3, 0., 1e10)
-
-         # enclose erea
-         enclose = ew * eh + eps
-         giou = ious - (enclose - uni) / enclose
-
-         loss = 1 - giou
-
-         if reduction == 'mean':
-             loss = paddle.mean(loss)
-         elif reduction == 'sum':
-             loss = paddle.sum(loss)
-         else:
-             raise NotImplementedError
-         return loss
-
-     def forward(self, predicts, batch):
-         structure_probs = predicts['structure_probs']
-         structure_targets = batch[1].astype("int64")
-         structure_targets = structure_targets[:, 1:]
-         if len(batch) == 6:
-             structure_mask = batch[5].astype("int64")
-             structure_mask = structure_mask[:, 1:]
-             structure_mask = paddle.reshape(structure_mask, [-1])
-         structure_probs = paddle.reshape(structure_probs, [-1, structure_probs.shape[-1]])
-         structure_targets = paddle.reshape(structure_targets, [-1])
-         structure_loss = self.loss_func(structure_probs, structure_targets)
-
-         if len(batch) == 6:
-             structure_loss = structure_loss * structure_mask
-
-         # structure_loss = paddle.sum(structure_loss) * self.structure_weight
-         structure_loss = paddle.mean(structure_loss) * self.structure_weight
-
-         loc_preds = predicts['loc_preds']
-         loc_targets = batch[2].astype("float32")
-         loc_targets_mask = batch[4].astype("float32")
-         loc_targets = loc_targets[:, 1:, :]
-         loc_targets_mask = loc_targets_mask[:, 1:, :]
-         loc_loss = F.mse_loss(loc_preds * loc_targets_mask, loc_targets) * self.loc_weight
-         if self.use_giou:
-             loc_loss_giou = self.giou_loss(loc_preds * loc_targets_mask, loc_targets) * self.giou_weight
-             total_loss = structure_loss + loc_loss + loc_loss_giou
-             return {'loss':total_loss, "structure_loss":structure_loss, "loc_loss":loc_loss, "loc_loss_giou":loc_loss_giou}
-         else:
-             total_loss = structure_loss + loc_loss
-             return {'loss':total_loss, "structure_loss":structure_loss, "loc_loss":loc_loss}
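The giou_loss method above is the standard generalized IoU: GIoU = IoU − (enclosing area − union) / enclosing area, and the loss is 1 − GIoU. A NumPy restatement of the same formula, dropping the 1e-3 smoothing terms of the deleted paddle.fluid code for clarity (boxes are rows of [x1, y1, x2, y2] as in the docstring):

    import numpy as np

    def giou_loss_np(preds, bbox, eps=1e-7):
        """Mean (1 - GIoU) over paired [x1, y1, x2, y2] boxes."""
        # intersection
        iw = np.clip(np.minimum(preds[:, 2], bbox[:, 2]) - np.maximum(preds[:, 0], bbox[:, 0]), 0, None)
        ih = np.clip(np.minimum(preds[:, 3], bbox[:, 3]) - np.maximum(preds[:, 1], bbox[:, 1]), 0, None)
        inters = iw * ih
        # union
        area_p = (preds[:, 2] - preds[:, 0]) * (preds[:, 3] - preds[:, 1])
        area_b = (bbox[:, 2] - bbox[:, 0]) * (bbox[:, 3] - bbox[:, 1])
        uni = area_p + area_b - inters + eps
        # smallest enclosing box
        ew = np.maximum(preds[:, 2], bbox[:, 2]) - np.minimum(preds[:, 0], bbox[:, 0])
        eh = np.maximum(preds[:, 3], bbox[:, 3]) - np.minimum(preds[:, 1], bbox[:, 1])
        enclose = ew * eh + eps
        giou = inters / uni - (enclose - uni) / enclose
        return float(np.mean(1.0 - giou))

    # identical boxes give ~0, far-apart boxes approach 2
    print(giou_loss_np(np.array([[0., 0., 2., 2.]]), np.array([[0., 0., 2., 2.]])))
    print(giou_loss_np(np.array([[0., 0., 1., 1.]]), np.array([[9., 9., 10., 10.]])))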
pyxlpr/ppocr/metrics/__init__.py +0 -44
@@ -1,44 +0,0 @@
- # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
- from __future__ import unicode_literals
-
- import copy
-
- __all__ = ["build_metric"]
-
- from .det_metric import DetMetric
- from .rec_metric import RecMetric
- from .cls_metric import ClsMetric
- from .e2e_metric import E2EMetric
- from .distillation_metric import DistillationMetric
- from .table_metric import TableMetric
- from .kie_metric import KIEMetric
-
-
- def build_metric(config):
-     support_dict = [
-         "DetMetric", "RecMetric", "ClsMetric", "E2EMetric",
-         "DistillationMetric", "TableMetric", 'KIEMetric'
-     ]
-
-     config = copy.deepcopy(config)
-     module_name = config.pop("name")
-     assert module_name in support_dict, Exception(
-         "metric only support {}".format(support_dict))
-     module_class = eval(module_name)(**config)
-     return module_class
pyxlpr/ppocr/metrics/cls_metric.py +0 -45
@@ -1,45 +0,0 @@
- # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
-
- class ClsMetric(object):
-     def __init__(self, main_indicator='acc', **kwargs):
-         self.main_indicator = main_indicator
-         self.reset()
-
-     def __call__(self, pred_label, *args, **kwargs):
-         preds, labels = pred_label
-         correct_num = 0
-         all_num = 0
-         for (pred, pred_conf), (target, _) in zip(preds, labels):
-             if pred == target:
-                 correct_num += 1
-             all_num += 1
-         self.correct_num += correct_num
-         self.all_num += all_num
-         return {'acc': correct_num / all_num, }
-
-     def get_metric(self):
-         """
-         return metrics {
-             'acc': 0
-         }
-         """
-         acc = self.correct_num / self.all_num
-         self.reset()
-         return {'acc': acc}
-
-     def reset(self):
-         self.correct_num = 0
-         self.all_num = 0
pyxlpr/ppocr/metrics/det_metric.py +0 -82
@@ -1,82 +0,0 @@
- # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #    http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- __all__ = ['DetMetric']
-
- from .eval_det_iou import DetectionIoUEvaluator
-
-
- class DetMetric(object):
-     def __init__(self, main_indicator='hmean', **kwargs):
-         self.evaluator = DetectionIoUEvaluator()
-         self.main_indicator = main_indicator
-         self.reset()
-
-     def __call__(self, preds, batch, **kwargs):
-         '''
-         batch: a list produced by dataloaders.
-             image: np.ndarray of shape (N, C, H, W).
-             ratio_list: np.ndarray of shape(N,2)
-             polygons: np.ndarray of shape (N, K, 4, 2), the polygons of objective regions.
-             ignore_tags: np.ndarray of shape (N, K), indicates whether a region is ignorable or not.
-         preds: a list of dict produced by post process
-             points: np.ndarray of shape (N, K, 4, 2), the polygons of objective regions.
-
-         Notes added from my own debugging:
-         list batch:
-             image: (1, 3, 736, 1280), the raw image data; normally only one image (N=1), presumably because multi-image batching is awkward for detection
-             ratio_list: (1, 4), does not match the description above; it should be (N, 4) here
-             polygons: (1, 4, 4, 2), K is the number of boxes; these are the ground-truth annotation boxes
-             ignore_tags: (1, 4), whether each region is a hard sample; usually all False
-         list[dict] preds: usually has length 1, so just take preds[0]['points']
-             points: (3, 4, 2), here 3 boxes were detected
-         '''
-         gt_polyons_batch = batch[2]
-         ignore_tags_batch = batch[3]
-         for pred, gt_polyons, ignore_tags in zip(preds, gt_polyons_batch,
-                                                  ignore_tags_batch):
-             # prepare gt
-             gt_info_list = [{
-                 'points': gt_polyon,
-                 'text': '',
-                 'ignore': ignore_tag
-             } for gt_polyon, ignore_tag in zip(gt_polyons, ignore_tags)]
-             # prepare det
-             det_info_list = [{
-                 'points': det_polyon,
-                 'text': ''
-             } for det_polyon in pred['points']]
-             result = self.evaluator.evaluate_image(gt_info_list, det_info_list)
-             self.results.append(result)
-
-
-     def get_metric(self):
-         """
-         return metrics {
-             'precision': 0,
-             'recall': 0,
-             'hmean': 0
-         }
-         """
-
-         metircs = self.evaluator.combine_results(self.results)
-         self.reset()
-         return metircs
-
-     def reset(self):
-         self.results = []  # clear results
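Both ClsMetric and DetMetric above follow the same accumulate-then-reset protocol: the metric object is called once per evaluation batch and stores partial results, then get_metric() aggregates everything seen so far and resets the counters. A sketch of that calling convention for ClsMetric; eval_loader, model and post_process are hypothetical stand-ins, and only the metric calls mirror the deleted code:

    metric = ClsMetric(main_indicator='acc')

    for batch in eval_loader:                  # hypothetical dataloader
        preds = post_process(model(batch[0]))  # decoded as [(label, confidence), ...]
        labels = batch[1]                      # same [(label, confidence), ...] layout
        metric((preds, labels))                # per-batch call accumulates correct_num / all_num

    result = metric.get_metric()               # {'acc': ...}; also resets the internal counters
    print(result[metric.main_indicator])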