pyxllib 0.3.96__py3-none-any.whl → 0.3.197__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (306)
  1. pyxllib/algo/geo.py +12 -0
  2. pyxllib/algo/intervals.py +1 -1
  3. pyxllib/algo/matcher.py +78 -0
  4. pyxllib/algo/pupil.py +187 -19
  5. pyxllib/algo/specialist.py +2 -1
  6. pyxllib/algo/stat.py +38 -2
  7. {pyxlpr → pyxllib/autogui}/__init__.py +1 -1
  8. pyxllib/autogui/activewin.py +246 -0
  9. pyxllib/autogui/all.py +9 -0
  10. pyxllib/{ext/autogui → autogui}/autogui.py +40 -11
  11. pyxllib/autogui/uiautolib.py +362 -0
  12. pyxllib/autogui/wechat.py +827 -0
  13. pyxllib/autogui/wechat_msg.py +421 -0
  14. pyxllib/autogui/wxautolib.py +84 -0
  15. pyxllib/cv/slidercaptcha.py +137 -0
  16. pyxllib/data/echarts.py +123 -12
  17. pyxllib/data/jsonlib.py +89 -0
  18. pyxllib/data/pglib.py +514 -30
  19. pyxllib/data/sqlite.py +231 -4
  20. pyxllib/ext/JLineViewer.py +14 -1
  21. pyxllib/ext/drissionlib.py +277 -0
  22. pyxllib/ext/kq5034lib.py +0 -1594
  23. pyxllib/ext/robustprocfile.py +497 -0
  24. pyxllib/ext/unixlib.py +6 -5
  25. pyxllib/ext/utools.py +108 -95
  26. pyxllib/ext/webhook.py +32 -14
  27. pyxllib/ext/wjxlib.py +88 -0
  28. pyxllib/ext/wpsapi.py +124 -0
  29. pyxllib/ext/xlwork.py +9 -0
  30. pyxllib/ext/yuquelib.py +1003 -71
  31. pyxllib/file/docxlib.py +1 -1
  32. pyxllib/file/libreoffice.py +165 -0
  33. pyxllib/file/movielib.py +9 -0
  34. pyxllib/file/packlib/__init__.py +112 -75
  35. pyxllib/file/pdflib.py +1 -1
  36. pyxllib/file/pupil.py +1 -1
  37. pyxllib/file/specialist/dirlib.py +1 -1
  38. pyxllib/file/specialist/download.py +10 -3
  39. pyxllib/file/specialist/filelib.py +266 -55
  40. pyxllib/file/xlsxlib.py +205 -50
  41. pyxllib/file/xlsyncfile.py +341 -0
  42. pyxllib/prog/cachetools.py +64 -0
  43. pyxllib/prog/filelock.py +42 -0
  44. pyxllib/prog/multiprogs.py +940 -0
  45. pyxllib/prog/newbie.py +9 -2
  46. pyxllib/prog/pupil.py +129 -60
  47. pyxllib/prog/specialist/__init__.py +176 -2
  48. pyxllib/prog/specialist/bc.py +5 -2
  49. pyxllib/prog/specialist/browser.py +11 -2
  50. pyxllib/prog/specialist/datetime.py +68 -0
  51. pyxllib/prog/specialist/tictoc.py +12 -13
  52. pyxllib/prog/specialist/xllog.py +5 -5
  53. pyxllib/prog/xlosenv.py +7 -0
  54. pyxllib/text/airscript.js +744 -0
  55. pyxllib/text/charclasslib.py +17 -5
  56. pyxllib/text/jiebalib.py +6 -3
  57. pyxllib/text/jinjalib.py +32 -0
  58. pyxllib/text/jsa_ai_prompt.md +271 -0
  59. pyxllib/text/jscode.py +159 -4
  60. pyxllib/text/nestenv.py +1 -1
  61. pyxllib/text/newbie.py +12 -0
  62. pyxllib/text/pupil/common.py +26 -0
  63. pyxllib/text/specialist/ptag.py +2 -2
  64. pyxllib/text/templates/echart_base.html +11 -0
  65. pyxllib/text/templates/highlight_code.html +17 -0
  66. pyxllib/text/templates/latex_editor.html +103 -0
  67. pyxllib/text/xmllib.py +76 -14
  68. pyxllib/xl.py +2 -1
  69. pyxllib-0.3.197.dist-info/METADATA +48 -0
  70. pyxllib-0.3.197.dist-info/RECORD +126 -0
  71. {pyxllib-0.3.96.dist-info → pyxllib-0.3.197.dist-info}/WHEEL +1 -2
  72. pyxllib/ext/autogui/__init__.py +0 -8
  73. pyxllib-0.3.96.dist-info/METADATA +0 -51
  74. pyxllib-0.3.96.dist-info/RECORD +0 -333
  75. pyxllib-0.3.96.dist-info/top_level.txt +0 -2
  76. pyxlpr/ai/__init__.py +0 -5
  77. pyxlpr/ai/clientlib.py +0 -1281
  78. pyxlpr/ai/specialist.py +0 -286
  79. pyxlpr/ai/torch_app.py +0 -172
  80. pyxlpr/ai/xlpaddle.py +0 -655
  81. pyxlpr/ai/xltorch.py +0 -705
  82. pyxlpr/data/__init__.py +0 -11
  83. pyxlpr/data/coco.py +0 -1325
  84. pyxlpr/data/datacls.py +0 -365
  85. pyxlpr/data/datasets.py +0 -200
  86. pyxlpr/data/gptlib.py +0 -1291
  87. pyxlpr/data/icdar/__init__.py +0 -96
  88. pyxlpr/data/icdar/deteval.py +0 -377
  89. pyxlpr/data/icdar/icdar2013.py +0 -341
  90. pyxlpr/data/icdar/iou.py +0 -340
  91. pyxlpr/data/icdar/rrc_evaluation_funcs_1_1.py +0 -463
  92. pyxlpr/data/imtextline.py +0 -473
  93. pyxlpr/data/labelme.py +0 -866
  94. pyxlpr/data/removeline.py +0 -179
  95. pyxlpr/data/specialist.py +0 -57
  96. pyxlpr/eval/__init__.py +0 -85
  97. pyxlpr/paddleocr.py +0 -776
  98. pyxlpr/ppocr/__init__.py +0 -15
  99. pyxlpr/ppocr/configs/rec/multi_language/generate_multi_language_configs.py +0 -226
  100. pyxlpr/ppocr/data/__init__.py +0 -135
  101. pyxlpr/ppocr/data/imaug/ColorJitter.py +0 -26
  102. pyxlpr/ppocr/data/imaug/__init__.py +0 -67
  103. pyxlpr/ppocr/data/imaug/copy_paste.py +0 -170
  104. pyxlpr/ppocr/data/imaug/east_process.py +0 -437
  105. pyxlpr/ppocr/data/imaug/gen_table_mask.py +0 -244
  106. pyxlpr/ppocr/data/imaug/iaa_augment.py +0 -114
  107. pyxlpr/ppocr/data/imaug/label_ops.py +0 -789
  108. pyxlpr/ppocr/data/imaug/make_border_map.py +0 -184
  109. pyxlpr/ppocr/data/imaug/make_pse_gt.py +0 -106
  110. pyxlpr/ppocr/data/imaug/make_shrink_map.py +0 -126
  111. pyxlpr/ppocr/data/imaug/operators.py +0 -433
  112. pyxlpr/ppocr/data/imaug/pg_process.py +0 -906
  113. pyxlpr/ppocr/data/imaug/randaugment.py +0 -143
  114. pyxlpr/ppocr/data/imaug/random_crop_data.py +0 -239
  115. pyxlpr/ppocr/data/imaug/rec_img_aug.py +0 -533
  116. pyxlpr/ppocr/data/imaug/sast_process.py +0 -777
  117. pyxlpr/ppocr/data/imaug/text_image_aug/__init__.py +0 -17
  118. pyxlpr/ppocr/data/imaug/text_image_aug/augment.py +0 -120
  119. pyxlpr/ppocr/data/imaug/text_image_aug/warp_mls.py +0 -168
  120. pyxlpr/ppocr/data/lmdb_dataset.py +0 -115
  121. pyxlpr/ppocr/data/pgnet_dataset.py +0 -104
  122. pyxlpr/ppocr/data/pubtab_dataset.py +0 -107
  123. pyxlpr/ppocr/data/simple_dataset.py +0 -372
  124. pyxlpr/ppocr/losses/__init__.py +0 -61
  125. pyxlpr/ppocr/losses/ace_loss.py +0 -52
  126. pyxlpr/ppocr/losses/basic_loss.py +0 -135
  127. pyxlpr/ppocr/losses/center_loss.py +0 -88
  128. pyxlpr/ppocr/losses/cls_loss.py +0 -30
  129. pyxlpr/ppocr/losses/combined_loss.py +0 -67
  130. pyxlpr/ppocr/losses/det_basic_loss.py +0 -208
  131. pyxlpr/ppocr/losses/det_db_loss.py +0 -80
  132. pyxlpr/ppocr/losses/det_east_loss.py +0 -63
  133. pyxlpr/ppocr/losses/det_pse_loss.py +0 -149
  134. pyxlpr/ppocr/losses/det_sast_loss.py +0 -121
  135. pyxlpr/ppocr/losses/distillation_loss.py +0 -272
  136. pyxlpr/ppocr/losses/e2e_pg_loss.py +0 -140
  137. pyxlpr/ppocr/losses/kie_sdmgr_loss.py +0 -113
  138. pyxlpr/ppocr/losses/rec_aster_loss.py +0 -99
  139. pyxlpr/ppocr/losses/rec_att_loss.py +0 -39
  140. pyxlpr/ppocr/losses/rec_ctc_loss.py +0 -44
  141. pyxlpr/ppocr/losses/rec_enhanced_ctc_loss.py +0 -70
  142. pyxlpr/ppocr/losses/rec_nrtr_loss.py +0 -30
  143. pyxlpr/ppocr/losses/rec_sar_loss.py +0 -28
  144. pyxlpr/ppocr/losses/rec_srn_loss.py +0 -47
  145. pyxlpr/ppocr/losses/table_att_loss.py +0 -109
  146. pyxlpr/ppocr/metrics/__init__.py +0 -44
  147. pyxlpr/ppocr/metrics/cls_metric.py +0 -45
  148. pyxlpr/ppocr/metrics/det_metric.py +0 -82
  149. pyxlpr/ppocr/metrics/distillation_metric.py +0 -73
  150. pyxlpr/ppocr/metrics/e2e_metric.py +0 -86
  151. pyxlpr/ppocr/metrics/eval_det_iou.py +0 -274
  152. pyxlpr/ppocr/metrics/kie_metric.py +0 -70
  153. pyxlpr/ppocr/metrics/rec_metric.py +0 -75
  154. pyxlpr/ppocr/metrics/table_metric.py +0 -50
  155. pyxlpr/ppocr/modeling/architectures/__init__.py +0 -32
  156. pyxlpr/ppocr/modeling/architectures/base_model.py +0 -88
  157. pyxlpr/ppocr/modeling/architectures/distillation_model.py +0 -60
  158. pyxlpr/ppocr/modeling/backbones/__init__.py +0 -54
  159. pyxlpr/ppocr/modeling/backbones/det_mobilenet_v3.py +0 -268
  160. pyxlpr/ppocr/modeling/backbones/det_resnet_vd.py +0 -246
  161. pyxlpr/ppocr/modeling/backbones/det_resnet_vd_sast.py +0 -285
  162. pyxlpr/ppocr/modeling/backbones/e2e_resnet_vd_pg.py +0 -265
  163. pyxlpr/ppocr/modeling/backbones/kie_unet_sdmgr.py +0 -186
  164. pyxlpr/ppocr/modeling/backbones/rec_mobilenet_v3.py +0 -138
  165. pyxlpr/ppocr/modeling/backbones/rec_mv1_enhance.py +0 -258
  166. pyxlpr/ppocr/modeling/backbones/rec_nrtr_mtb.py +0 -48
  167. pyxlpr/ppocr/modeling/backbones/rec_resnet_31.py +0 -210
  168. pyxlpr/ppocr/modeling/backbones/rec_resnet_aster.py +0 -143
  169. pyxlpr/ppocr/modeling/backbones/rec_resnet_fpn.py +0 -307
  170. pyxlpr/ppocr/modeling/backbones/rec_resnet_vd.py +0 -286
  171. pyxlpr/ppocr/modeling/heads/__init__.py +0 -54
  172. pyxlpr/ppocr/modeling/heads/cls_head.py +0 -52
  173. pyxlpr/ppocr/modeling/heads/det_db_head.py +0 -118
  174. pyxlpr/ppocr/modeling/heads/det_east_head.py +0 -121
  175. pyxlpr/ppocr/modeling/heads/det_pse_head.py +0 -37
  176. pyxlpr/ppocr/modeling/heads/det_sast_head.py +0 -128
  177. pyxlpr/ppocr/modeling/heads/e2e_pg_head.py +0 -253
  178. pyxlpr/ppocr/modeling/heads/kie_sdmgr_head.py +0 -206
  179. pyxlpr/ppocr/modeling/heads/multiheadAttention.py +0 -163
  180. pyxlpr/ppocr/modeling/heads/rec_aster_head.py +0 -393
  181. pyxlpr/ppocr/modeling/heads/rec_att_head.py +0 -202
  182. pyxlpr/ppocr/modeling/heads/rec_ctc_head.py +0 -88
  183. pyxlpr/ppocr/modeling/heads/rec_nrtr_head.py +0 -826
  184. pyxlpr/ppocr/modeling/heads/rec_sar_head.py +0 -402
  185. pyxlpr/ppocr/modeling/heads/rec_srn_head.py +0 -280
  186. pyxlpr/ppocr/modeling/heads/self_attention.py +0 -406
  187. pyxlpr/ppocr/modeling/heads/table_att_head.py +0 -246
  188. pyxlpr/ppocr/modeling/necks/__init__.py +0 -32
  189. pyxlpr/ppocr/modeling/necks/db_fpn.py +0 -111
  190. pyxlpr/ppocr/modeling/necks/east_fpn.py +0 -188
  191. pyxlpr/ppocr/modeling/necks/fpn.py +0 -138
  192. pyxlpr/ppocr/modeling/necks/pg_fpn.py +0 -314
  193. pyxlpr/ppocr/modeling/necks/rnn.py +0 -92
  194. pyxlpr/ppocr/modeling/necks/sast_fpn.py +0 -284
  195. pyxlpr/ppocr/modeling/necks/table_fpn.py +0 -110
  196. pyxlpr/ppocr/modeling/transforms/__init__.py +0 -28
  197. pyxlpr/ppocr/modeling/transforms/stn.py +0 -135
  198. pyxlpr/ppocr/modeling/transforms/tps.py +0 -308
  199. pyxlpr/ppocr/modeling/transforms/tps_spatial_transformer.py +0 -156
  200. pyxlpr/ppocr/optimizer/__init__.py +0 -61
  201. pyxlpr/ppocr/optimizer/learning_rate.py +0 -228
  202. pyxlpr/ppocr/optimizer/lr_scheduler.py +0 -49
  203. pyxlpr/ppocr/optimizer/optimizer.py +0 -160
  204. pyxlpr/ppocr/optimizer/regularizer.py +0 -52
  205. pyxlpr/ppocr/postprocess/__init__.py +0 -55
  206. pyxlpr/ppocr/postprocess/cls_postprocess.py +0 -33
  207. pyxlpr/ppocr/postprocess/db_postprocess.py +0 -234
  208. pyxlpr/ppocr/postprocess/east_postprocess.py +0 -143
  209. pyxlpr/ppocr/postprocess/locality_aware_nms.py +0 -200
  210. pyxlpr/ppocr/postprocess/pg_postprocess.py +0 -52
  211. pyxlpr/ppocr/postprocess/pse_postprocess/__init__.py +0 -15
  212. pyxlpr/ppocr/postprocess/pse_postprocess/pse/__init__.py +0 -29
  213. pyxlpr/ppocr/postprocess/pse_postprocess/pse/setup.py +0 -14
  214. pyxlpr/ppocr/postprocess/pse_postprocess/pse_postprocess.py +0 -118
  215. pyxlpr/ppocr/postprocess/rec_postprocess.py +0 -654
  216. pyxlpr/ppocr/postprocess/sast_postprocess.py +0 -355
  217. pyxlpr/ppocr/tools/__init__.py +0 -14
  218. pyxlpr/ppocr/tools/eval.py +0 -83
  219. pyxlpr/ppocr/tools/export_center.py +0 -77
  220. pyxlpr/ppocr/tools/export_model.py +0 -129
  221. pyxlpr/ppocr/tools/infer/predict_cls.py +0 -151
  222. pyxlpr/ppocr/tools/infer/predict_det.py +0 -300
  223. pyxlpr/ppocr/tools/infer/predict_e2e.py +0 -169
  224. pyxlpr/ppocr/tools/infer/predict_rec.py +0 -414
  225. pyxlpr/ppocr/tools/infer/predict_system.py +0 -204
  226. pyxlpr/ppocr/tools/infer/utility.py +0 -629
  227. pyxlpr/ppocr/tools/infer_cls.py +0 -83
  228. pyxlpr/ppocr/tools/infer_det.py +0 -134
  229. pyxlpr/ppocr/tools/infer_e2e.py +0 -122
  230. pyxlpr/ppocr/tools/infer_kie.py +0 -153
  231. pyxlpr/ppocr/tools/infer_rec.py +0 -146
  232. pyxlpr/ppocr/tools/infer_table.py +0 -107
  233. pyxlpr/ppocr/tools/program.py +0 -596
  234. pyxlpr/ppocr/tools/test_hubserving.py +0 -117
  235. pyxlpr/ppocr/tools/train.py +0 -163
  236. pyxlpr/ppocr/tools/xlprog.py +0 -748
  237. pyxlpr/ppocr/utils/EN_symbol_dict.txt +0 -94
  238. pyxlpr/ppocr/utils/__init__.py +0 -24
  239. pyxlpr/ppocr/utils/dict/ar_dict.txt +0 -117
  240. pyxlpr/ppocr/utils/dict/arabic_dict.txt +0 -162
  241. pyxlpr/ppocr/utils/dict/be_dict.txt +0 -145
  242. pyxlpr/ppocr/utils/dict/bg_dict.txt +0 -140
  243. pyxlpr/ppocr/utils/dict/chinese_cht_dict.txt +0 -8421
  244. pyxlpr/ppocr/utils/dict/cyrillic_dict.txt +0 -163
  245. pyxlpr/ppocr/utils/dict/devanagari_dict.txt +0 -167
  246. pyxlpr/ppocr/utils/dict/en_dict.txt +0 -63
  247. pyxlpr/ppocr/utils/dict/fa_dict.txt +0 -136
  248. pyxlpr/ppocr/utils/dict/french_dict.txt +0 -136
  249. pyxlpr/ppocr/utils/dict/german_dict.txt +0 -143
  250. pyxlpr/ppocr/utils/dict/hi_dict.txt +0 -162
  251. pyxlpr/ppocr/utils/dict/it_dict.txt +0 -118
  252. pyxlpr/ppocr/utils/dict/japan_dict.txt +0 -4399
  253. pyxlpr/ppocr/utils/dict/ka_dict.txt +0 -153
  254. pyxlpr/ppocr/utils/dict/korean_dict.txt +0 -3688
  255. pyxlpr/ppocr/utils/dict/latin_dict.txt +0 -185
  256. pyxlpr/ppocr/utils/dict/mr_dict.txt +0 -153
  257. pyxlpr/ppocr/utils/dict/ne_dict.txt +0 -153
  258. pyxlpr/ppocr/utils/dict/oc_dict.txt +0 -96
  259. pyxlpr/ppocr/utils/dict/pu_dict.txt +0 -130
  260. pyxlpr/ppocr/utils/dict/rs_dict.txt +0 -91
  261. pyxlpr/ppocr/utils/dict/rsc_dict.txt +0 -134
  262. pyxlpr/ppocr/utils/dict/ru_dict.txt +0 -125
  263. pyxlpr/ppocr/utils/dict/ta_dict.txt +0 -128
  264. pyxlpr/ppocr/utils/dict/table_dict.txt +0 -277
  265. pyxlpr/ppocr/utils/dict/table_structure_dict.txt +0 -2759
  266. pyxlpr/ppocr/utils/dict/te_dict.txt +0 -151
  267. pyxlpr/ppocr/utils/dict/ug_dict.txt +0 -114
  268. pyxlpr/ppocr/utils/dict/uk_dict.txt +0 -142
  269. pyxlpr/ppocr/utils/dict/ur_dict.txt +0 -137
  270. pyxlpr/ppocr/utils/dict/xi_dict.txt +0 -110
  271. pyxlpr/ppocr/utils/dict90.txt +0 -90
  272. pyxlpr/ppocr/utils/e2e_metric/Deteval.py +0 -574
  273. pyxlpr/ppocr/utils/e2e_metric/polygon_fast.py +0 -83
  274. pyxlpr/ppocr/utils/e2e_utils/extract_batchsize.py +0 -87
  275. pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_fast.py +0 -457
  276. pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_slow.py +0 -592
  277. pyxlpr/ppocr/utils/e2e_utils/pgnet_pp_utils.py +0 -162
  278. pyxlpr/ppocr/utils/e2e_utils/visual.py +0 -162
  279. pyxlpr/ppocr/utils/en_dict.txt +0 -95
  280. pyxlpr/ppocr/utils/gen_label.py +0 -81
  281. pyxlpr/ppocr/utils/ic15_dict.txt +0 -36
  282. pyxlpr/ppocr/utils/iou.py +0 -54
  283. pyxlpr/ppocr/utils/logging.py +0 -69
  284. pyxlpr/ppocr/utils/network.py +0 -84
  285. pyxlpr/ppocr/utils/ppocr_keys_v1.txt +0 -6623
  286. pyxlpr/ppocr/utils/profiler.py +0 -110
  287. pyxlpr/ppocr/utils/save_load.py +0 -150
  288. pyxlpr/ppocr/utils/stats.py +0 -72
  289. pyxlpr/ppocr/utils/utility.py +0 -80
  290. pyxlpr/ppstructure/__init__.py +0 -13
  291. pyxlpr/ppstructure/predict_system.py +0 -187
  292. pyxlpr/ppstructure/table/__init__.py +0 -13
  293. pyxlpr/ppstructure/table/eval_table.py +0 -72
  294. pyxlpr/ppstructure/table/matcher.py +0 -192
  295. pyxlpr/ppstructure/table/predict_structure.py +0 -136
  296. pyxlpr/ppstructure/table/predict_table.py +0 -221
  297. pyxlpr/ppstructure/table/table_metric/__init__.py +0 -16
  298. pyxlpr/ppstructure/table/table_metric/parallel.py +0 -51
  299. pyxlpr/ppstructure/table/table_metric/table_metric.py +0 -247
  300. pyxlpr/ppstructure/table/tablepyxl/__init__.py +0 -13
  301. pyxlpr/ppstructure/table/tablepyxl/style.py +0 -283
  302. pyxlpr/ppstructure/table/tablepyxl/tablepyxl.py +0 -118
  303. pyxlpr/ppstructure/utility.py +0 -71
  304. pyxlpr/xlai.py +0 -10
  305. /pyxllib/{ext/autogui → autogui}/virtualkey.py +0 -0
  306. {pyxllib-0.3.96.dist-info → pyxllib-0.3.197.dist-info/licenses}/LICENSE +0 -0
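Items 7, 10, 72, and 305 above show the GUI-automation helpers being promoted out of pyxllib.ext into a new top-level pyxllib.autogui package (joined by new modules such as uiautolib.py, wechat.py, and wxautolib.py). A minimal, hypothetical sketch of how downstream code might absorb that move; the module paths come from the file listing, but the try/except fallback is an assumption, not something shipped by the package:

# Hypothetical compatibility shim for the pyxllib.ext.autogui -> pyxllib.autogui move.
# Module paths are taken from the file listing above; the fallback pattern is an assumption.
try:
    from pyxllib.autogui import autogui  # layout in 0.3.197
except ImportError:
    from pyxllib.ext.autogui import autogui  # layout in 0.3.96 and earlier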
pyxlpr/ppocr/utils/e2e_metric/Deteval.py
@@ -1,574 +0,0 @@
- # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- import numpy as np
- import scipy.io as io
- from pyxlpr.ppocr.utils.e2e_metric.polygon_fast import iod, area_of_intersection, area
-
-
- def get_socre_A(gt_dir, pred_dict):
-     allInputs = 1
-
-     def input_reading_mod(pred_dict):
-         """This helper reads input from txt files"""
-         det = []
-         n = len(pred_dict)
-         for i in range(n):
-             points = pred_dict[i]['points']
-             text = pred_dict[i]['texts']
-             point = ",".join(map(str, points.reshape(-1, )))
-             det.append([point, text])
-         return det
-
-     def gt_reading_mod(gt_dict):
-         """This helper reads groundtruths from mat files"""
-         gt = []
-         n = len(gt_dict)
-         for i in range(n):
-             points = gt_dict[i]['points'].tolist()
-             h = len(points)
-             text = gt_dict[i]['text']
-             xx = [
-                 np.array(
-                     ['x:'], dtype='<U2'), 0, np.array(
-                         ['y:'], dtype='<U2'), 0, np.array(
-                             ['#'], dtype='<U1'), np.array(
-                                 ['#'], dtype='<U1')
-             ]
-             t_x, t_y = [], []
-             for j in range(h):
-                 t_x.append(points[j][0])
-                 t_y.append(points[j][1])
-             xx[1] = np.array([t_x], dtype='int16')
-             xx[3] = np.array([t_y], dtype='int16')
-             if text != "":
-                 xx[4] = np.array([text], dtype='U{}'.format(len(text)))
-                 xx[5] = np.array(['c'], dtype='<U1')
-             gt.append(xx)
-         return gt
-
-     def detection_filtering(detections, groundtruths, threshold=0.5):
-         for gt_id, gt in enumerate(groundtruths):
-             if (gt[5] == '#') and (gt[1].shape[1] > 1):
-                 gt_x = list(map(int, np.squeeze(gt[1])))
-                 gt_y = list(map(int, np.squeeze(gt[3])))
-                 for det_id, detection in enumerate(detections):
-                     detection_orig = detection
-                     detection = [float(x) for x in detection[0].split(',')]
-                     detection = list(map(int, detection))
-                     det_x = detection[0::2]
-                     det_y = detection[1::2]
-                     det_gt_iou = iod(det_x, det_y, gt_x, gt_y)
-                     if det_gt_iou > threshold:
-                         detections[det_id] = []
-
-                 detections[:] = [item for item in detections if item != []]
-         return detections
-
-     def sigma_calculation(det_x, det_y, gt_x, gt_y):
-         """
-         sigma = inter_area / gt_area
-         """
-         return np.round((area_of_intersection(det_x, det_y, gt_x, gt_y) /
-                          area(gt_x, gt_y)), 2)
-
-     def tau_calculation(det_x, det_y, gt_x, gt_y):
-         if area(det_x, det_y) == 0.0:
-             return 0
-         return np.round((area_of_intersection(det_x, det_y, gt_x, gt_y) /
-                          area(det_x, det_y)), 2)
-
-     ##############################Initialization###################################
-     # global_sigma = []
-     # global_tau = []
-     # global_pred_str = []
-     # global_gt_str = []
-     ###############################################################################
-
-     for input_id in range(allInputs):
-         if (input_id != '.DS_Store') and (input_id != 'Pascal_result.txt') and (
-                 input_id != 'Pascal_result_curved.txt') and (input_id != 'Pascal_result_non_curved.txt') and (
-                 input_id != 'Deteval_result.txt') and (input_id != 'Deteval_result_curved.txt') \
-                 and (input_id != 'Deteval_result_non_curved.txt'):
-             detections = input_reading_mod(pred_dict)
-             groundtruths = gt_reading_mod(gt_dir)
-             detections = detection_filtering(
-                 detections,
-                 groundtruths)  # filters detections overlapping with DC area
-             dc_id = []
-             for i in range(len(groundtruths)):
-                 if groundtruths[i][5] == '#':
-                     dc_id.append(i)
-             cnt = 0
-             for a in dc_id:
-                 num = a - cnt
-                 del groundtruths[num]
-                 cnt += 1
-
-             local_sigma_table = np.zeros((len(groundtruths), len(detections)))
-             local_tau_table = np.zeros((len(groundtruths), len(detections)))
-             local_pred_str = {}
-             local_gt_str = {}
-
-             for gt_id, gt in enumerate(groundtruths):
-                 if len(detections) > 0:
-                     for det_id, detection in enumerate(detections):
-                         detection_orig = detection
-                         detection = [float(x) for x in detection[0].split(',')]
-                         detection = list(map(int, detection))
-                         pred_seq_str = detection_orig[1].strip()
-                         det_x = detection[0::2]
-                         det_y = detection[1::2]
-                         gt_x = list(map(int, np.squeeze(gt[1])))
-                         gt_y = list(map(int, np.squeeze(gt[3])))
-                         gt_seq_str = str(gt[4].tolist()[0])
-
-                         local_sigma_table[gt_id, det_id] = sigma_calculation(
-                             det_x, det_y, gt_x, gt_y)
-                         local_tau_table[gt_id, det_id] = tau_calculation(
-                             det_x, det_y, gt_x, gt_y)
-                         local_pred_str[det_id] = pred_seq_str
-                         local_gt_str[gt_id] = gt_seq_str
-
-             global_sigma = local_sigma_table
-             global_tau = local_tau_table
-             global_pred_str = local_pred_str
-             global_gt_str = local_gt_str
-
-     single_data = {}
-     single_data['sigma'] = global_sigma
-     single_data['global_tau'] = global_tau
-     single_data['global_pred_str'] = global_pred_str
-     single_data['global_gt_str'] = global_gt_str
-     return single_data
-
-
- def get_socre_B(gt_dir, img_id, pred_dict):
-     allInputs = 1
-
-     def input_reading_mod(pred_dict):
-         """This helper reads input from txt files"""
-         det = []
-         n = len(pred_dict)
-         for i in range(n):
-             points = pred_dict[i]['points']
-             text = pred_dict[i]['texts']
-             point = ",".join(map(str, points.reshape(-1, )))
-             det.append([point, text])
-         return det
-
-     def gt_reading_mod(gt_dir, gt_id):
-         gt = io.loadmat('%s/poly_gt_img%s.mat' % (gt_dir, gt_id))
-         gt = gt['polygt']
-         return gt
-
-     def detection_filtering(detections, groundtruths, threshold=0.5):
-         for gt_id, gt in enumerate(groundtruths):
-             if (gt[5] == '#') and (gt[1].shape[1] > 1):
-                 gt_x = list(map(int, np.squeeze(gt[1])))
-                 gt_y = list(map(int, np.squeeze(gt[3])))
-                 for det_id, detection in enumerate(detections):
-                     detection_orig = detection
-                     detection = [float(x) for x in detection[0].split(',')]
-                     detection = list(map(int, detection))
-                     det_x = detection[0::2]
-                     det_y = detection[1::2]
-                     det_gt_iou = iod(det_x, det_y, gt_x, gt_y)
-                     if det_gt_iou > threshold:
-                         detections[det_id] = []
-
-                 detections[:] = [item for item in detections if item != []]
-         return detections
-
-     def sigma_calculation(det_x, det_y, gt_x, gt_y):
-         """
-         sigma = inter_area / gt_area
-         """
-         return np.round((area_of_intersection(det_x, det_y, gt_x, gt_y) /
-                          area(gt_x, gt_y)), 2)
-
-     def tau_calculation(det_x, det_y, gt_x, gt_y):
-         if area(det_x, det_y) == 0.0:
-             return 0
-         return np.round((area_of_intersection(det_x, det_y, gt_x, gt_y) /
-                          area(det_x, det_y)), 2)
-
-     ##############################Initialization###################################
-     # global_sigma = []
-     # global_tau = []
-     # global_pred_str = []
-     # global_gt_str = []
-     ###############################################################################
-
-     for input_id in range(allInputs):
-         if (input_id != '.DS_Store') and (input_id != 'Pascal_result.txt') and (
-                 input_id != 'Pascal_result_curved.txt') and (input_id != 'Pascal_result_non_curved.txt') and (
-                 input_id != 'Deteval_result.txt') and (input_id != 'Deteval_result_curved.txt') \
-                 and (input_id != 'Deteval_result_non_curved.txt'):
-             detections = input_reading_mod(pred_dict)
-             groundtruths = gt_reading_mod(gt_dir, img_id).tolist()
-             detections = detection_filtering(
-                 detections,
-                 groundtruths)  # filters detections overlapping with DC area
-             dc_id = []
-             for i in range(len(groundtruths)):
-                 if groundtruths[i][5] == '#':
-                     dc_id.append(i)
-             cnt = 0
-             for a in dc_id:
-                 num = a - cnt
-                 del groundtruths[num]
-                 cnt += 1
-
-             local_sigma_table = np.zeros((len(groundtruths), len(detections)))
-             local_tau_table = np.zeros((len(groundtruths), len(detections)))
-             local_pred_str = {}
-             local_gt_str = {}
-
-             for gt_id, gt in enumerate(groundtruths):
-                 if len(detections) > 0:
-                     for det_id, detection in enumerate(detections):
-                         detection_orig = detection
-                         detection = [float(x) for x in detection[0].split(',')]
-                         detection = list(map(int, detection))
-                         pred_seq_str = detection_orig[1].strip()
-                         det_x = detection[0::2]
-                         det_y = detection[1::2]
-                         gt_x = list(map(int, np.squeeze(gt[1])))
-                         gt_y = list(map(int, np.squeeze(gt[3])))
-                         gt_seq_str = str(gt[4].tolist()[0])
-
-                         local_sigma_table[gt_id, det_id] = sigma_calculation(
-                             det_x, det_y, gt_x, gt_y)
-                         local_tau_table[gt_id, det_id] = tau_calculation(
-                             det_x, det_y, gt_x, gt_y)
-                         local_pred_str[det_id] = pred_seq_str
-                         local_gt_str[gt_id] = gt_seq_str
-
-             global_sigma = local_sigma_table
-             global_tau = local_tau_table
-             global_pred_str = local_pred_str
-             global_gt_str = local_gt_str
-
-     single_data = {}
-     single_data['sigma'] = global_sigma
-     single_data['global_tau'] = global_tau
-     single_data['global_pred_str'] = global_pred_str
-     single_data['global_gt_str'] = global_gt_str
-     return single_data
-
-
- def combine_results(all_data):
-     tr = 0.7
-     tp = 0.6
-     fsc_k = 0.8
-     k = 2
-     global_sigma = []
-     global_tau = []
-     global_pred_str = []
-     global_gt_str = []
-     for data in all_data:
-         global_sigma.append(data['sigma'])
-         global_tau.append(data['global_tau'])
-         global_pred_str.append(data['global_pred_str'])
-         global_gt_str.append(data['global_gt_str'])
-
-     global_accumulative_recall = 0
-     global_accumulative_precision = 0
-     total_num_gt = 0
-     total_num_det = 0
-     hit_str_count = 0
-     hit_count = 0
-
-     def one_to_one(local_sigma_table, local_tau_table,
-                    local_accumulative_recall, local_accumulative_precision,
-                    global_accumulative_recall, global_accumulative_precision,
-                    gt_flag, det_flag, idy):
-         hit_str_num = 0
-         for gt_id in range(num_gt):
-             gt_matching_qualified_sigma_candidates = np.where(
-                 local_sigma_table[gt_id, :] > tr)
-             gt_matching_num_qualified_sigma_candidates = gt_matching_qualified_sigma_candidates[
-                 0].shape[0]
-             gt_matching_qualified_tau_candidates = np.where(
-                 local_tau_table[gt_id, :] > tp)
-             gt_matching_num_qualified_tau_candidates = gt_matching_qualified_tau_candidates[
-                 0].shape[0]
-
-             det_matching_qualified_sigma_candidates = np.where(
-                 local_sigma_table[:, gt_matching_qualified_sigma_candidates[0]]
-                 > tr)
-             det_matching_num_qualified_sigma_candidates = det_matching_qualified_sigma_candidates[
-                 0].shape[0]
-             det_matching_qualified_tau_candidates = np.where(
-                 local_tau_table[:, gt_matching_qualified_tau_candidates[0]] >
-                 tp)
-             det_matching_num_qualified_tau_candidates = det_matching_qualified_tau_candidates[
-                 0].shape[0]
-
-             if (gt_matching_num_qualified_sigma_candidates == 1) and (gt_matching_num_qualified_tau_candidates == 1) and \
-                     (det_matching_num_qualified_sigma_candidates == 1) and (
-                         det_matching_num_qualified_tau_candidates == 1):
-                 global_accumulative_recall = global_accumulative_recall + 1.0
-                 global_accumulative_precision = global_accumulative_precision + 1.0
-                 local_accumulative_recall = local_accumulative_recall + 1.0
-                 local_accumulative_precision = local_accumulative_precision + 1.0
-
-                 gt_flag[0, gt_id] = 1
-                 matched_det_id = np.where(local_sigma_table[gt_id, :] > tr)
-                 # recg start
-                 gt_str_cur = global_gt_str[idy][gt_id]
-                 pred_str_cur = global_pred_str[idy][matched_det_id[0].tolist()[
-                     0]]
-                 if pred_str_cur == gt_str_cur:
-                     hit_str_num += 1
-                 else:
-                     if pred_str_cur.lower() == gt_str_cur.lower():
-                         hit_str_num += 1
-                 # recg end
-                 det_flag[0, matched_det_id] = 1
-         return local_accumulative_recall, local_accumulative_precision, global_accumulative_recall, global_accumulative_precision, gt_flag, det_flag, hit_str_num
-
-     def one_to_many(local_sigma_table, local_tau_table,
-                     local_accumulative_recall, local_accumulative_precision,
-                     global_accumulative_recall, global_accumulative_precision,
-                     gt_flag, det_flag, idy):
-         hit_str_num = 0
-         for gt_id in range(num_gt):
-             # skip the following if the groundtruth was matched
-             if gt_flag[0, gt_id] > 0:
-                 continue
-
-             non_zero_in_sigma = np.where(local_sigma_table[gt_id, :] > 0)
-             num_non_zero_in_sigma = non_zero_in_sigma[0].shape[0]
-
-             if num_non_zero_in_sigma >= k:
-                 ####search for all detections that overlaps with this groundtruth
-                 qualified_tau_candidates = np.where((local_tau_table[
-                     gt_id, :] >= tp) & (det_flag[0, :] == 0))
-                 num_qualified_tau_candidates = qualified_tau_candidates[
-                     0].shape[0]
-
-                 if num_qualified_tau_candidates == 1:
-                     if ((local_tau_table[gt_id, qualified_tau_candidates] >= tp)
-                             and
-                             (local_sigma_table[gt_id, qualified_tau_candidates] >=
-                              tr)):
-                         # became an one-to-one case
-                         global_accumulative_recall = global_accumulative_recall + 1.0
-                         global_accumulative_precision = global_accumulative_precision + 1.0
-                         local_accumulative_recall = local_accumulative_recall + 1.0
-                         local_accumulative_precision = local_accumulative_precision + 1.0
-
-                         gt_flag[0, gt_id] = 1
-                         det_flag[0, qualified_tau_candidates] = 1
-                         # recg start
-                         gt_str_cur = global_gt_str[idy][gt_id]
-                         pred_str_cur = global_pred_str[idy][
-                             qualified_tau_candidates[0].tolist()[0]]
-                         if pred_str_cur == gt_str_cur:
-                             hit_str_num += 1
-                         else:
-                             if pred_str_cur.lower() == gt_str_cur.lower():
-                                 hit_str_num += 1
-                         # recg end
-                 elif (np.sum(local_sigma_table[gt_id, qualified_tau_candidates])
-                       >= tr):
-                     gt_flag[0, gt_id] = 1
-                     det_flag[0, qualified_tau_candidates] = 1
-                     # recg start
-                     gt_str_cur = global_gt_str[idy][gt_id]
-                     pred_str_cur = global_pred_str[idy][
-                         qualified_tau_candidates[0].tolist()[0]]
-                     if pred_str_cur == gt_str_cur:
-                         hit_str_num += 1
-                     else:
-                         if pred_str_cur.lower() == gt_str_cur.lower():
-                             hit_str_num += 1
-                     # recg end
-
-                     global_accumulative_recall = global_accumulative_recall + fsc_k
-                     global_accumulative_precision = global_accumulative_precision + num_qualified_tau_candidates * fsc_k
-
-                     local_accumulative_recall = local_accumulative_recall + fsc_k
-                     local_accumulative_precision = local_accumulative_precision + num_qualified_tau_candidates * fsc_k
-
-         return local_accumulative_recall, local_accumulative_precision, global_accumulative_recall, global_accumulative_precision, gt_flag, det_flag, hit_str_num
-
-     def many_to_one(local_sigma_table, local_tau_table,
-                     local_accumulative_recall, local_accumulative_precision,
-                     global_accumulative_recall, global_accumulative_precision,
-                     gt_flag, det_flag, idy):
-         hit_str_num = 0
-         for det_id in range(num_det):
-             # skip the following if the detection was matched
-             if det_flag[0, det_id] > 0:
-                 continue
-
-             non_zero_in_tau = np.where(local_tau_table[:, det_id] > 0)
-             num_non_zero_in_tau = non_zero_in_tau[0].shape[0]
-
-             if num_non_zero_in_tau >= k:
-                 ####search for all detections that overlaps with this groundtruth
-                 qualified_sigma_candidates = np.where((
-                     local_sigma_table[:, det_id] >= tp) & (gt_flag[0, :] == 0))
-                 num_qualified_sigma_candidates = qualified_sigma_candidates[
-                     0].shape[0]
-
-                 if num_qualified_sigma_candidates == 1:
-                     if ((local_tau_table[qualified_sigma_candidates, det_id] >=
-                          tp) and
-                             (local_sigma_table[qualified_sigma_candidates, det_id]
-                              >= tr)):
-                         # became an one-to-one case
-                         global_accumulative_recall = global_accumulative_recall + 1.0
-                         global_accumulative_precision = global_accumulative_precision + 1.0
-                         local_accumulative_recall = local_accumulative_recall + 1.0
-                         local_accumulative_precision = local_accumulative_precision + 1.0
-
-                         gt_flag[0, qualified_sigma_candidates] = 1
-                         det_flag[0, det_id] = 1
-                         # recg start
-                         pred_str_cur = global_pred_str[idy][det_id]
-                         gt_len = len(qualified_sigma_candidates[0])
-                         for idx in range(gt_len):
-                             ele_gt_id = qualified_sigma_candidates[0].tolist()[
-                                 idx]
-                             if ele_gt_id not in global_gt_str[idy]:
-                                 continue
-                             gt_str_cur = global_gt_str[idy][ele_gt_id]
-                             if pred_str_cur == gt_str_cur:
-                                 hit_str_num += 1
-                                 break
-                             else:
-                                 if pred_str_cur.lower() == gt_str_cur.lower():
-                                     hit_str_num += 1
-                                     break
-                         # recg end
-                 elif (np.sum(local_tau_table[qualified_sigma_candidates,
-                              det_id]) >= tp):
-                     det_flag[0, det_id] = 1
-                     gt_flag[0, qualified_sigma_candidates] = 1
-                     # recg start
-                     pred_str_cur = global_pred_str[idy][det_id]
-                     gt_len = len(qualified_sigma_candidates[0])
-                     for idx in range(gt_len):
-                         ele_gt_id = qualified_sigma_candidates[0].tolist()[idx]
-                         if ele_gt_id not in global_gt_str[idy]:
-                             continue
-                         gt_str_cur = global_gt_str[idy][ele_gt_id]
-                         if pred_str_cur == gt_str_cur:
-                             hit_str_num += 1
-                             break
-                         else:
-                             if pred_str_cur.lower() == gt_str_cur.lower():
-                                 hit_str_num += 1
-                                 break
-                     # recg end
-
-                     global_accumulative_recall = global_accumulative_recall + num_qualified_sigma_candidates * fsc_k
-                     global_accumulative_precision = global_accumulative_precision + fsc_k
-
-                     local_accumulative_recall = local_accumulative_recall + num_qualified_sigma_candidates * fsc_k
-                     local_accumulative_precision = local_accumulative_precision + fsc_k
-         return local_accumulative_recall, local_accumulative_precision, global_accumulative_recall, global_accumulative_precision, gt_flag, det_flag, hit_str_num
-
-     for idx in range(len(global_sigma)):
-         local_sigma_table = np.array(global_sigma[idx])
-         local_tau_table = global_tau[idx]
-
-         num_gt = local_sigma_table.shape[0]
-         num_det = local_sigma_table.shape[1]
-
-         total_num_gt = total_num_gt + num_gt
-         total_num_det = total_num_det + num_det
-
-         local_accumulative_recall = 0
-         local_accumulative_precision = 0
-         gt_flag = np.zeros((1, num_gt))
-         det_flag = np.zeros((1, num_det))
-
-         #######first check for one-to-one case##########
-         local_accumulative_recall, local_accumulative_precision, global_accumulative_recall, global_accumulative_precision, \
-             gt_flag, det_flag, hit_str_num = one_to_one(local_sigma_table, local_tau_table,
-                                                         local_accumulative_recall, local_accumulative_precision,
-                                                         global_accumulative_recall, global_accumulative_precision,
-                                                         gt_flag, det_flag, idx)
-
-         hit_str_count += hit_str_num
-         #######then check for one-to-many case##########
-         local_accumulative_recall, local_accumulative_precision, global_accumulative_recall, global_accumulative_precision, \
-             gt_flag, det_flag, hit_str_num = one_to_many(local_sigma_table, local_tau_table,
-                                                          local_accumulative_recall, local_accumulative_precision,
-                                                          global_accumulative_recall, global_accumulative_precision,
-                                                          gt_flag, det_flag, idx)
-         hit_str_count += hit_str_num
-         #######then check for many-to-one case##########
-         local_accumulative_recall, local_accumulative_precision, global_accumulative_recall, global_accumulative_precision, \
-             gt_flag, det_flag, hit_str_num = many_to_one(local_sigma_table, local_tau_table,
-                                                          local_accumulative_recall, local_accumulative_precision,
-                                                          global_accumulative_recall, global_accumulative_precision,
-                                                          gt_flag, det_flag, idx)
-         hit_str_count += hit_str_num
-
-     try:
-         recall = global_accumulative_recall / total_num_gt
-     except ZeroDivisionError:
-         recall = 0
-
-     try:
-         precision = global_accumulative_precision / total_num_det
-     except ZeroDivisionError:
-         precision = 0
-
-     try:
-         f_score = 2 * precision * recall / (precision + recall)
-     except ZeroDivisionError:
-         f_score = 0
-
-     try:
-         seqerr = 1 - float(hit_str_count) / global_accumulative_recall
-     except ZeroDivisionError:
-         seqerr = 1
-
-     try:
-         recall_e2e = float(hit_str_count) / total_num_gt
-     except ZeroDivisionError:
-         recall_e2e = 0
-
-     try:
-         precision_e2e = float(hit_str_count) / total_num_det
-     except ZeroDivisionError:
-         precision_e2e = 0
-
-     try:
-         f_score_e2e = 2 * precision_e2e * recall_e2e / (
-             precision_e2e + recall_e2e)
-     except ZeroDivisionError:
-         f_score_e2e = 0
-
-     final = {
-         'total_num_gt': total_num_gt,
-         'total_num_det': total_num_det,
-         'global_accumulative_recall': global_accumulative_recall,
-         'hit_str_count': hit_str_count,
-         'recall': recall,
-         'precision': precision,
-         'f_score': f_score,
-         'seqerr': seqerr,
-         'recall_e2e': recall_e2e,
-         'precision_e2e': precision_e2e,
-         'f_score_e2e': f_score_e2e
-     }
-     return final
pyxlpr/ppocr/utils/e2e_metric/polygon_fast.py
@@ -1,83 +0,0 @@
- # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- import numpy as np
- from shapely.geometry import Polygon
- """
- :param det_x: [1, N] Xs of detection's vertices
- :param det_y: [1, N] Ys of detection's vertices
- :param gt_x: [1, N] Xs of groundtruth's vertices
- :param gt_y: [1, N] Ys of groundtruth's vertices
-
- ##############
- All the calculation of 'AREA' in this script is handled by:
- 1) First generating a binary mask with the polygon area filled up with 1's
- 2) Summing up all the 1's
- """
-
-
- def area(x, y):
-     polygon = Polygon(np.stack([x, y], axis=1))
-     return float(polygon.area)
-
-
- def approx_area_of_intersection(det_x, det_y, gt_x, gt_y):
-     """
-     This helper determine if both polygons are intersecting with each others with an approximation method.
-     Area of intersection represented by the minimum bounding rectangular [xmin, ymin, xmax, ymax]
-     """
-     det_ymax = np.max(det_y)
-     det_xmax = np.max(det_x)
-     det_ymin = np.min(det_y)
-     det_xmin = np.min(det_x)
-
-     gt_ymax = np.max(gt_y)
-     gt_xmax = np.max(gt_x)
-     gt_ymin = np.min(gt_y)
-     gt_xmin = np.min(gt_x)
-
-     all_min_ymax = np.minimum(det_ymax, gt_ymax)
-     all_max_ymin = np.maximum(det_ymin, gt_ymin)
-
-     intersect_heights = np.maximum(0.0, (all_min_ymax - all_max_ymin))
-
-     all_min_xmax = np.minimum(det_xmax, gt_xmax)
-     all_max_xmin = np.maximum(det_xmin, gt_xmin)
-     intersect_widths = np.maximum(0.0, (all_min_xmax - all_max_xmin))
-
-     return intersect_heights * intersect_widths
-
-
- def area_of_intersection(det_x, det_y, gt_x, gt_y):
-     p1 = Polygon(np.stack([det_x, det_y], axis=1)).buffer(0)
-     p2 = Polygon(np.stack([gt_x, gt_y], axis=1)).buffer(0)
-     return float(p1.intersection(p2).area)
-
-
- def area_of_union(det_x, det_y, gt_x, gt_y):
-     p1 = Polygon(np.stack([det_x, det_y], axis=1)).buffer(0)
-     p2 = Polygon(np.stack([gt_x, gt_y], axis=1)).buffer(0)
-     return float(p1.union(p2).area)
-
-
- def iou(det_x, det_y, gt_x, gt_y):
-     return area_of_intersection(det_x, det_y, gt_x, gt_y) / (
-         area_of_union(det_x, det_y, gt_x, gt_y) + 1.0)
-
-
- def iod(det_x, det_y, gt_x, gt_y):
-     """
-     This helper determine the fraction of intersection area over detection area
-     """
-     return area_of_intersection(det_x, det_y, gt_x, gt_y) / (
-         area(det_x, det_y) + 1.0)
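Similarly, a short sketch of the removed polygon helpers in isolation; the rectangles are made-up test data and the numeric comments follow directly from the definitions in the hunk above (numpy and shapely required, and the 0.3.96 module assumed importable):

from pyxlpr.ppocr.utils.e2e_metric.polygon_fast import area, area_of_intersection, iou, iod  # 0.3.96 layout

# Two axis-aligned 10x5 rectangles overlapping in a 5x5 patch.
det_x, det_y = [0, 10, 10, 0], [0, 0, 5, 5]
gt_x, gt_y = [5, 15, 15, 5], [0, 0, 5, 5]

print(area(det_x, det_y))                              # 50.0
print(area_of_intersection(det_x, det_y, gt_x, gt_y))  # 25.0
print(iou(det_x, det_y, gt_x, gt_y))                   # 25 / (75 + 1.0)
print(iod(det_x, det_y, gt_x, gt_y))                   # 25 / (50 + 1.0)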