pyxllib 0.3.96__py3-none-any.whl → 0.3.200__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to one of the supported public registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
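The file-level comparison below can be reproduced locally. A minimal sketch, assuming both wheels have already been downloaded (for example with pip download pyxllib==0.3.96 --no-deps -d wheels/ and likewise for 0.3.200) and relying only on the fact that a wheel is a zip archive; the local paths are assumptions, and equal uncompressed sizes are only a rough signal that a member is unchanged:

    # Sketch only: list added/removed/size-changed files between two pyxllib wheels.
    import zipfile

    def wheel_files(path):
        """Map archive member name -> uncompressed size for one wheel."""
        with zipfile.ZipFile(path) as zf:
            return {info.filename: info.file_size for info in zf.infolist()}

    old = wheel_files("wheels/pyxllib-0.3.96-py3-none-any.whl")
    new = wheel_files("wheels/pyxllib-0.3.200-py3-none-any.whl")

    print("removed:", sorted(set(old) - set(new)))
    print("added:  ", sorted(set(new) - set(old)))
    # A byte-for-byte comparison would need to hash the member contents instead.
    print("changed:", sorted(name for name in set(old) & set(new) if old[name] != new[name]))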
Files changed (358)
  1. pyxllib/__init__.py +21 -21
  2. pyxllib/algo/__init__.py +8 -8
  3. pyxllib/algo/disjoint.py +54 -54
  4. pyxllib/algo/geo.py +541 -529
  5. pyxllib/algo/intervals.py +964 -964
  6. pyxllib/algo/matcher.py +389 -311
  7. pyxllib/algo/newbie.py +166 -166
  8. pyxllib/algo/pupil.py +629 -461
  9. pyxllib/algo/shapelylib.py +67 -67
  10. pyxllib/algo/specialist.py +241 -240
  11. pyxllib/algo/stat.py +494 -458
  12. pyxllib/algo/treelib.py +149 -149
  13. pyxllib/algo/unitlib.py +66 -66
  14. {pyxlpr → pyxllib/autogui}/__init__.py +5 -5
  15. pyxllib/autogui/activewin.py +246 -0
  16. pyxllib/autogui/all.py +9 -0
  17. pyxllib/{ext/autogui → autogui}/autogui.py +852 -823
  18. pyxllib/autogui/uiautolib.py +362 -0
  19. pyxllib/{ext/autogui → autogui}/virtualkey.py +102 -102
  20. pyxllib/autogui/wechat.py +827 -0
  21. pyxllib/autogui/wechat_msg.py +421 -0
  22. pyxllib/autogui/wxautolib.py +84 -0
  23. pyxllib/cv/__init__.py +5 -5
  24. pyxllib/cv/expert.py +267 -267
  25. pyxllib/cv/imfile.py +159 -159
  26. pyxllib/cv/imhash.py +39 -39
  27. pyxllib/cv/pupil.py +9 -9
  28. pyxllib/cv/rgbfmt.py +1525 -1525
  29. pyxllib/cv/slidercaptcha.py +137 -0
  30. pyxllib/cv/trackbartools.py +251 -251
  31. pyxllib/cv/xlcvlib.py +1040 -1040
  32. pyxllib/cv/xlpillib.py +423 -423
  33. pyxllib/data/echarts.py +240 -129
  34. pyxllib/data/jsonlib.py +89 -0
  35. pyxllib/data/oss.py +72 -72
  36. pyxllib/data/pglib.py +1127 -643
  37. pyxllib/data/sqlite.py +568 -341
  38. pyxllib/data/sqllib.py +297 -297
  39. pyxllib/ext/JLineViewer.py +505 -492
  40. pyxllib/ext/__init__.py +6 -6
  41. pyxllib/ext/demolib.py +246 -246
  42. pyxllib/ext/drissionlib.py +277 -0
  43. pyxllib/ext/kq5034lib.py +12 -1606
  44. pyxllib/ext/old.py +663 -663
  45. pyxllib/ext/qt.py +449 -449
  46. pyxllib/ext/robustprocfile.py +497 -0
  47. pyxllib/ext/seleniumlib.py +76 -76
  48. pyxllib/ext/tk.py +173 -173
  49. pyxllib/ext/unixlib.py +827 -826
  50. pyxllib/ext/utools.py +351 -338
  51. pyxllib/ext/webhook.py +124 -101
  52. pyxllib/ext/win32lib.py +40 -40
  53. pyxllib/ext/wjxlib.py +88 -0
  54. pyxllib/ext/wpsapi.py +124 -0
  55. pyxllib/ext/xlwork.py +9 -0
  56. pyxllib/ext/yuquelib.py +1105 -173
  57. pyxllib/file/__init__.py +17 -17
  58. pyxllib/file/docxlib.py +761 -761
  59. pyxllib/file/gitlib.py +309 -309
  60. pyxllib/file/libreoffice.py +165 -0
  61. pyxllib/file/movielib.py +148 -139
  62. pyxllib/file/newbie.py +10 -10
  63. pyxllib/file/onenotelib.py +1469 -1469
  64. pyxllib/file/packlib/__init__.py +330 -293
  65. pyxllib/file/packlib/zipfile.py +2441 -2441
  66. pyxllib/file/pdflib.py +426 -426
  67. pyxllib/file/pupil.py +185 -185
  68. pyxllib/file/specialist/__init__.py +685 -685
  69. pyxllib/file/specialist/dirlib.py +799 -799
  70. pyxllib/file/specialist/download.py +193 -186
  71. pyxllib/file/specialist/filelib.py +2829 -2618
  72. pyxllib/file/xlsxlib.py +3131 -2976
  73. pyxllib/file/xlsyncfile.py +341 -0
  74. pyxllib/prog/__init__.py +5 -5
  75. pyxllib/prog/cachetools.py +64 -0
  76. pyxllib/prog/deprecatedlib.py +233 -233
  77. pyxllib/prog/filelock.py +42 -0
  78. pyxllib/prog/ipyexec.py +253 -253
  79. pyxllib/prog/multiprogs.py +940 -0
  80. pyxllib/prog/newbie.py +451 -444
  81. pyxllib/prog/pupil.py +1197 -1128
  82. pyxllib/prog/sitepackages.py +33 -33
  83. pyxllib/prog/specialist/__init__.py +391 -217
  84. pyxllib/prog/specialist/bc.py +203 -200
  85. pyxllib/prog/specialist/browser.py +497 -488
  86. pyxllib/prog/specialist/common.py +347 -347
  87. pyxllib/prog/specialist/datetime.py +199 -131
  88. pyxllib/prog/specialist/tictoc.py +240 -241
  89. pyxllib/prog/specialist/xllog.py +180 -180
  90. pyxllib/prog/xlosenv.py +108 -101
  91. pyxllib/stdlib/__init__.py +17 -17
  92. pyxllib/stdlib/tablepyxl/__init__.py +10 -10
  93. pyxllib/stdlib/tablepyxl/style.py +303 -303
  94. pyxllib/stdlib/tablepyxl/tablepyxl.py +130 -130
  95. pyxllib/text/__init__.py +8 -8
  96. pyxllib/text/ahocorasick.py +39 -39
  97. pyxllib/text/airscript.js +744 -0
  98. pyxllib/text/charclasslib.py +121 -109
  99. pyxllib/text/jiebalib.py +267 -264
  100. pyxllib/text/jinjalib.py +32 -0
  101. pyxllib/text/jsa_ai_prompt.md +271 -0
  102. pyxllib/text/jscode.py +922 -767
  103. pyxllib/text/latex/__init__.py +158 -158
  104. pyxllib/text/levenshtein.py +303 -303
  105. pyxllib/text/nestenv.py +1215 -1215
  106. pyxllib/text/newbie.py +300 -288
  107. pyxllib/text/pupil/__init__.py +8 -8
  108. pyxllib/text/pupil/common.py +1121 -1095
  109. pyxllib/text/pupil/xlalign.py +326 -326
  110. pyxllib/text/pycode.py +47 -47
  111. pyxllib/text/specialist/__init__.py +8 -8
  112. pyxllib/text/specialist/common.py +112 -112
  113. pyxllib/text/specialist/ptag.py +186 -186
  114. pyxllib/text/spellchecker.py +172 -172
  115. pyxllib/text/templates/echart_base.html +11 -0
  116. pyxllib/text/templates/highlight_code.html +17 -0
  117. pyxllib/text/templates/latex_editor.html +103 -0
  118. pyxllib/text/vbacode.py +17 -17
  119. pyxllib/text/xmllib.py +747 -685
  120. pyxllib/xl.py +42 -38
  121. pyxllib/xlcv.py +17 -17
  122. pyxllib-0.3.200.dist-info/METADATA +48 -0
  123. pyxllib-0.3.200.dist-info/RECORD +126 -0
  124. {pyxllib-0.3.96.dist-info → pyxllib-0.3.200.dist-info}/WHEEL +1 -2
  125. {pyxllib-0.3.96.dist-info → pyxllib-0.3.200.dist-info/licenses}/LICENSE +190 -190
  126. pyxllib/ext/autogui/__init__.py +0 -8
  127. pyxllib-0.3.96.dist-info/METADATA +0 -51
  128. pyxllib-0.3.96.dist-info/RECORD +0 -333
  129. pyxllib-0.3.96.dist-info/top_level.txt +0 -2
  130. pyxlpr/ai/__init__.py +0 -5
  131. pyxlpr/ai/clientlib.py +0 -1281
  132. pyxlpr/ai/specialist.py +0 -286
  133. pyxlpr/ai/torch_app.py +0 -172
  134. pyxlpr/ai/xlpaddle.py +0 -655
  135. pyxlpr/ai/xltorch.py +0 -705
  136. pyxlpr/data/__init__.py +0 -11
  137. pyxlpr/data/coco.py +0 -1325
  138. pyxlpr/data/datacls.py +0 -365
  139. pyxlpr/data/datasets.py +0 -200
  140. pyxlpr/data/gptlib.py +0 -1291
  141. pyxlpr/data/icdar/__init__.py +0 -96
  142. pyxlpr/data/icdar/deteval.py +0 -377
  143. pyxlpr/data/icdar/icdar2013.py +0 -341
  144. pyxlpr/data/icdar/iou.py +0 -340
  145. pyxlpr/data/icdar/rrc_evaluation_funcs_1_1.py +0 -463
  146. pyxlpr/data/imtextline.py +0 -473
  147. pyxlpr/data/labelme.py +0 -866
  148. pyxlpr/data/removeline.py +0 -179
  149. pyxlpr/data/specialist.py +0 -57
  150. pyxlpr/eval/__init__.py +0 -85
  151. pyxlpr/paddleocr.py +0 -776
  152. pyxlpr/ppocr/__init__.py +0 -15
  153. pyxlpr/ppocr/configs/rec/multi_language/generate_multi_language_configs.py +0 -226
  154. pyxlpr/ppocr/data/__init__.py +0 -135
  155. pyxlpr/ppocr/data/imaug/ColorJitter.py +0 -26
  156. pyxlpr/ppocr/data/imaug/__init__.py +0 -67
  157. pyxlpr/ppocr/data/imaug/copy_paste.py +0 -170
  158. pyxlpr/ppocr/data/imaug/east_process.py +0 -437
  159. pyxlpr/ppocr/data/imaug/gen_table_mask.py +0 -244
  160. pyxlpr/ppocr/data/imaug/iaa_augment.py +0 -114
  161. pyxlpr/ppocr/data/imaug/label_ops.py +0 -789
  162. pyxlpr/ppocr/data/imaug/make_border_map.py +0 -184
  163. pyxlpr/ppocr/data/imaug/make_pse_gt.py +0 -106
  164. pyxlpr/ppocr/data/imaug/make_shrink_map.py +0 -126
  165. pyxlpr/ppocr/data/imaug/operators.py +0 -433
  166. pyxlpr/ppocr/data/imaug/pg_process.py +0 -906
  167. pyxlpr/ppocr/data/imaug/randaugment.py +0 -143
  168. pyxlpr/ppocr/data/imaug/random_crop_data.py +0 -239
  169. pyxlpr/ppocr/data/imaug/rec_img_aug.py +0 -533
  170. pyxlpr/ppocr/data/imaug/sast_process.py +0 -777
  171. pyxlpr/ppocr/data/imaug/text_image_aug/__init__.py +0 -17
  172. pyxlpr/ppocr/data/imaug/text_image_aug/augment.py +0 -120
  173. pyxlpr/ppocr/data/imaug/text_image_aug/warp_mls.py +0 -168
  174. pyxlpr/ppocr/data/lmdb_dataset.py +0 -115
  175. pyxlpr/ppocr/data/pgnet_dataset.py +0 -104
  176. pyxlpr/ppocr/data/pubtab_dataset.py +0 -107
  177. pyxlpr/ppocr/data/simple_dataset.py +0 -372
  178. pyxlpr/ppocr/losses/__init__.py +0 -61
  179. pyxlpr/ppocr/losses/ace_loss.py +0 -52
  180. pyxlpr/ppocr/losses/basic_loss.py +0 -135
  181. pyxlpr/ppocr/losses/center_loss.py +0 -88
  182. pyxlpr/ppocr/losses/cls_loss.py +0 -30
  183. pyxlpr/ppocr/losses/combined_loss.py +0 -67
  184. pyxlpr/ppocr/losses/det_basic_loss.py +0 -208
  185. pyxlpr/ppocr/losses/det_db_loss.py +0 -80
  186. pyxlpr/ppocr/losses/det_east_loss.py +0 -63
  187. pyxlpr/ppocr/losses/det_pse_loss.py +0 -149
  188. pyxlpr/ppocr/losses/det_sast_loss.py +0 -121
  189. pyxlpr/ppocr/losses/distillation_loss.py +0 -272
  190. pyxlpr/ppocr/losses/e2e_pg_loss.py +0 -140
  191. pyxlpr/ppocr/losses/kie_sdmgr_loss.py +0 -113
  192. pyxlpr/ppocr/losses/rec_aster_loss.py +0 -99
  193. pyxlpr/ppocr/losses/rec_att_loss.py +0 -39
  194. pyxlpr/ppocr/losses/rec_ctc_loss.py +0 -44
  195. pyxlpr/ppocr/losses/rec_enhanced_ctc_loss.py +0 -70
  196. pyxlpr/ppocr/losses/rec_nrtr_loss.py +0 -30
  197. pyxlpr/ppocr/losses/rec_sar_loss.py +0 -28
  198. pyxlpr/ppocr/losses/rec_srn_loss.py +0 -47
  199. pyxlpr/ppocr/losses/table_att_loss.py +0 -109
  200. pyxlpr/ppocr/metrics/__init__.py +0 -44
  201. pyxlpr/ppocr/metrics/cls_metric.py +0 -45
  202. pyxlpr/ppocr/metrics/det_metric.py +0 -82
  203. pyxlpr/ppocr/metrics/distillation_metric.py +0 -73
  204. pyxlpr/ppocr/metrics/e2e_metric.py +0 -86
  205. pyxlpr/ppocr/metrics/eval_det_iou.py +0 -274
  206. pyxlpr/ppocr/metrics/kie_metric.py +0 -70
  207. pyxlpr/ppocr/metrics/rec_metric.py +0 -75
  208. pyxlpr/ppocr/metrics/table_metric.py +0 -50
  209. pyxlpr/ppocr/modeling/architectures/__init__.py +0 -32
  210. pyxlpr/ppocr/modeling/architectures/base_model.py +0 -88
  211. pyxlpr/ppocr/modeling/architectures/distillation_model.py +0 -60
  212. pyxlpr/ppocr/modeling/backbones/__init__.py +0 -54
  213. pyxlpr/ppocr/modeling/backbones/det_mobilenet_v3.py +0 -268
  214. pyxlpr/ppocr/modeling/backbones/det_resnet_vd.py +0 -246
  215. pyxlpr/ppocr/modeling/backbones/det_resnet_vd_sast.py +0 -285
  216. pyxlpr/ppocr/modeling/backbones/e2e_resnet_vd_pg.py +0 -265
  217. pyxlpr/ppocr/modeling/backbones/kie_unet_sdmgr.py +0 -186
  218. pyxlpr/ppocr/modeling/backbones/rec_mobilenet_v3.py +0 -138
  219. pyxlpr/ppocr/modeling/backbones/rec_mv1_enhance.py +0 -258
  220. pyxlpr/ppocr/modeling/backbones/rec_nrtr_mtb.py +0 -48
  221. pyxlpr/ppocr/modeling/backbones/rec_resnet_31.py +0 -210
  222. pyxlpr/ppocr/modeling/backbones/rec_resnet_aster.py +0 -143
  223. pyxlpr/ppocr/modeling/backbones/rec_resnet_fpn.py +0 -307
  224. pyxlpr/ppocr/modeling/backbones/rec_resnet_vd.py +0 -286
  225. pyxlpr/ppocr/modeling/heads/__init__.py +0 -54
  226. pyxlpr/ppocr/modeling/heads/cls_head.py +0 -52
  227. pyxlpr/ppocr/modeling/heads/det_db_head.py +0 -118
  228. pyxlpr/ppocr/modeling/heads/det_east_head.py +0 -121
  229. pyxlpr/ppocr/modeling/heads/det_pse_head.py +0 -37
  230. pyxlpr/ppocr/modeling/heads/det_sast_head.py +0 -128
  231. pyxlpr/ppocr/modeling/heads/e2e_pg_head.py +0 -253
  232. pyxlpr/ppocr/modeling/heads/kie_sdmgr_head.py +0 -206
  233. pyxlpr/ppocr/modeling/heads/multiheadAttention.py +0 -163
  234. pyxlpr/ppocr/modeling/heads/rec_aster_head.py +0 -393
  235. pyxlpr/ppocr/modeling/heads/rec_att_head.py +0 -202
  236. pyxlpr/ppocr/modeling/heads/rec_ctc_head.py +0 -88
  237. pyxlpr/ppocr/modeling/heads/rec_nrtr_head.py +0 -826
  238. pyxlpr/ppocr/modeling/heads/rec_sar_head.py +0 -402
  239. pyxlpr/ppocr/modeling/heads/rec_srn_head.py +0 -280
  240. pyxlpr/ppocr/modeling/heads/self_attention.py +0 -406
  241. pyxlpr/ppocr/modeling/heads/table_att_head.py +0 -246
  242. pyxlpr/ppocr/modeling/necks/__init__.py +0 -32
  243. pyxlpr/ppocr/modeling/necks/db_fpn.py +0 -111
  244. pyxlpr/ppocr/modeling/necks/east_fpn.py +0 -188
  245. pyxlpr/ppocr/modeling/necks/fpn.py +0 -138
  246. pyxlpr/ppocr/modeling/necks/pg_fpn.py +0 -314
  247. pyxlpr/ppocr/modeling/necks/rnn.py +0 -92
  248. pyxlpr/ppocr/modeling/necks/sast_fpn.py +0 -284
  249. pyxlpr/ppocr/modeling/necks/table_fpn.py +0 -110
  250. pyxlpr/ppocr/modeling/transforms/__init__.py +0 -28
  251. pyxlpr/ppocr/modeling/transforms/stn.py +0 -135
  252. pyxlpr/ppocr/modeling/transforms/tps.py +0 -308
  253. pyxlpr/ppocr/modeling/transforms/tps_spatial_transformer.py +0 -156
  254. pyxlpr/ppocr/optimizer/__init__.py +0 -61
  255. pyxlpr/ppocr/optimizer/learning_rate.py +0 -228
  256. pyxlpr/ppocr/optimizer/lr_scheduler.py +0 -49
  257. pyxlpr/ppocr/optimizer/optimizer.py +0 -160
  258. pyxlpr/ppocr/optimizer/regularizer.py +0 -52
  259. pyxlpr/ppocr/postprocess/__init__.py +0 -55
  260. pyxlpr/ppocr/postprocess/cls_postprocess.py +0 -33
  261. pyxlpr/ppocr/postprocess/db_postprocess.py +0 -234
  262. pyxlpr/ppocr/postprocess/east_postprocess.py +0 -143
  263. pyxlpr/ppocr/postprocess/locality_aware_nms.py +0 -200
  264. pyxlpr/ppocr/postprocess/pg_postprocess.py +0 -52
  265. pyxlpr/ppocr/postprocess/pse_postprocess/__init__.py +0 -15
  266. pyxlpr/ppocr/postprocess/pse_postprocess/pse/__init__.py +0 -29
  267. pyxlpr/ppocr/postprocess/pse_postprocess/pse/setup.py +0 -14
  268. pyxlpr/ppocr/postprocess/pse_postprocess/pse_postprocess.py +0 -118
  269. pyxlpr/ppocr/postprocess/rec_postprocess.py +0 -654
  270. pyxlpr/ppocr/postprocess/sast_postprocess.py +0 -355
  271. pyxlpr/ppocr/tools/__init__.py +0 -14
  272. pyxlpr/ppocr/tools/eval.py +0 -83
  273. pyxlpr/ppocr/tools/export_center.py +0 -77
  274. pyxlpr/ppocr/tools/export_model.py +0 -129
  275. pyxlpr/ppocr/tools/infer/predict_cls.py +0 -151
  276. pyxlpr/ppocr/tools/infer/predict_det.py +0 -300
  277. pyxlpr/ppocr/tools/infer/predict_e2e.py +0 -169
  278. pyxlpr/ppocr/tools/infer/predict_rec.py +0 -414
  279. pyxlpr/ppocr/tools/infer/predict_system.py +0 -204
  280. pyxlpr/ppocr/tools/infer/utility.py +0 -629
  281. pyxlpr/ppocr/tools/infer_cls.py +0 -83
  282. pyxlpr/ppocr/tools/infer_det.py +0 -134
  283. pyxlpr/ppocr/tools/infer_e2e.py +0 -122
  284. pyxlpr/ppocr/tools/infer_kie.py +0 -153
  285. pyxlpr/ppocr/tools/infer_rec.py +0 -146
  286. pyxlpr/ppocr/tools/infer_table.py +0 -107
  287. pyxlpr/ppocr/tools/program.py +0 -596
  288. pyxlpr/ppocr/tools/test_hubserving.py +0 -117
  289. pyxlpr/ppocr/tools/train.py +0 -163
  290. pyxlpr/ppocr/tools/xlprog.py +0 -748
  291. pyxlpr/ppocr/utils/EN_symbol_dict.txt +0 -94
  292. pyxlpr/ppocr/utils/__init__.py +0 -24
  293. pyxlpr/ppocr/utils/dict/ar_dict.txt +0 -117
  294. pyxlpr/ppocr/utils/dict/arabic_dict.txt +0 -162
  295. pyxlpr/ppocr/utils/dict/be_dict.txt +0 -145
  296. pyxlpr/ppocr/utils/dict/bg_dict.txt +0 -140
  297. pyxlpr/ppocr/utils/dict/chinese_cht_dict.txt +0 -8421
  298. pyxlpr/ppocr/utils/dict/cyrillic_dict.txt +0 -163
  299. pyxlpr/ppocr/utils/dict/devanagari_dict.txt +0 -167
  300. pyxlpr/ppocr/utils/dict/en_dict.txt +0 -63
  301. pyxlpr/ppocr/utils/dict/fa_dict.txt +0 -136
  302. pyxlpr/ppocr/utils/dict/french_dict.txt +0 -136
  303. pyxlpr/ppocr/utils/dict/german_dict.txt +0 -143
  304. pyxlpr/ppocr/utils/dict/hi_dict.txt +0 -162
  305. pyxlpr/ppocr/utils/dict/it_dict.txt +0 -118
  306. pyxlpr/ppocr/utils/dict/japan_dict.txt +0 -4399
  307. pyxlpr/ppocr/utils/dict/ka_dict.txt +0 -153
  308. pyxlpr/ppocr/utils/dict/korean_dict.txt +0 -3688
  309. pyxlpr/ppocr/utils/dict/latin_dict.txt +0 -185
  310. pyxlpr/ppocr/utils/dict/mr_dict.txt +0 -153
  311. pyxlpr/ppocr/utils/dict/ne_dict.txt +0 -153
  312. pyxlpr/ppocr/utils/dict/oc_dict.txt +0 -96
  313. pyxlpr/ppocr/utils/dict/pu_dict.txt +0 -130
  314. pyxlpr/ppocr/utils/dict/rs_dict.txt +0 -91
  315. pyxlpr/ppocr/utils/dict/rsc_dict.txt +0 -134
  316. pyxlpr/ppocr/utils/dict/ru_dict.txt +0 -125
  317. pyxlpr/ppocr/utils/dict/ta_dict.txt +0 -128
  318. pyxlpr/ppocr/utils/dict/table_dict.txt +0 -277
  319. pyxlpr/ppocr/utils/dict/table_structure_dict.txt +0 -2759
  320. pyxlpr/ppocr/utils/dict/te_dict.txt +0 -151
  321. pyxlpr/ppocr/utils/dict/ug_dict.txt +0 -114
  322. pyxlpr/ppocr/utils/dict/uk_dict.txt +0 -142
  323. pyxlpr/ppocr/utils/dict/ur_dict.txt +0 -137
  324. pyxlpr/ppocr/utils/dict/xi_dict.txt +0 -110
  325. pyxlpr/ppocr/utils/dict90.txt +0 -90
  326. pyxlpr/ppocr/utils/e2e_metric/Deteval.py +0 -574
  327. pyxlpr/ppocr/utils/e2e_metric/polygon_fast.py +0 -83
  328. pyxlpr/ppocr/utils/e2e_utils/extract_batchsize.py +0 -87
  329. pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_fast.py +0 -457
  330. pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_slow.py +0 -592
  331. pyxlpr/ppocr/utils/e2e_utils/pgnet_pp_utils.py +0 -162
  332. pyxlpr/ppocr/utils/e2e_utils/visual.py +0 -162
  333. pyxlpr/ppocr/utils/en_dict.txt +0 -95
  334. pyxlpr/ppocr/utils/gen_label.py +0 -81
  335. pyxlpr/ppocr/utils/ic15_dict.txt +0 -36
  336. pyxlpr/ppocr/utils/iou.py +0 -54
  337. pyxlpr/ppocr/utils/logging.py +0 -69
  338. pyxlpr/ppocr/utils/network.py +0 -84
  339. pyxlpr/ppocr/utils/ppocr_keys_v1.txt +0 -6623
  340. pyxlpr/ppocr/utils/profiler.py +0 -110
  341. pyxlpr/ppocr/utils/save_load.py +0 -150
  342. pyxlpr/ppocr/utils/stats.py +0 -72
  343. pyxlpr/ppocr/utils/utility.py +0 -80
  344. pyxlpr/ppstructure/__init__.py +0 -13
  345. pyxlpr/ppstructure/predict_system.py +0 -187
  346. pyxlpr/ppstructure/table/__init__.py +0 -13
  347. pyxlpr/ppstructure/table/eval_table.py +0 -72
  348. pyxlpr/ppstructure/table/matcher.py +0 -192
  349. pyxlpr/ppstructure/table/predict_structure.py +0 -136
  350. pyxlpr/ppstructure/table/predict_table.py +0 -221
  351. pyxlpr/ppstructure/table/table_metric/__init__.py +0 -16
  352. pyxlpr/ppstructure/table/table_metric/parallel.py +0 -51
  353. pyxlpr/ppstructure/table/table_metric/table_metric.py +0 -247
  354. pyxlpr/ppstructure/table/tablepyxl/__init__.py +0 -13
  355. pyxlpr/ppstructure/table/tablepyxl/style.py +0 -283
  356. pyxlpr/ppstructure/table/tablepyxl/tablepyxl.py +0 -118
  357. pyxlpr/ppstructure/utility.py +0 -71
  358. pyxlpr/xlai.py +0 -10
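Two structural changes dominate the list above: the bundled pyxlpr OCR tree (items 130-358) is removed entirely, and the autogui helpers move out of pyxllib.ext into a top-level pyxllib.autogui package (items 14, 17, 19 and 126). A hedged compatibility sketch for callers that need to work with both layouts; only the package paths come from this diff, and the assumption that the module-level names are otherwise usable as before is mine:

    # Import shim: prefer the 0.3.200 layout, fall back to the 0.3.96 one.
    try:
        from pyxllib.autogui import autogui, virtualkey      # new location (0.3.200)
    except ImportError:
        from pyxllib.ext.autogui import autogui, virtualkey  # old location (0.3.96)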
pyxlpr/ppocr/postprocess/sast_postprocess.py
@@ -1,355 +0,0 @@
- # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import os
- import sys
-
- __dir__ = os.path.dirname(__file__)
- sys.path.append(__dir__)
- sys.path.append(os.path.join(__dir__, '..'))
-
- import numpy as np
- from .locality_aware_nms import nms_locality
- import paddle
- import cv2
- import time
-
-
- class SASTPostProcess(object):
-     """
-     The post process for SAST.
-     """
-
-     def __init__(self,
-                  score_thresh=0.5,
-                  nms_thresh=0.2,
-                  sample_pts_num=2,
-                  shrink_ratio_of_width=0.3,
-                  expand_scale=1.0,
-                  tcl_map_thresh=0.5,
-                  **kwargs):
-
-         self.score_thresh = score_thresh
-         self.nms_thresh = nms_thresh
-         self.sample_pts_num = sample_pts_num
-         self.shrink_ratio_of_width = shrink_ratio_of_width
-         self.expand_scale = expand_scale
-         self.tcl_map_thresh = tcl_map_thresh
-
-         # c++ la-nms is faster, but only support python 3.5
-         self.is_python35 = False
-         if sys.version_info.major == 3 and sys.version_info.minor == 5:
-             self.is_python35 = True
-
-     def point_pair2poly(self, point_pair_list):
-         """
-         Transfer vertical point_pairs into poly point in clockwise.
-         """
-         # constract poly
-         point_num = len(point_pair_list) * 2
-         point_list = [0] * point_num
-         for idx, point_pair in enumerate(point_pair_list):
-             point_list[idx] = point_pair[0]
-             point_list[point_num - 1 - idx] = point_pair[1]
-         return np.array(point_list).reshape(-1, 2)
-
-     def shrink_quad_along_width(self,
-                                 quad,
-                                 begin_width_ratio=0.,
-                                 end_width_ratio=1.):
-         """
-         Generate shrink_quad_along_width.
-         """
-         ratio_pair = np.array(
-             [[begin_width_ratio], [end_width_ratio]], dtype=np.float32)
-         p0_1 = quad[0] + (quad[1] - quad[0]) * ratio_pair
-         p3_2 = quad[3] + (quad[2] - quad[3]) * ratio_pair
-         return np.array([p0_1[0], p0_1[1], p3_2[1], p3_2[0]])
-
-     def expand_poly_along_width(self, poly, shrink_ratio_of_width=0.3):
-         """
-         expand poly along width.
-         """
-         point_num = poly.shape[0]
-         left_quad = np.array(
-             [poly[0], poly[1], poly[-2], poly[-1]], dtype=np.float32)
-         left_ratio = -shrink_ratio_of_width * np.linalg.norm(left_quad[0] - left_quad[3]) / \
-                      (np.linalg.norm(left_quad[0] - left_quad[1]) + 1e-6)
-         left_quad_expand = self.shrink_quad_along_width(left_quad, left_ratio,
-                                                         1.0)
-         right_quad = np.array(
-             [
-                 poly[point_num // 2 - 2], poly[point_num // 2 - 1],
-                 poly[point_num // 2], poly[point_num // 2 + 1]
-             ],
-             dtype=np.float32)
-         right_ratio = 1.0 + \
-                       shrink_ratio_of_width * np.linalg.norm(right_quad[0] - right_quad[3]) / \
-                       (np.linalg.norm(right_quad[0] - right_quad[1]) + 1e-6)
-         right_quad_expand = self.shrink_quad_along_width(right_quad, 0.0,
-                                                          right_ratio)
-         poly[0] = left_quad_expand[0]
-         poly[-1] = left_quad_expand[-1]
-         poly[point_num // 2 - 1] = right_quad_expand[1]
-         poly[point_num // 2] = right_quad_expand[2]
-         return poly
-
-     def restore_quad(self, tcl_map, tcl_map_thresh, tvo_map):
-         """Restore quad."""
-         xy_text = np.argwhere(tcl_map[:, :, 0] > tcl_map_thresh)
-         xy_text = xy_text[:, ::-1]  # (n, 2)
-
-         # Sort the text boxes via the y axis
-         xy_text = xy_text[np.argsort(xy_text[:, 1])]
-
-         scores = tcl_map[xy_text[:, 1], xy_text[:, 0], 0]
-         scores = scores[:, np.newaxis]
-
-         # Restore
-         point_num = int(tvo_map.shape[-1] / 2)
-         assert point_num == 4
-         tvo_map = tvo_map[xy_text[:, 1], xy_text[:, 0], :]
-         xy_text_tile = np.tile(xy_text, (1, point_num))  # (n, point_num * 2)
-         quads = xy_text_tile - tvo_map
-
-         return scores, quads, xy_text
-
-     def quad_area(self, quad):
-         """
-         compute area of a quad.
-         """
-         edge = [(quad[1][0] - quad[0][0]) * (quad[1][1] + quad[0][1]),
-                 (quad[2][0] - quad[1][0]) * (quad[2][1] + quad[1][1]),
-                 (quad[3][0] - quad[2][0]) * (quad[3][1] + quad[2][1]),
-                 (quad[0][0] - quad[3][0]) * (quad[0][1] + quad[3][1])]
-         return np.sum(edge) / 2.
-
-     def nms(self, dets):
-         if self.is_python35:
-             import lanms
-             dets = lanms.merge_quadrangle_n9(dets, self.nms_thresh)
-         else:
-             dets = nms_locality(dets, self.nms_thresh)
-         return dets
-
-     def cluster_by_quads_tco(self, tcl_map, tcl_map_thresh, quads, tco_map):
-         """
-         Cluster pixels in tcl_map based on quads.
-         """
-         instance_count = quads.shape[0] + 1  # contain background
-         instance_label_map = np.zeros(tcl_map.shape[:2], dtype=np.int32)
-         if instance_count == 1:
-             return instance_count, instance_label_map
-
-         # predict text center
-         xy_text = np.argwhere(tcl_map[:, :, 0] > tcl_map_thresh)
-         n = xy_text.shape[0]
-         xy_text = xy_text[:, ::-1]  # (n, 2)
-         tco = tco_map[xy_text[:, 1], xy_text[:, 0], :]  # (n, 2)
-         pred_tc = xy_text - tco
-
-         # get gt text center
-         m = quads.shape[0]
-         gt_tc = np.mean(quads, axis=1)  # (m, 2)
-
-         pred_tc_tile = np.tile(pred_tc[:, np.newaxis, :],
-                                (1, m, 1))  # (n, m, 2)
-         gt_tc_tile = np.tile(gt_tc[np.newaxis, :, :], (n, 1, 1))  # (n, m, 2)
-         dist_mat = np.linalg.norm(pred_tc_tile - gt_tc_tile, axis=2)  # (n, m)
-         xy_text_assign = np.argmin(dist_mat, axis=1) + 1  # (n,)
-
-         instance_label_map[xy_text[:, 1], xy_text[:, 0]] = xy_text_assign
-         return instance_count, instance_label_map
-
-     def estimate_sample_pts_num(self, quad, xy_text):
-         """
-         Estimate sample points number.
-         """
-         eh = (np.linalg.norm(quad[0] - quad[3]) +
-               np.linalg.norm(quad[1] - quad[2])) / 2.0
-         ew = (np.linalg.norm(quad[0] - quad[1]) +
-               np.linalg.norm(quad[2] - quad[3])) / 2.0
-
-         dense_sample_pts_num = max(2, int(ew))
-         dense_xy_center_line = xy_text[np.linspace(
-             0,
-             xy_text.shape[0] - 1,
-             dense_sample_pts_num,
-             endpoint=True,
-             dtype=np.float32).astype(np.int32)]
-
-         dense_xy_center_line_diff = dense_xy_center_line[
-             1:] - dense_xy_center_line[:-1]
-         estimate_arc_len = np.sum(
-             np.linalg.norm(
-                 dense_xy_center_line_diff, axis=1))
-
-         sample_pts_num = max(2, int(estimate_arc_len / eh))
-         return sample_pts_num
-
-     def detect_sast(self,
-                     tcl_map,
-                     tvo_map,
-                     tbo_map,
-                     tco_map,
-                     ratio_w,
-                     ratio_h,
-                     src_w,
-                     src_h,
-                     shrink_ratio_of_width=0.3,
-                     tcl_map_thresh=0.5,
-                     offset_expand=1.0,
-                     out_strid=4.0):
-         """
-         first resize the tcl_map, tvo_map and tbo_map to the input_size, then restore the polys
-         """
-         # restore quad
-         scores, quads, xy_text = self.restore_quad(tcl_map, tcl_map_thresh,
-                                                    tvo_map)
-         dets = np.hstack((quads, scores)).astype(np.float32, copy=False)
-         dets = self.nms(dets)
-         if dets.shape[0] == 0:
-             return []
-         quads = dets[:, :-1].reshape(-1, 4, 2)
-
-         # Compute quad area
-         quad_areas = []
-         for quad in quads:
-             quad_areas.append(-self.quad_area(quad))
-
-         # instance segmentation
-         # instance_count, instance_label_map = cv2.connectedComponents(tcl_map.astype(np.uint8), connectivity=8)
-         instance_count, instance_label_map = self.cluster_by_quads_tco(
-             tcl_map, tcl_map_thresh, quads, tco_map)
-
-         # restore single poly with tcl instance.
-         poly_list = []
-         for instance_idx in range(1, instance_count):
-             xy_text = np.argwhere(instance_label_map == instance_idx)[:, ::-1]
-             quad = quads[instance_idx - 1]
-             q_area = quad_areas[instance_idx - 1]
-             if q_area < 5:
-                 continue
-
-             #
-             len1 = float(np.linalg.norm(quad[0] - quad[1]))
-             len2 = float(np.linalg.norm(quad[1] - quad[2]))
-             min_len = min(len1, len2)
-             if min_len < 3:
-                 continue
-
-             # filter small CC
-             if xy_text.shape[0] <= 0:
-                 continue
-
-             # filter low confidence instance
-             xy_text_scores = tcl_map[xy_text[:, 1], xy_text[:, 0], 0]
-             if np.sum(xy_text_scores) / quad_areas[instance_idx - 1] < 0.1:
-                 # if np.sum(xy_text_scores) / quad_areas[instance_idx - 1] < 0.05:
-                 continue
-
-             # sort xy_text
-             left_center_pt = np.array(
-                 [[(quad[0, 0] + quad[-1, 0]) / 2.0,
-                   (quad[0, 1] + quad[-1, 1]) / 2.0]])  # (1, 2)
-             right_center_pt = np.array(
-                 [[(quad[1, 0] + quad[2, 0]) / 2.0,
-                   (quad[1, 1] + quad[2, 1]) / 2.0]])  # (1, 2)
-             proj_unit_vec = (right_center_pt - left_center_pt) / \
-                             (np.linalg.norm(right_center_pt - left_center_pt) + 1e-6)
-             proj_value = np.sum(xy_text * proj_unit_vec, axis=1)
-             xy_text = xy_text[np.argsort(proj_value)]
-
-             # Sample pts in tcl map
-             if self.sample_pts_num == 0:
-                 sample_pts_num = self.estimate_sample_pts_num(quad, xy_text)
-             else:
-                 sample_pts_num = self.sample_pts_num
-             xy_center_line = xy_text[np.linspace(
-                 0,
-                 xy_text.shape[0] - 1,
-                 sample_pts_num,
-                 endpoint=True,
-                 dtype=np.float32).astype(np.int32)]
-
-             point_pair_list = []
-             for x, y in xy_center_line:
-                 # get corresponding offset
-                 offset = tbo_map[y, x, :].reshape(2, 2)
-                 if offset_expand != 1.0:
-                     offset_length = np.linalg.norm(
-                         offset, axis=1, keepdims=True)
-                     expand_length = np.clip(
-                         offset_length * (offset_expand - 1),
-                         a_min=0.5,
-                         a_max=3.0)
-                     offset_detal = offset / offset_length * expand_length
-                     offset = offset + offset_detal
-                 # original point
-                 ori_yx = np.array([y, x], dtype=np.float32)
-                 point_pair = (ori_yx + offset)[:, ::-1] * out_strid / np.array(
-                     [ratio_w, ratio_h]).reshape(-1, 2)
-                 point_pair_list.append(point_pair)
-
-             # ndarry: (x, 2), expand poly along width
-             detected_poly = self.point_pair2poly(point_pair_list)
-             detected_poly = self.expand_poly_along_width(detected_poly,
-                                                          shrink_ratio_of_width)
-             detected_poly[:, 0] = np.clip(
-                 detected_poly[:, 0], a_min=0, a_max=src_w)
-             detected_poly[:, 1] = np.clip(
-                 detected_poly[:, 1], a_min=0, a_max=src_h)
-             poly_list.append(detected_poly)
-
-         return poly_list
-
-     def __call__(self, outs_dict, shape_list):
-         score_list = outs_dict['f_score']
-         border_list = outs_dict['f_border']
-         tvo_list = outs_dict['f_tvo']
-         tco_list = outs_dict['f_tco']
-         if isinstance(score_list, paddle.Tensor):
-             score_list = score_list.numpy()
-             border_list = border_list.numpy()
-             tvo_list = tvo_list.numpy()
-             tco_list = tco_list.numpy()
-
-         img_num = len(shape_list)
-         poly_lists = []
-         for ino in range(img_num):
-             p_score = score_list[ino].transpose((1, 2, 0))
-             p_border = border_list[ino].transpose((1, 2, 0))
-             p_tvo = tvo_list[ino].transpose((1, 2, 0))
-             p_tco = tco_list[ino].transpose((1, 2, 0))
-             src_h, src_w, ratio_h, ratio_w = shape_list[ino]
-
-             poly_list = self.detect_sast(
-                 p_score,
-                 p_tvo,
-                 p_border,
-                 p_tco,
-                 ratio_w,
-                 ratio_h,
-                 src_w,
-                 src_h,
-                 shrink_ratio_of_width=self.shrink_ratio_of_width,
-                 tcl_map_thresh=self.tcl_map_thresh,
-                 offset_expand=self.expand_scale)
-             poly_lists.append({'points': np.array(poly_list)})
-
-         return poly_lists
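
For orientation, the point_pair2poly helper removed above takes the (top, bottom) point pairs sampled along a text center line and lays them out as one closed ring: top points left to right, then bottom points right to left. A standalone restatement with made-up coordinates, purely for illustration:

    import numpy as np

    def point_pair2poly(point_pair_list):
        # Mirrors the removed helper: interleave (top, bottom) pairs into one polygon.
        point_num = len(point_pair_list) * 2
        point_list = [0] * point_num
        for idx, (top_pt, bottom_pt) in enumerate(point_pair_list):
            point_list[idx] = top_pt                     # top edge, left to right
            point_list[point_num - 1 - idx] = bottom_pt  # bottom edge, right to left
        return np.array(point_list).reshape(-1, 2)

    pairs = [([0, 0], [0, 2]), ([5, 0], [5, 2]), ([10, 0], [10, 2])]
    print(point_pair2poly(pairs))
    # rows: (0,0) (5,0) (10,0) (10,2) (5,2) (0,2) -- a clockwise ring in image coordinates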
pyxlpr/ppocr/tools/__init__.py
@@ -1,14 +0,0 @@
- # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
- # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
pyxlpr/ppocr/tools/eval.py
@@ -1,83 +0,0 @@
- # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import os
- import sys
- import time
-
- __dir__ = os.path.dirname(os.path.abspath(__file__))
-
- sys.path.append(__dir__)
- sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))
-
- from pyxlpr.ppocr.data import build_dataloader
- from pyxlpr.ppocr.modeling.architectures import build_model
- from pyxlpr.ppocr.postprocess import build_post_process
- from pyxlpr.ppocr.metrics import build_metric
- from pyxlpr.ppocr.utils.save_load import load_model
- from pyxlpr.ppocr.utils.utility import print_dict
- import pyxlpr.ppocr.tools.program as program
-
-
- def main(config, device, logger):
-     global_config = config['Global']
-     # build dataloader
-     valid_dataloader = build_dataloader(config, 'Eval', device, logger)
-
-     # build post process
-     post_process_class = build_post_process(config['PostProcess'],
-                                             global_config)
-
-     # build model
-     # for rec algorithm
-     if hasattr(post_process_class, 'character'):
-         char_num = len(getattr(post_process_class, 'character'))
-         if config['Architecture']["algorithm"] in ["Distillation",
-                                                    ]:  # distillation model
-             for key in config['Architecture']["Models"]:
-                 config['Architecture']["Models"][key]["Head"][
-                     'out_channels'] = char_num
-         else:  # base rec model
-             config['Architecture']["Head"]['out_channels'] = char_num
-
-     model = build_model(config['Architecture'])
-     extra_input = config['Architecture'][
-         'algorithm'] in ["SRN", "NRTR", "SAR", "SEED"]
-     if "model_type" in config['Architecture'].keys():
-         model_type = config['Architecture']['model_type']
-     else:
-         model_type = None
-
-     best_model_dict = load_model(config, model)
-     if len(best_model_dict):
-         logger.info('metric in ckpt ***************')
-         for k, v in best_model_dict.items():
-             logger.info('{}:{}'.format(k, v))
-
-     # build metric
-     eval_class = build_metric(config['Metric'])
-     # start eval
-     metric = program.eval(model, valid_dataloader, post_process_class,
-                           eval_class, model_type, extra_input)
-     logger.info(str(metric))
-     return metric
-
-
- if __name__ == '__main__':
-     config, device, logger, vdl_writer = program.preprocess()
-     main(config, device, logger)
pyxlpr/ppocr/tools/export_center.py
@@ -1,77 +0,0 @@
- # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import absolute_import
- from __future__ import division
- from __future__ import print_function
-
- import os
- import sys
- import pickle
-
- __dir__ = os.path.dirname(os.path.abspath(__file__))
- sys.path.append(__dir__)
- sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))
-
- from pyxlpr.ppocr.data import build_dataloader
- from pyxlpr.ppocr.modeling.architectures import build_model
- from pyxlpr.ppocr.postprocess import build_post_process
- from pyxlpr.ppocr.utils.save_load import load_model
- from pyxlpr.ppocr.utils.utility import print_dict
- import pyxlpr.ppocr.tools.program as program
-
-
- def main():
-     global_config = config['Global']
-     # build dataloader
-     config['Eval']['dataset']['name'] = config['Train']['dataset']['name']
-     config['Eval']['dataset']['data_dir'] = config['Train']['dataset'][
-         'data_dir']
-     config['Eval']['dataset']['label_file_list'] = config['Train']['dataset'][
-         'label_file_list']
-     eval_dataloader = build_dataloader(config, 'Eval', device, logger)
-
-     # build post process
-     post_process_class = build_post_process(config['PostProcess'],
-                                             global_config)
-
-     # build model
-     # for rec algorithm
-     if hasattr(post_process_class, 'character'):
-         char_num = len(getattr(post_process_class, 'character'))
-         config['Architecture']["Head"]['out_channels'] = char_num
-
-     #set return_features = True
-     config['Architecture']["Head"]["return_feats"] = True
-
-     model = build_model(config['Architecture'])
-
-     best_model_dict = load_model(config, model)
-     if len(best_model_dict):
-         logger.info('metric in ckpt ***************')
-         for k, v in best_model_dict.items():
-             logger.info('{}:{}'.format(k, v))
-
-     # get features from train data
-     char_center = program.get_center(model, eval_dataloader, post_process_class)
-
-     #serialize to disk
-     with open("train_center.pkl", 'wb') as f:
-         pickle.dump(char_center, f)
-     return
-
-
- if __name__ == '__main__':
-     config, device, logger, vdl_writer = program.preprocess()
-     main()
pyxlpr/ppocr/tools/export_model.py
@@ -1,129 +0,0 @@
- # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- import os
- import sys
-
- __dir__ = os.path.dirname(os.path.abspath(__file__))
- sys.path.append(__dir__)
- sys.path.append(os.path.abspath(os.path.join(__dir__, "..")))
-
- import argparse
-
- import paddle
- from paddle.jit import to_static
-
- from pyxlpr.ppocr.modeling.architectures import build_model
- from pyxlpr.ppocr.postprocess import build_post_process
- from pyxlpr.ppocr.utils.save_load import load_model
- from pyxlpr.ppocr.utils.logging import get_logger
- from pyxlpr.ppocr.tools.program import load_config, merge_config, ArgsParser
-
-
- def export_single_model(model, arch_config, save_path, logger):
-     if arch_config["algorithm"] == "SRN":
-         max_text_length = arch_config["Head"]["max_text_length"]
-         other_shape = [
-             paddle.static.InputSpec(
-                 shape=[None, 1, 64, 256], dtype="float32"), [
-                     paddle.static.InputSpec(
-                         shape=[None, 256, 1],
-                         dtype="int64"), paddle.static.InputSpec(
-                             shape=[None, max_text_length, 1], dtype="int64"),
-                     paddle.static.InputSpec(
-                         shape=[None, 8, max_text_length, max_text_length],
-                         dtype="int64"), paddle.static.InputSpec(
-                             shape=[None, 8, max_text_length, max_text_length],
-                             dtype="int64")
-                 ]
-         ]
-         model = to_static(model, input_spec=other_shape)
-     elif arch_config["algorithm"] == "SAR":
-         other_shape = [
-             paddle.static.InputSpec(
-                 shape=[None, 3, 48, 160], dtype="float32"),
-         ]
-         model = to_static(model, input_spec=other_shape)
-     else:
-         infer_shape = [3, -1, -1]
-         if arch_config["model_type"] == "rec":
-             infer_shape = [3, 32, -1]  # for rec model, H must be 32
-             if "Transform" in arch_config and arch_config[
-                     "Transform"] is not None and arch_config["Transform"][
-                         "name"] == "TPS":
-                 logger.info(
-                     "When there is tps in the network, variable length input is not supported, and the input size needs to be the same as during training"
-                 )
-                 infer_shape[-1] = 100
-             if arch_config["algorithm"] == "NRTR":
-                 infer_shape = [1, 32, 100]
-         elif arch_config["model_type"] == "table":
-             infer_shape = [3, 488, 488]
-         model = to_static(
-             model,
-             input_spec=[
-                 paddle.static.InputSpec(
-                     shape=[None] + infer_shape, dtype="float32")
-             ])
-
-     paddle.jit.save(model, save_path)
-     logger.info("inference model is saved to {}".format(save_path))
-     return
-
-
- def main(config, logger):
-     # FLAGS = ArgsParser().parse_args()
-     # config = load_config(FLAGS.config)
-     # merge_config(FLAGS.opt)
-     # logger = get_logger()
-     # build post process
-
-     post_process_class = build_post_process(config["PostProcess"],
-                                             config["Global"])
-
-     # build model
-     # for rec algorithm
-     if hasattr(post_process_class, "character"):
-         char_num = len(getattr(post_process_class, "character"))
-         if config["Architecture"]["algorithm"] in ["Distillation",
-                                                    ]:  # distillation model
-             for key in config["Architecture"]["Models"]:
-                 config["Architecture"]["Models"][key]["Head"][
-                     "out_channels"] = char_num
-                 # just one final tensor needs to to exported for inference
-                 config["Architecture"]["Models"][key][
-                     "return_all_feats"] = False
-         else:  # base rec model
-             config["Architecture"]["Head"]["out_channels"] = char_num
-     model = build_model(config["Architecture"])
-     load_model(config, model)
-     model.eval()
-
-     save_path = config["Global"]["save_inference_dir"]
-
-     arch_config = config["Architecture"]
-
-     if arch_config["algorithm"] in ["Distillation", ]:  # distillation model
-         archs = list(arch_config["Models"].values())
-         for idx, name in enumerate(model.model_name_list):
-             sub_model_save_path = os.path.join(save_path, name, "inference")
-             export_single_model(model.model_list[idx], archs[idx],
-                                 sub_model_save_path, logger)
-     else:
-         save_path = os.path.join(save_path, "inference")
-         export_single_model(model, arch_config, save_path, logger)
-
-
- if __name__ == "__main__":
-     main()
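
The removed pyxlpr/ppocr and pyxlpr/ppstructure trees above are a vendored copy of PaddlePaddle's PaddleOCR training and inference code (see the Apache headers). For projects that still need that functionality after upgrading past 0.3.96, one option is to depend on upstream PaddleOCR directly. A minimal sketch, assuming the paddleocr and paddlepaddle packages are installed; the API shown is upstream PaddleOCR's, not something defined by this diff:

    from paddleocr import PaddleOCR

    ocr = PaddleOCR(lang="ch")      # model weights are downloaded on first use
    result = ocr.ocr("sample.jpg")  # hypothetical input image path
    for line in result[0]:          # each line: [bounding box, (text, confidence)] in recent releases
        print(line)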