pyxllib 0.3.96__tar.gz → 0.3.100__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyxllib-0.3.96/pyxllib.egg-info → pyxllib-0.3.100}/PKG-INFO +19 -3
- pyxllib-0.3.100/README.md +18 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/matcher.py +78 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/stat.py +14 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/data/echarts.py +68 -3
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/data/pglib.py +75 -6
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/data/sqlite.py +7 -1
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/kq5034lib.py +119 -5
- pyxllib-0.3.100/pyxllib/ext/robustprocfile.py +191 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/unixlib.py +0 -5
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/webhook.py +2 -12
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/specialist/filelib.py +22 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/xlsxlib.py +2 -2
- pyxllib-0.3.100/pyxllib/prog/multiprocs.py +101 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/newbie.py +1 -1
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/pupil.py +36 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/__init__.py +1 -1
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/jiebalib.py +5 -2
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/xmllib.py +5 -3
- {pyxllib-0.3.96 → pyxllib-0.3.100/pyxllib.egg-info}/PKG-INFO +19 -3
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib.egg-info/SOURCES.txt +2 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/setup.py +1 -1
- pyxllib-0.3.96/README.md +0 -2
- {pyxllib-0.3.96 → pyxllib-0.3.100}/LICENSE +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/MANIFEST.in +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/disjoint.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/geo.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/intervals.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/newbie.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/pupil.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/shapelylib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/specialist.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/treelib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/algo/unitlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/expert.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/imfile.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/imhash.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/pupil.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/rgbfmt.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/trackbartools.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/xlcvlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/cv/xlpillib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/data/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/data/oss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/data/sqllib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/JLineViewer.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/autogui/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/autogui/autogui.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/autogui/virtualkey.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/demolib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/old.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/qt.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/seleniumlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/tk.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/utools.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/win32lib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/ext/yuquelib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/docxlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/gitlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/movielib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/newbie.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/onenotelib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/packlib/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/packlib/zipfile.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/pdflib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/pupil.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/specialist/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/specialist/dirlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/file/specialist/download.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/deprecatedlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/ipyexec.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/sitepackages.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/bc.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/browser.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/common.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/datetime.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/tictoc.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/specialist/xllog.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/prog/xlosenv.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/stdlib/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/stdlib/tablepyxl/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/stdlib/tablepyxl/style.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/stdlib/tablepyxl/tablepyxl.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/ahocorasick.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/charclasslib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/jscode.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/latex/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/levenshtein.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/nestenv.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/newbie.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/pupil/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/pupil/common.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/pupil/xlalign.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/pycode.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/specialist/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/specialist/common.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/specialist/ptag.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/spellchecker.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/text/vbacode.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/xl.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib/xlcv.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib.egg-info/dependency_links.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib.egg-info/requires.txt +5 -5
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxllib.egg-info/top_level.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ai/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ai/clientlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ai/specialist.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ai/torch_app.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ai/xlpaddle.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ai/xltorch.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/coco.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/datacls.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/datasets.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/gptlib.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/icdar/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/icdar/deteval.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/icdar/icdar2013.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/icdar/iou.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/icdar/rrc_evaluation_funcs_1_1.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/imtextline.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/labelme.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/removeline.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/data/specialist.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/eval/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/paddleocr.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/configs/rec/multi_language/generate_multi_language_configs.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/ColorJitter.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/copy_paste.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/east_process.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/gen_table_mask.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/iaa_augment.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/label_ops.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/make_border_map.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/make_pse_gt.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/make_shrink_map.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/operators.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/pg_process.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/randaugment.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/random_crop_data.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/rec_img_aug.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/sast_process.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/text_image_aug/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/text_image_aug/augment.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/imaug/text_image_aug/warp_mls.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/lmdb_dataset.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/pgnet_dataset.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/pubtab_dataset.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/data/simple_dataset.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/ace_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/basic_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/center_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/cls_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/combined_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/det_basic_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/det_db_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/det_east_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/det_pse_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/det_sast_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/distillation_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/e2e_pg_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/kie_sdmgr_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_aster_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_att_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_ctc_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_enhanced_ctc_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_nrtr_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_sar_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/rec_srn_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/losses/table_att_loss.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/cls_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/det_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/distillation_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/e2e_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/eval_det_iou.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/kie_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/rec_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/metrics/table_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/architectures/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/architectures/base_model.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/architectures/distillation_model.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/det_mobilenet_v3.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/det_resnet_vd.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/det_resnet_vd_sast.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/e2e_resnet_vd_pg.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/kie_unet_sdmgr.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_mobilenet_v3.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_mv1_enhance.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_nrtr_mtb.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_resnet_31.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_resnet_aster.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_resnet_fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/backbones/rec_resnet_vd.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/cls_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/det_db_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/det_east_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/det_pse_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/det_sast_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/e2e_pg_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/kie_sdmgr_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/multiheadAttention.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/rec_aster_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/rec_att_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/rec_ctc_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/rec_nrtr_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/rec_sar_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/rec_srn_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/self_attention.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/heads/table_att_head.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/db_fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/east_fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/pg_fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/rnn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/sast_fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/necks/table_fpn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/transforms/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/transforms/stn.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/transforms/tps.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/modeling/transforms/tps_spatial_transformer.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/optimizer/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/optimizer/learning_rate.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/optimizer/lr_scheduler.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/optimizer/optimizer.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/optimizer/regularizer.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/cls_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/db_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/east_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/locality_aware_nms.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/pg_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/pse_postprocess/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/pse_postprocess/pse/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/pse_postprocess/pse/setup.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/pse_postprocess/pse_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/rec_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/postprocess/sast_postprocess.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/eval.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/export_center.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/export_model.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer/predict_cls.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer/predict_det.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer/predict_e2e.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer/predict_rec.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer/predict_system.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer/utility.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer_cls.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer_det.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer_e2e.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer_kie.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer_rec.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/infer_table.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/program.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/test_hubserving.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/train.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/tools/xlprog.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/EN_symbol_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ar_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/arabic_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/be_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/bg_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/chinese_cht_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/cyrillic_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/devanagari_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/en_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/fa_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/french_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/german_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/hi_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/it_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/japan_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ka_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/korean_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/latin_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/mr_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ne_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/oc_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/pu_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/rs_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/rsc_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ru_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ta_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/table_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/table_structure_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/te_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ug_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/uk_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/ur_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict/xi_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/dict90.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_metric/Deteval.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_metric/polygon_fast.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_utils/extract_batchsize.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_fast.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_utils/extract_textpoint_slow.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_utils/pgnet_pp_utils.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/e2e_utils/visual.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/en_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/gen_label.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/ic15_dict.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/iou.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/logging.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/network.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/ppocr_keys_v1.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/profiler.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/save_load.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/stats.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppocr/utils/utility.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/predict_system.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/eval_table.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/matcher.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/predict_structure.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/predict_table.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/table_metric/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/table_metric/parallel.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/table_metric/table_metric.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/tablepyxl/__init__.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/tablepyxl/style.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/table/tablepyxl/tablepyxl.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/ppstructure/utility.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/pyxlpr/xlai.py +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/requirements.txt +0 -0
- {pyxllib-0.3.96 → pyxllib-0.3.100}/setup.cfg +0 -0
--- pyxllib-0.3.96/pyxllib.egg-info/PKG-INFO
+++ pyxllib-0.3.100/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pyxllib
-Version: 0.3.96
+Version: 0.3.100
 Summary: 厦门理工模式识别团队通用python代码工具库
 Home-page: https://github.com/XLPRUtils/pyxllib
 Author: code4101
@@ -17,5 +17,21 @@ Provides-Extra: xlcv
 Provides-Extra: xlai
 License-File: LICENSE
 
-#
-
+# 1 install
+
+```
+pip install pyxllib
+```
+
+more details: https://www.yuque.com/xlpr/pyxllib/install
+
+# 2 document
+
+document: https://www.yuque.com/xlpr/pyxllib.
+Work hard to improve, welcome more assistant partners.
+
+# 3 Acknowledge
+
+This project uses PyCharm to develop.
+Thanks to [JetBrains'](https://www.jetbrains.com/?from=pyxllib) support
+of [free license for open sourse project development](https://pycharm.iswbm.com/c01/c01_05.html).
--- /dev/null
+++ pyxllib-0.3.100/README.md
@@ -0,0 +1,18 @@
+# 1 install
+
+```
+pip install pyxllib
+```
+
+more details: https://www.yuque.com/xlpr/pyxllib/install
+
+# 2 document
+
+document: https://www.yuque.com/xlpr/pyxllib.
+Work hard to improve, welcome more assistant partners.
+
+# 3 Acknowledge
+
+This project uses PyCharm to develop.
+Thanks to [JetBrains'](https://www.jetbrains.com/?from=pyxllib) support
+of [free license for open sourse project development](https://pycharm.iswbm.com/c01/c01_05.html).
--- pyxllib-0.3.96/pyxllib/algo/matcher.py
+++ pyxllib-0.3.100/pyxllib/algo/matcher.py
@@ -13,6 +13,8 @@ from pyxllib.prog.pupil import check_install_package
 # check_install_package('Levenshtein', 'python-Levenshtein')
 
 from collections import defaultdict
+import heapq
+import math
 import warnings
 
 warnings.filterwarnings("ignore", message="loaded more than 1 DLL from .libs:")
@@ -40,6 +42,53 @@ except ModuleNotFoundError:
     pass
 
 
+def calculate_coeff_favoring_length(length1, length2, baseline=100, scale=10000):
+    """
+    根据两文本的长度计算相似度调整系数,以解决短文本过高相似度评分的问题。
+
+    短文本之间相似或完全相同的片段可能导致相似度评分过高,从而误判文本间的相关性比实际更高。
+    通过引入相似度调整系数来平衡评分,降低短文本之间的相似度得分,使评分更加合理和公平。
+
+    :param length1: 第一文本的长度
+    :param length2: 第二文本的长度
+    :param baseline: 基线长度,影响系数调整的起始点。
+    :param scale: 尺度长度,定义了系数增长到2的长度标准。
+    :return: 相似度调整系数。
+    """
+    total_length = length1 + length2
+    length_ratio = min(length1, length2) / max(length1, length2)
+
+    if total_length < baseline:
+        coefficient = 0.5 + 0.5 * (total_length / baseline)
+    else:
+        coefficient = 1 + (math.log1p(total_length - baseline + 1) / math.log1p(scale - baseline + 1))
+
+    # 考虑长度差异的影响
+    coefficient *= length_ratio
+
+    return coefficient
+
+
+def compute_text_similarity_favoring_length(text1, text2, baseline=100, scale=10000):
+    """
+    计算两段文本之间的相似度,引入长度调整系数以解决短文本过高相似度评分的问题。
+
+    :param text1: 第一段文本
+    :param text2: 第二段文本
+    :param baseline: 基线长度,影响系数调整的起始点。
+    :param scale: 尺度长度,定义了系数增长到2的长度标准。
+    :return: 加权后的相似度得分,范围在0到1之间。
+    """
+    base_similarity = Levenshtein.ratio(text1, text2)
+    coefficient = calculate_coeff_favoring_length(len(text1), len(text2), baseline, scale)
+
+    # 计算加权相似度
+    weighted_similarity = base_similarity * coefficient
+
+    # 确保相似度不会超过1
+    return min(weighted_similarity, 1.0)
+
+
 class DataMatcher:
     """ 泛化的匹配类,对任何类型的数据进行匹配 """
 
@@ -161,6 +210,35 @@ class DataMatcher:
         center_idx = max(indices, key=lambda x: sum(get_similarity(x, y) for y in indices))
         return center_idx
 
+    def find_top_similar_pairs(self, top_n=1):
+        """找到最相近的top_n对数据。
+
+        :param top_n: 需要返回的最相似的数据对的数量。
+        :return: 一个列表,包含(top_n个)最相似数据对的索引和它们之间的相似度。
+        """
+        if len(self.data) < 2:
+            return []
+
+        # 初始化一个列表来保存最相似的数据对,使用最小堆来维护这个列表
+        # 最小堆能够保证每次都能快速弹出相似度最小的数据对
+        top_pairs = []
+
+        for i in tqdm(range(len(self.data))):
+            for j in range(i + 1, len(self.data)):
+                similarity = self.compute_similarity(self.data[i], self.data[j])
+
+                # 如果当前相似度对数量还未达到top_n,直接添加
+                if len(top_pairs) < top_n:
+                    heapq.heappush(top_pairs, (similarity, (i, j)))
+                else:
+                    # 如果当前对的相似度大于堆中最小的相似度,替换之
+                    if similarity > top_pairs[0][0]:
+                        heapq.heapreplace(top_pairs, (similarity, (i, j)))
+
+        # 将堆转换为排序后的列表返回
+        top_pairs.sort(reverse=True, key=lambda x: x[0])
+        return [(pair[1], pair[0]) for pair in top_pairs]
+
 
 class GroupedDataMatcher(DataMatcher):
     """ 对数据量特别大的情况,我们可以先对数据进行分组,然后再对每个分组进行匹配 """
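For readers who want to try the new length-aware similarity helpers above, here is a minimal usage sketch. It assumes `pip install pyxllib python-Levenshtein`; the functions are module-level in `pyxllib/algo/matcher.py`, so they import directly.

```python
# Minimal sketch: the length coefficient discounts matches between very short texts.
import Levenshtein
from pyxllib.algo.matcher import compute_text_similarity_favoring_length

a, b = "abc", "abd"
print(Levenshtein.ratio(a, b))                        # ~0.667, raw edit-distance ratio
print(compute_text_similarity_favoring_length(a, b))  # ~0.35, the short pair is discounted

long_a = "the quick brown fox jumps over the lazy dog. " * 10
long_b = "the quick brown fox jumped over a lazy dog. " * 10
# Long, mostly identical texts keep a high score (the result is capped at 1.0).
print(compute_text_similarity_favoring_length(long_a, long_b))
```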
--- pyxllib-0.3.96/pyxllib/algo/stat.py
+++ pyxllib-0.3.100/pyxllib/algo/stat.py
@@ -456,3 +456,17 @@ def custom_fillna(df, default_fill_value='', numeric_fill_value=None, specific_f
             df[column] = df[column].fillna(default_fill_value)
     # 可以在这里添加更多条件,以处理其他数据类型,如datetime。
     return df
+
+
+def dataframe_to_list(df):
+    """将DataFrame转换为列表结构,第一行是表头,其余是数据"""
+    # 获取表头(列名)作为第一个列表元素
+    headers = df.columns.tolist()
+
+    # 获取数据行,每一行作为一个列表,然后将所有这些列表收集到一个大列表中
+    data_rows = df.values.tolist()
+
+    # 将表头和数据行合并成最终的列表
+    result_list = [headers] + data_rows
+
+    return result_list
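A quick sketch of the new `dataframe_to_list` helper; nothing here beyond what the diff shows, just a small DataFrame to make the output shape concrete.

```python
import pandas as pd
from pyxllib.algo.stat import dataframe_to_list

df = pd.DataFrame({'name': ['a', 'b'], 'score': [1, 2]})
print(dataframe_to_list(df))
# -> [['name', 'score'], ['a', 1], ['b', 2]]
```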
--- pyxllib-0.3.96/pyxllib/data/echarts.py
+++ pyxllib-0.3.100/pyxllib/data/echarts.py
@@ -14,10 +14,14 @@ from pyxllib.prog.pupil import check_install_package
 
 check_install_package('pyecharts')
 
+# import types
+
 import pyecharts
 from pyecharts import options as opts
 from pyecharts.commons.utils import JsCode
-from pyecharts.charts import Bar, Line
+from pyecharts.globals import ChartType
+from pyecharts import types
+from pyecharts.charts import Bar, Line, Radar
 from pyecharts.charts.chart import Chart
 
 from pyxllib.prog.pupil import inject_members
@@ -54,8 +58,7 @@ class XlChart(Chart):
             kwargs['label']['formatter'] = fmt
 
         self._append_color(color)
-        self._append_legend(name)
-        # self._append_legend(name, is_selected=True)
+        self._append_legend(name, is_selected=True)
 
         self.options.get('series').append(
             {
@@ -101,6 +104,68 @@ class XlBar(Bar):
         """
         return cls.from_dict({'value': list(yaxis)}, xaxis=xaxis, title=title)
 
+    @classmethod
+    def from_data_split_into_groups(cls, data, groups, *, title=None):
+        """根据给定的组数自动拆分数据并生成条形图
+        :param list data: 数据清单
+        :param int groups: 要拆分成的组数
+        """
+        # 找到最大值和最小值
+        min_val, max_val = min(data), max(data)
+
+        # 计算间隔
+        interval = (max_val - min_val) / groups
+
+        # 分组和标签
+        group_counts = [0] * groups
+        labels = []
+        # todo 如果数据量特别大,这里应该排序后,再用特殊方法计算分组
+        for value in data:
+            index = min(int((value - min_val) / interval), groups - 1)
+            group_counts[index] += 1
+
+        for i in range(groups):
+            labels.append(f"{min_val + interval * i:.2f}-{min_val + interval * (i + 1):.2f}")
+        # t = cls.from_dict({'value': group_counts}, xaxis=labels, title=title)
+
+        return cls.from_dict({'value': group_counts}, xaxis=labels, title=title)
+
+
+class XlRadar(Radar):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.color_idx = 0
+
+    def add(
+            self,
+            series_name: str,
+            data: types.Sequence[types.Union[opts.RadarItem, dict]],
+            *,
+            label_opts=None,
+            color: types.Optional[str] = None,
+            linestyle_opts=None,
+            **kwargs
+    ):
+        """ 标准库(2.0.5版)的雷达图颜色渲染有问题,这里要增加一个修正过程 """
+        if label_opts is None:
+            label_opts = opts.LabelOpts(is_show=False)
+
+        if linestyle_opts is None:
+            linestyle_opts = opts.LineStyleOpts(color=self.colors[self.color_idx % len(self.colors)])
+            self.color_idx += 1
+        elif linestyle_opts.get('color') is None:
+            linestyle_opts.update(color=self.colors[self.color_idx % len(self.colors)])
+            self.color_idx += 1
+
+        if color is None:
+            color = linestyle_opts.get('color')
+
+        return super(XlRadar, self).add(series_name, data,
+                                        label_opts=label_opts,
+                                        color=color,
+                                        linestyle_opts=linestyle_opts,
+                                        **kwargs)
+
 
 inject_members(XlBar, Bar)
 
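A hedged usage sketch of the new `XlBar.from_data_split_into_groups` classmethod: it buckets a flat list of numbers into `groups` equal-width intervals and charts the counts. The import path follows the module shown above; `render()` is the standard pyecharts output call, and the data here is made up for illustration.

```python
import random
from pyxllib.data.echarts import XlBar

data = [random.gauss(50, 10) for _ in range(1000)]
chart = XlBar.from_data_split_into_groups(data, 10, title='score distribution')
chart.render('score_distribution.html')  # writes a standalone HTML chart
```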
--- pyxllib-0.3.96/pyxllib/data/pglib.py
+++ pyxllib-0.3.100/pyxllib/data/pglib.py
@@ -52,6 +52,17 @@ class Connection(psycopg.Connection, SqlBase):
     def __1_库(self):
        pass
 
+    def get_db_activities(self):
+        """
+        检索当前数据库的活动信息。
+        """
+        sql = """
+        SELECT pid, datname, usename, state, query, age(now(), query_start) AS "query_age"
+        FROM pg_stat_activity
+        WHERE state = 'active'
+        """
+        return self.exec2dict(sql).fetchall()
+
     def __2_表格(self):
         pass
 
@@ -140,6 +151,10 @@ class Connection(psycopg.Connection, SqlBase):
         也可以写复杂的处理算法规则,详见 http://postgres.cn/docs/12/sql-insert.html
         比如这里是插入的id重复的话,就把host_name替换掉,还可以指定nick_name替换为'abc'
         注意前面的(id)是必须要输入的
+
+        注意:有个常见需求,是想插入后返回对应的id,但是这样就需要知道这张表自增的id字段名
+        以及还是很难获得插入后的id值,可以默认刚插入的id是最大的,但是这样并不安全,有风险
+        建议还是外部自己先计算全表最大的id值,自己实现自增,就能知道插入的这条数据的id了
         """
         ks = ','.join(cols.keys())
         vs = ','.join(['%s'] * (len(cols.keys())))
@@ -419,8 +434,8 @@ class XlprDb(Connection):
             if gpu:
                 status['gpu_memory'] = ssh.check_gpu_usage(print_mode=True)
             if disk:
-                #
-                status['disk_memory'] = ssh.check_disk_usage(print_mode=True, timeout=
+                # 检查磁盘空间会很慢,如果超时可以跳过。(设置超时6小时)
+                status['disk_memory'] = ssh.check_disk_usage(print_mode=True, timeout=60 * 60 * 6)
         except Exception as e:
             status = {'error': f'{str(type(e))[8:-2]}: {e}'}
         print(status)
@@ -524,7 +539,8 @@ class XlprDb(Connection):
 
         args = ['CPU核心数(比如4核显示是400%)', date_trunc, recent, 'sum(hosts.cpu_number)*100']
 
-        htmltexts = [
+        htmltexts = [
+            '<a target="_blank" href="https://www.yuque.com/xlpr/data/hnpb2g?singleDoc#"> 《服务器监控》工具使用文档 </a>']
         res = self._get_host_trace_total('cpu', 'XLPR服务器 CPU 使用近况', *args)
         htmltexts.append(res[0])
 
@@ -546,7 +562,8 @@ class XlprDb(Connection):
 
         args = ['内存(单位:GB)', date_trunc, recent, 'sum(hosts.cpu_gb)']
 
-        htmltexts = [
+        htmltexts = [
+            '<a target="_blank" href="https://www.yuque.com/xlpr/data/hnpb2g?singleDoc#"> 《服务器监控》工具使用文档 </a>']
         res = self._get_host_trace_total('cpu_memory', 'XLPR服务器 内存 使用近况', *args)
         htmltexts.append(res[0])
 
@@ -570,7 +587,8 @@ class XlprDb(Connection):
 
         args = ['硬盘(单位:GB)', date_trunc, recent, 'sum(hosts.disk_gb)']
 
-        htmltexts = [
+        htmltexts = [
+            '<a target="_blank" href="https://www.yuque.com/xlpr/data/hnpb2g?singleDoc#"> 《服务器监控》工具使用文档 </a>']
         res = self._get_host_trace_total('disk_memory', 'XLPR服务器 DISK硬盘 使用近况', *args)
         htmltexts.append(res[0])
         htmltexts.append('注:xlpr4(四卡)服务器使用du计算/home大小有问题,未统计在列<br/>')
@@ -597,7 +615,8 @@ class XlprDb(Connection):
 
         args = ['显存(单位:GB)', date_trunc, recent, 'sum(hosts.gpu_gb)']
 
-        htmltexts = [
+        htmltexts = [
+            '<a target="_blank" href="https://www.yuque.com/xlpr/data/hnpb2g?singleDoc#"> 《服务器监控》工具使用文档 </a>']
         res = self._get_host_trace_total('gpu_memory', 'XLPR八台服务器 GPU显存 使用近况', *args)
         htmltexts.append(res[0])
 
@@ -641,3 +660,53 @@ class XlprDb(Connection):
                     self.update_row('files', {'dhash': computed_dhash}, {'id': file_id})
                 progress_bar.update(1)
         self.commit()
+
+    def append_history(self, table_name, where, backup_keys, *,
+                       can_merge=None,
+                       update_time=None,
+                       commit=False):
+        """ 为表格添加历史记录,请确保这个表有一个jsonb格式的historys字段
+
+        这里每次都会对关键字段进行全量备份,没有进行高级的优化。
+        所以只适用于一些历史记录功能场景。更复杂的还是需要另外自己定制。
+
+        :param table_name: 表名
+        :param where: 要记录的id的规则,请确保筛选后记录是唯一的
+        :param backup_keys: 需要备份的字段名
+        :param can_merge: 在某些情况下,history不需要非常冗余地记录,可以给定与上一条合并的规则
+            def can_merge(last, now):
+                "last是上一条字典记录,now是当前要记录的字典数据,
+                返回True,则用now替换last,并不新增记录"
+                ...
+
+        :param update_time: 更新时间,如果不指定则使用当前时间
+        """
+        # 1 获得历史记录
+        ops = ' AND '.join([f'{k}=%s' for k in where.keys()])
+        historys = self.exec2one(f'SELECT historys FROM {table_name} WHERE {ops}', list(where.values())) or []
+        if historys:
+            status1 = historys[-1]
+        else:
+            status1 = {}
+
+        # 2 获得新记录
+        if update_time is None:
+            update_time = utc_timestamp()
+        status2 = self.exec2dict(f'SELECT {",".join(backup_keys)} FROM {table_name} WHERE {ops}',
+                                 list(where.values())).fetchone()
+        status2['update_time'] = update_time
+
+        # 3 添加历史记录
+        if can_merge is None:
+            def can_merge(status1, status2):
+                for k in backup_keys:
+                    if status1.get(k) != status2.get(k):
+                        return False
+                return True
+
+        if historys and can_merge(status1, status2):
+            historys[-1] = status2
+        else:
+            historys.append(status2)
+
+        self.update_row(table_name, {'historys': historys}, where, commit=commit)
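Two of the new `Connection` helpers above lend themselves to a short, hedged sketch. It assumes `db` is an already-open pyxllib pglib connection (connection setup omitted), and, for `append_history`, a table that actually has the jsonb `historys` column the docstring requires; the table, column names and id below are illustrative only.

```python
# db = ...  # an open pyxllib.data.pglib Connection / XlprDb instance (setup omitted)

# List currently active backend queries (wraps pg_stat_activity).
for act in db.get_db_activities():
    print(act['pid'], act['state'], act['query_age'], act['query'][:80])

# Snapshot selected columns of one row into its jsonb `historys` field.
# 'hosts', 'cpu_number' and 'cpu_gb' appear elsewhere in this diff; the id
# value and the presence of a historys column on that table are assumptions.
db.append_history('hosts', {'id': 42}, ['cpu_number', 'cpu_gb'], commit=True)
```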
--- pyxllib-0.3.96/pyxllib/data/sqlite.py
+++ pyxllib-0.3.100/pyxllib/data/sqlite.py
@@ -7,9 +7,12 @@
 import json
 import re
 import sqlite3
+import warnings
 
 import pandas as pd
 
+warnings.filterwarnings('ignore', message="pandas only support SQLAlchemy connectable")
+
 
 class SqlBase:
     """ Sql语法通用的功能 """
@@ -109,7 +112,10 @@ class SqlBase:
 
     def exec2one(self, *args, **kwargs):
         """ 获得第1行的值 """
-        return self.execute(*args, **kwargs).fetchone()[0]
+        try:
+            return self.execute(*args, **kwargs).fetchone()[0]
+        except TypeError:
+            return None
 
     def exec2row(self, *args, **kwargs):
         """ 获得第1行的值 """
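The `exec2one` change is easiest to see with plain `sqlite3`: when a query matches no rows, `fetchone()` returns `None`, so the old `fetchone()[0]` raised `TypeError`; the new code catches that and returns `None` instead.

```python
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE t (x INTEGER)')
row = con.execute('SELECT x FROM t WHERE x = 1').fetchone()
print(row)  # None -> row[0] would raise TypeError: 'NoneType' object is not subscriptable
# exec2one now returns None in this situation instead of raising.
```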
--- pyxllib-0.3.96/pyxllib/ext/kq5034lib.py
+++ pyxllib-0.3.100/pyxllib/ext/kq5034lib.py
@@ -15,13 +15,14 @@ from pyxllib.prog.pupil import check_install_package
 check_install_package('fire')  # 自动安装依赖包
 
 from collections import Counter, defaultdict
-from datetime import date
+from datetime import date, timedelta
 import datetime
 import math
 import os
 import re
 import time
 from io import StringIO
+import csv
 
 import fire
 import pandas as pd
@@ -112,7 +113,7 @@ class Xiaoetong:
         """ 获取打卡参与用户
         """
         # 获取总页数
-        url = "https://api.xiaoe-tech.com/xe.elock.actor/1.0.0"
+        url = "https://api.xiaoe-tech.com/xe.elock.actor/1.0.0"  # 接口地址【路径:API列表 -> 打卡管理 -> 获取打卡参与用户】
         data_1 = {
             "access_token": self.token,
             "activity_id": activity_id,
@@ -124,7 +125,7 @@ class Xiaoetong:
         page = math.ceil(result_1['data']['count'] / page_size)  # 页数
         # 获取打卡用户数据
         lst = result_1['data']['list']
-        for i in range(1, page):
+        for i in range(1, page):  # 为什么从1开始,因为第一页的数据上面已经获取到了,这里没必要从新获取一次
             data = {
                 "access_token": self.token,
                 "activity_id": activity_id,
@@ -139,7 +140,6 @@ class Xiaoetong:
         return lst
 
 
-
 class 网课考勤:
     def __init__(self, today=None):
         self.返款标题 = ''
@@ -684,7 +684,7 @@ class 网课考勤:
                 df = pd.read_csv(files[-1])  # 221005周三09:19,小鹅通又双叒更新了
             except UnicodeDecodeError:
                 pass
-
+
             if df is None:
                 try:
                     df = pd.read_csv(files[-1], encoding="ANSI")  # 240226周一11:21,
@@ -960,6 +960,120 @@ class 网课考勤:
         # driver.click('//*[@id="commitRefundApplyBtn"]')  # 建议手动点"提交申请"
 
 
+class 网课考勤2(网课考勤):
+    def login_xe(self):
+        self.xe = Xiaoetong()  # 实例化
+        self.xe.login(self.app_id,
+                      self.client_id,
+                      self.secret_key)  # 获取了token
+
+    # 依据课程链接,获取资源id(与课次)
+    def 获取课次与资源id(self):
+        课程链接 = self.课程链接[1:]
+        ls_resource_id = [""]
+        for item in 课程链接:  # 课次
+            resource_id = re.search(r"detail\?id=(.+?)\&", item)  # 资源id
+            ls_resource_id.append(resource_id.group(1))
+        return ls_resource_id
+
+    # 获取直播间用户数据
+    def 获取直播间用户数据(self, resource_id, path):
+        if path.is_file():
+            return
+        # 2)获取直播间用户数据:
+        lst = self.xe.get_alive_user_list(resource_id)
+        fieldnames = ['用户ID', '用户昵称', '备注名', '状态', '直播间停留时长(秒)', '直播间停留时长',
+                      '累计观看时长(秒)', '累计观看时长', '直播观看时长(秒)', '直播观看时长', '回放观看时长(秒)',
+                      '回放观看时长', '评论次数', '直播间成交金额']
+        p = path
+        with open(p, mode='w', newline='', encoding='utf-8') as file:
+            writer = csv.DictWriter(file, fieldnames=fieldnames)
+            writer.writeheader()
+            for x in lst:
+                record = {
+                    '用户ID': x['user_id'],
+                    '用户昵称': x['wx_nickname'],
+                    '备注名': None,
+                    '状态': "其它关联权益",
+                    '直播间停留时长(秒)': x['his_online_time'],
+                    '直播间停留时长': str(timedelta(seconds=x['his_online_time'])),
+                    '累计观看时长(秒)': x['his_learn_time'],
+                    '累计观看时长': str(timedelta(seconds=x['his_learn_time'])),
+                    '直播观看时长(秒)': x['his_learning_time'],
+                    '直播观看时长': str(timedelta(seconds=x['his_learning_time'])),
+                    '回放观看时长(秒)': x['his_learned_time'],
+                    '回放观看时长': str(timedelta(seconds=x['his_learned_time'])),
+                    '评论次数': x['comment_num'],
+                    '直播间成交金额': x['user_total_price']
+                }
+                writer.writerow(record)
+
+    def 获取课次列表(self):
+        return list(range(max(self.结束课次2, 1), self.当天课次2 + 1))
+
+    def 下载课程(self):
+        prfx = self.prfx
+        lt = self.获取课次与资源id()
+        for i in tqdm(self.获取课次列表()):
+            resource_id = lt[i]
+            formatted_date = self.today  # datetime.datetime.now().strftime("%Y-%m-%d")
+            path = prfx.format(x=i, y=formatted_date)
+            self.获取直播间用户数据(resource_id, self.root / "数据表" / path)
+
+    # 20240206 新增【针对打卡部分
+    def 获取打卡id(self):
+        """ 依据打卡链接,获取activity_id(打卡id)
+        """
+        打卡链接 = self.打卡链接[1:]
+        ls_activity_id = []
+        for item in 打卡链接:  # 课次
+            activity_id = re.search(r"\?activity_id=(.+?)\&", item)
+            ls_activity_id.append(activity_id.group(1))
+        return ls_activity_id
+
+    def 获取打卡参与用户(self, activity_id, path):
+        # 1)如果路径中已经有了,就跳过
+        if path.is_file():
+            return
+        # 2)获取打卡用户数据:
+        lst = self.xe.get_elock_actor(activity_id)
+        fieldnames = ['用户id', '用户昵称', '打卡昵称', '打卡分组', '姓名', '电话', '最近采集号码', '城市', '微信号',
+                      '打卡天数', '打卡次数', '被点赞数', '被评论数', '被点评数', '被精选数', '参与时间']
+        p = path
+        with open(p, mode='w', newline='', encoding='utf-8') as file:
+            writer = csv.DictWriter(file, fieldnames=fieldnames)
+            writer.writeheader()
+            for x in lst:
+                record = {
+                    '用户id': x['user_id'],
+                    '用户昵称': x['wx_nickname'],
+                    '打卡昵称': x['clock_nickname'],
+                    '打卡分组': None,
+                    '姓名': x['wx_nickname'],
+                    '电话': x['phone'],
+                    '最近采集号码': None,
+                    '城市': x['wx_city'],
+                    '微信号': None,
+                    '打卡天数': x['clock_days'],
+                    '打卡次数': x['clock_days'],
+                    '被点赞数': x['zan_count'],
+                    '被评论数': x['comment_count'],
+                    '被点评数': x['review_count'],
+                    '被精选数': 0,
+                    '参与时间': x['created_at']
+                }
+                writer.writerow(record)
+
+    def 下载打卡数据(self):
+        prfx = "{x}-" + f"《{self.返款标题}技术公益网课【中心教室】-日历打卡学员数据.csv"
+        lt = self.获取打卡id()
+        for i in tqdm(range(1)):
+            activity_id = lt[i]
+            formatted_date = self.today  # datetime.datetime.now().strftime("%Y-%m-%d")
+            path = prfx.format(x=formatted_date)
+            self.获取打卡参与用户(activity_id, self.root / "数据表" / path)
+
+
 class KqDb(Connection):
     """ 五一身心行修考勤工具 """
 
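The new 网课考勤2 methods above mostly wrap the Xiaoe-tech API client and dump the results to CSV; the one piece that is easy to show standalone is the URL parsing used by 获取打卡id, sketched below with a made-up link.

```python
import re

# Same pattern 获取打卡id uses to pull the activity_id out of a check-in link.
url = "https://example.xiaoe-tech.com/checkin?activity_id=a_123abc&product_id=p_456"
m = re.search(r"\?activity_id=(.+?)\&", url)
print(m.group(1))  # a_123abc
```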