python-doctr 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85) hide show
  1. doctr/__init__.py +1 -1
  2. doctr/contrib/__init__.py +0 -0
  3. doctr/contrib/artefacts.py +131 -0
  4. doctr/contrib/base.py +105 -0
  5. doctr/datasets/datasets/pytorch.py +2 -2
  6. doctr/datasets/generator/base.py +6 -5
  7. doctr/datasets/imgur5k.py +1 -1
  8. doctr/datasets/loader.py +1 -6
  9. doctr/datasets/utils.py +2 -1
  10. doctr/datasets/vocabs.py +9 -2
  11. doctr/file_utils.py +26 -12
  12. doctr/io/elements.py +40 -6
  13. doctr/io/html.py +2 -2
  14. doctr/io/image/pytorch.py +6 -8
  15. doctr/io/image/tensorflow.py +1 -1
  16. doctr/io/pdf.py +5 -2
  17. doctr/io/reader.py +6 -0
  18. doctr/models/__init__.py +0 -1
  19. doctr/models/_utils.py +57 -20
  20. doctr/models/builder.py +71 -13
  21. doctr/models/classification/mobilenet/pytorch.py +45 -9
  22. doctr/models/classification/mobilenet/tensorflow.py +38 -7
  23. doctr/models/classification/predictor/pytorch.py +18 -11
  24. doctr/models/classification/predictor/tensorflow.py +16 -10
  25. doctr/models/classification/textnet/pytorch.py +3 -3
  26. doctr/models/classification/textnet/tensorflow.py +3 -3
  27. doctr/models/classification/zoo.py +39 -15
  28. doctr/models/detection/__init__.py +1 -0
  29. doctr/models/detection/_utils/__init__.py +1 -0
  30. doctr/models/detection/_utils/base.py +66 -0
  31. doctr/models/detection/differentiable_binarization/base.py +4 -3
  32. doctr/models/detection/differentiable_binarization/pytorch.py +2 -2
  33. doctr/models/detection/differentiable_binarization/tensorflow.py +14 -18
  34. doctr/models/detection/fast/__init__.py +6 -0
  35. doctr/models/detection/fast/base.py +257 -0
  36. doctr/models/detection/fast/pytorch.py +442 -0
  37. doctr/models/detection/fast/tensorflow.py +428 -0
  38. doctr/models/detection/linknet/base.py +4 -3
  39. doctr/models/detection/predictor/pytorch.py +15 -1
  40. doctr/models/detection/predictor/tensorflow.py +15 -1
  41. doctr/models/detection/zoo.py +21 -4
  42. doctr/models/factory/hub.py +3 -12
  43. doctr/models/kie_predictor/base.py +9 -3
  44. doctr/models/kie_predictor/pytorch.py +41 -20
  45. doctr/models/kie_predictor/tensorflow.py +36 -16
  46. doctr/models/modules/layers/pytorch.py +89 -10
  47. doctr/models/modules/layers/tensorflow.py +88 -10
  48. doctr/models/modules/transformer/pytorch.py +2 -2
  49. doctr/models/predictor/base.py +77 -50
  50. doctr/models/predictor/pytorch.py +31 -20
  51. doctr/models/predictor/tensorflow.py +27 -17
  52. doctr/models/preprocessor/pytorch.py +4 -4
  53. doctr/models/preprocessor/tensorflow.py +3 -2
  54. doctr/models/recognition/master/pytorch.py +2 -2
  55. doctr/models/recognition/parseq/pytorch.py +4 -3
  56. doctr/models/recognition/parseq/tensorflow.py +4 -3
  57. doctr/models/recognition/sar/pytorch.py +7 -6
  58. doctr/models/recognition/sar/tensorflow.py +3 -9
  59. doctr/models/recognition/vitstr/pytorch.py +1 -1
  60. doctr/models/recognition/zoo.py +1 -1
  61. doctr/models/zoo.py +2 -2
  62. doctr/py.typed +0 -0
  63. doctr/transforms/functional/base.py +1 -1
  64. doctr/transforms/functional/pytorch.py +4 -4
  65. doctr/transforms/modules/base.py +37 -15
  66. doctr/transforms/modules/pytorch.py +66 -8
  67. doctr/transforms/modules/tensorflow.py +63 -7
  68. doctr/utils/fonts.py +7 -5
  69. doctr/utils/geometry.py +35 -12
  70. doctr/utils/metrics.py +33 -174
  71. doctr/utils/reconstitution.py +126 -0
  72. doctr/utils/visualization.py +5 -118
  73. doctr/version.py +1 -1
  74. {python_doctr-0.8.0.dist-info → python_doctr-0.9.0.dist-info}/METADATA +96 -91
  75. {python_doctr-0.8.0.dist-info → python_doctr-0.9.0.dist-info}/RECORD +79 -75
  76. {python_doctr-0.8.0.dist-info → python_doctr-0.9.0.dist-info}/WHEEL +1 -1
  77. doctr/models/artefacts/__init__.py +0 -2
  78. doctr/models/artefacts/barcode.py +0 -74
  79. doctr/models/artefacts/face.py +0 -63
  80. doctr/models/obj_detection/__init__.py +0 -1
  81. doctr/models/obj_detection/faster_rcnn/__init__.py +0 -4
  82. doctr/models/obj_detection/faster_rcnn/pytorch.py +0 -81
  83. {python_doctr-0.8.0.dist-info → python_doctr-0.9.0.dist-info}/LICENSE +0 -0
  84. {python_doctr-0.8.0.dist-info → python_doctr-0.9.0.dist-info}/top_level.txt +0 -0
  85. {python_doctr-0.8.0.dist-info → python_doctr-0.9.0.dist-info}/zip-safe +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: python-doctr
3
- Version: 0.8.0
3
+ Version: 0.9.0
4
4
  Summary: Document Text Recognition (docTR): deep Learning for high-performance OCR on documents.
5
5
  Author-email: Mindee <contact@mindee.com>
6
6
  Maintainer: François-Guillaume Fernandez, Charles Gaillard, Olivier Dulcy, Felix Dittrich
@@ -209,7 +209,7 @@ License: Apache License
209
209
  Project-URL: documentation, https://mindee.github.io/doctr
210
210
  Project-URL: repository, https://github.com/mindee/doctr
211
211
  Project-URL: tracker, https://github.com/mindee/doctr/issues
212
- Project-URL: changelog, https://github.com/mindee/doctr/latest/changelog.html
212
+ Project-URL: changelog, https://mindee.github.io/doctr/changelog.html
213
213
  Keywords: OCR,deep learning,computer vision,tensorflow,pytorch,text detection,text recognition
214
214
  Classifier: Development Status :: 4 - Beta
215
215
  Classifier: Intended Audience :: Developers
@@ -219,87 +219,93 @@ Classifier: License :: OSI Approved :: Apache Software License
219
219
  Classifier: Natural Language :: English
220
220
  Classifier: Operating System :: OS Independent
221
221
  Classifier: Programming Language :: Python :: 3
222
- Classifier: Programming Language :: Python :: 3.8
223
222
  Classifier: Programming Language :: Python :: 3.9
224
223
  Classifier: Programming Language :: Python :: 3.10
224
+ Classifier: Programming Language :: Python :: 3.11
225
225
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
226
- Requires-Python: <4,>=3.8.0
226
+ Requires-Python: <4,>=3.9.0
227
227
  Description-Content-Type: text/markdown
228
228
  License-File: LICENSE
229
- Requires-Dist: importlib-metadata
230
- Requires-Dist: numpy <2.0.0,>=1.16.0
231
- Requires-Dist: scipy <2.0.0,>=1.4.0
232
- Requires-Dist: h5py <4.0.0,>=3.1.0
233
- Requires-Dist: opencv-python <5.0.0,>=4.5.0
234
- Requires-Dist: pypdfium2 <5.0.0,>=4.0.0
235
- Requires-Dist: pyclipper <2.0.0,>=1.2.0
236
- Requires-Dist: shapely <3.0.0,>=1.6.0
237
- Requires-Dist: langdetect <2.0.0,>=1.0.9
238
- Requires-Dist: rapidfuzz <4.0.0,>=3.0.0
239
- Requires-Dist: huggingface-hub <1.0.0,>=0.20.0
240
- Requires-Dist: matplotlib >=3.1.0
241
- Requires-Dist: weasyprint >=55.0
242
- Requires-Dist: Pillow >=9.2.0
243
- Requires-Dist: defusedxml >=0.7.0
244
- Requires-Dist: mplcursors >=0.3
245
- Requires-Dist: unidecode >=1.0.0
246
- Requires-Dist: tqdm >=4.30.0
229
+ Requires-Dist: numpy<2.0.0,>=1.16.0
230
+ Requires-Dist: scipy<2.0.0,>=1.4.0
231
+ Requires-Dist: h5py<4.0.0,>=3.1.0
232
+ Requires-Dist: opencv-python<5.0.0,>=4.5.0
233
+ Requires-Dist: pypdfium2<5.0.0,>=4.11.0
234
+ Requires-Dist: pyclipper<2.0.0,>=1.2.0
235
+ Requires-Dist: shapely<3.0.0,>=1.6.0
236
+ Requires-Dist: langdetect<2.0.0,>=1.0.9
237
+ Requires-Dist: rapidfuzz<4.0.0,>=3.0.0
238
+ Requires-Dist: huggingface-hub<1.0.0,>=0.20.0
239
+ Requires-Dist: Pillow>=9.2.0
240
+ Requires-Dist: defusedxml>=0.7.0
241
+ Requires-Dist: anyascii>=0.3.2
242
+ Requires-Dist: tqdm>=4.30.0
243
+ Provides-Extra: contrib
244
+ Requires-Dist: onnxruntime>=1.11.0; extra == "contrib"
247
245
  Provides-Extra: dev
248
- Requires-Dist: tensorflow <2.16.0,>=2.11.0 ; extra == 'dev'
249
- Requires-Dist: tf2onnx <2.0.0,>=1.16.0 ; extra == 'dev'
250
- Requires-Dist: torch <3.0.0,>=1.12.0 ; extra == 'dev'
251
- Requires-Dist: torchvision >=0.13.0 ; extra == 'dev'
252
- Requires-Dist: onnx <3.0.0,>=1.12.0 ; extra == 'dev'
253
- Requires-Dist: pytest >=5.3.2 ; extra == 'dev'
254
- Requires-Dist: coverage[toml] >=4.5.4 ; extra == 'dev'
255
- Requires-Dist: hdf5storage >=0.1.18 ; extra == 'dev'
256
- Requires-Dist: onnxruntime >=1.11.0 ; extra == 'dev'
257
- Requires-Dist: requests >=2.20.0 ; extra == 'dev'
258
- Requires-Dist: psutil >=5.9.5 ; extra == 'dev'
259
- Requires-Dist: ruff >=0.1.5 ; extra == 'dev'
260
- Requires-Dist: mypy >=0.812 ; extra == 'dev'
261
- Requires-Dist: pre-commit >=2.17.0 ; extra == 'dev'
262
- Requires-Dist: sphinx !=3.5.0,>=3.0.0 ; extra == 'dev'
263
- Requires-Dist: sphinxemoji >=0.1.8 ; extra == 'dev'
264
- Requires-Dist: sphinx-copybutton >=0.3.1 ; extra == 'dev'
265
- Requires-Dist: docutils <0.21 ; extra == 'dev'
266
- Requires-Dist: recommonmark >=0.7.1 ; extra == 'dev'
267
- Requires-Dist: sphinx-markdown-tables >=0.0.15 ; extra == 'dev'
268
- Requires-Dist: sphinx-tabs >=3.3.0 ; extra == 'dev'
269
- Requires-Dist: furo >=2022.3.4 ; extra == 'dev'
246
+ Requires-Dist: tensorflow<2.16.0,>=2.11.0; extra == "dev"
247
+ Requires-Dist: tf2onnx<2.0.0,>=1.16.0; extra == "dev"
248
+ Requires-Dist: torch<3.0.0,>=1.12.0; extra == "dev"
249
+ Requires-Dist: torchvision>=0.13.0; extra == "dev"
250
+ Requires-Dist: onnx<3.0.0,>=1.12.0; extra == "dev"
251
+ Requires-Dist: weasyprint>=55.0; extra == "dev"
252
+ Requires-Dist: matplotlib>=3.1.0; extra == "dev"
253
+ Requires-Dist: mplcursors>=0.3; extra == "dev"
254
+ Requires-Dist: pytest>=5.3.2; extra == "dev"
255
+ Requires-Dist: coverage[toml]>=4.5.4; extra == "dev"
256
+ Requires-Dist: hdf5storage>=0.1.18; extra == "dev"
257
+ Requires-Dist: onnxruntime>=1.11.0; extra == "dev"
258
+ Requires-Dist: requests>=2.20.0; extra == "dev"
259
+ Requires-Dist: psutil>=5.9.5; extra == "dev"
260
+ Requires-Dist: ruff>=0.3.0; extra == "dev"
261
+ Requires-Dist: mypy>=1.0; extra == "dev"
262
+ Requires-Dist: pre-commit>=3.0.0; extra == "dev"
263
+ Requires-Dist: sphinx!=3.5.0,>=3.0.0; extra == "dev"
264
+ Requires-Dist: sphinxemoji>=0.1.8; extra == "dev"
265
+ Requires-Dist: sphinx-copybutton>=0.3.1; extra == "dev"
266
+ Requires-Dist: docutils<0.22; extra == "dev"
267
+ Requires-Dist: recommonmark>=0.7.1; extra == "dev"
268
+ Requires-Dist: sphinx-markdown-tables>=0.0.15; extra == "dev"
269
+ Requires-Dist: sphinx-tabs>=3.3.0; extra == "dev"
270
+ Requires-Dist: furo>=2022.3.4; extra == "dev"
270
271
  Provides-Extra: docs
271
- Requires-Dist: sphinx !=3.5.0,>=3.0.0 ; extra == 'docs'
272
- Requires-Dist: sphinxemoji >=0.1.8 ; extra == 'docs'
273
- Requires-Dist: sphinx-copybutton >=0.3.1 ; extra == 'docs'
274
- Requires-Dist: docutils <0.21 ; extra == 'docs'
275
- Requires-Dist: recommonmark >=0.7.1 ; extra == 'docs'
276
- Requires-Dist: sphinx-markdown-tables >=0.0.15 ; extra == 'docs'
277
- Requires-Dist: sphinx-tabs >=3.3.0 ; extra == 'docs'
278
- Requires-Dist: furo >=2022.3.4 ; extra == 'docs'
272
+ Requires-Dist: sphinx!=3.5.0,>=3.0.0; extra == "docs"
273
+ Requires-Dist: sphinxemoji>=0.1.8; extra == "docs"
274
+ Requires-Dist: sphinx-copybutton>=0.3.1; extra == "docs"
275
+ Requires-Dist: docutils<0.22; extra == "docs"
276
+ Requires-Dist: recommonmark>=0.7.1; extra == "docs"
277
+ Requires-Dist: sphinx-markdown-tables>=0.0.15; extra == "docs"
278
+ Requires-Dist: sphinx-tabs>=3.3.0; extra == "docs"
279
+ Requires-Dist: furo>=2022.3.4; extra == "docs"
280
+ Provides-Extra: html
281
+ Requires-Dist: weasyprint>=55.0; extra == "html"
279
282
  Provides-Extra: quality
280
- Requires-Dist: ruff >=0.1.5 ; extra == 'quality'
281
- Requires-Dist: mypy >=0.812 ; extra == 'quality'
282
- Requires-Dist: pre-commit >=2.17.0 ; extra == 'quality'
283
+ Requires-Dist: ruff>=0.1.5; extra == "quality"
284
+ Requires-Dist: mypy>=0.812; extra == "quality"
285
+ Requires-Dist: pre-commit>=2.17.0; extra == "quality"
283
286
  Provides-Extra: testing
284
- Requires-Dist: pytest >=5.3.2 ; extra == 'testing'
285
- Requires-Dist: coverage[toml] >=4.5.4 ; extra == 'testing'
286
- Requires-Dist: hdf5storage >=0.1.18 ; extra == 'testing'
287
- Requires-Dist: onnxruntime >=1.11.0 ; extra == 'testing'
288
- Requires-Dist: requests >=2.20.0 ; extra == 'testing'
289
- Requires-Dist: psutil >=5.9.5 ; extra == 'testing'
287
+ Requires-Dist: pytest>=5.3.2; extra == "testing"
288
+ Requires-Dist: coverage[toml]>=4.5.4; extra == "testing"
289
+ Requires-Dist: hdf5storage>=0.1.18; extra == "testing"
290
+ Requires-Dist: onnxruntime>=1.11.0; extra == "testing"
291
+ Requires-Dist: requests>=2.20.0; extra == "testing"
292
+ Requires-Dist: psutil>=5.9.5; extra == "testing"
290
293
  Provides-Extra: tf
291
- Requires-Dist: tensorflow <2.16.0,>=2.11.0 ; extra == 'tf'
292
- Requires-Dist: tf2onnx <2.0.0,>=1.16.0 ; extra == 'tf'
294
+ Requires-Dist: tensorflow<2.16.0,>=2.11.0; extra == "tf"
295
+ Requires-Dist: tf2onnx<2.0.0,>=1.16.0; extra == "tf"
293
296
  Provides-Extra: torch
294
- Requires-Dist: torch <3.0.0,>=1.12.0 ; extra == 'torch'
295
- Requires-Dist: torchvision >=0.13.0 ; extra == 'torch'
296
- Requires-Dist: onnx <3.0.0,>=1.12.0 ; extra == 'torch'
297
+ Requires-Dist: torch<3.0.0,>=1.12.0; extra == "torch"
298
+ Requires-Dist: torchvision>=0.13.0; extra == "torch"
299
+ Requires-Dist: onnx<3.0.0,>=1.12.0; extra == "torch"
300
+ Provides-Extra: viz
301
+ Requires-Dist: matplotlib>=3.1.0; extra == "viz"
302
+ Requires-Dist: mplcursors>=0.3; extra == "viz"
297
303
 
298
304
  <p align="center">
299
- <img src="docs/images/Logo_doctr.gif" width="40%">
305
+ <img src="https://github.com/mindee/doctr/raw/main/docs/images/Logo_doctr.gif" width="40%">
300
306
  </p>
301
307
 
302
- [![Slack Icon](https://img.shields.io/badge/Slack-Community-4A154B?style=flat-square&logo=slack&logoColor=white)](https://slack.mindee.com) [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) ![Build Status](https://github.com/mindee/doctr/workflows/builds/badge.svg) [![Docker Images](https://img.shields.io/badge/Docker-4287f5?style=flat&logo=docker&logoColor=white)](https://github.com/mindee/doctr/pkgs/container/doctr) [![codecov](https://codecov.io/gh/mindee/doctr/branch/main/graph/badge.svg?token=577MO567NM)](https://codecov.io/gh/mindee/doctr) [![CodeFactor](https://www.codefactor.io/repository/github/mindee/doctr/badge?s=bae07db86bb079ce9d6542315b8c6e70fa708a7e)](https://www.codefactor.io/repository/github/mindee/doctr) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/340a76749b634586a498e1c0ab998f08)](https://app.codacy.com/gh/mindee/doctr?utm_source=github.com&utm_medium=referral&utm_content=mindee/doctr&utm_campaign=Badge_Grade) [![Doc Status](https://github.com/mindee/doctr/workflows/doc-status/badge.svg)](https://mindee.github.io/doctr) [![Pypi](https://img.shields.io/badge/pypi-v0.7.0-blue.svg)](https://pypi.org/project/python-doctr/) [![Hugging Face Spaces](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/mindee/doctr) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/mindee/notebooks/blob/main/doctr/quicktour.ipynb)
308
+ [![Slack Icon](https://img.shields.io/badge/Slack-Community-4A154B?style=flat-square&logo=slack&logoColor=white)](https://slack.mindee.com) [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) ![Build Status](https://github.com/mindee/doctr/workflows/builds/badge.svg) [![Docker Images](https://img.shields.io/badge/Docker-4287f5?style=flat&logo=docker&logoColor=white)](https://github.com/mindee/doctr/pkgs/container/doctr) [![codecov](https://codecov.io/gh/mindee/doctr/branch/main/graph/badge.svg?token=577MO567NM)](https://codecov.io/gh/mindee/doctr) [![CodeFactor](https://www.codefactor.io/repository/github/mindee/doctr/badge?s=bae07db86bb079ce9d6542315b8c6e70fa708a7e)](https://www.codefactor.io/repository/github/mindee/doctr) [![Codacy Badge](https://api.codacy.com/project/badge/Grade/340a76749b634586a498e1c0ab998f08)](https://app.codacy.com/gh/mindee/doctr?utm_source=github.com&utm_medium=referral&utm_content=mindee/doctr&utm_campaign=Badge_Grade) [![Doc Status](https://github.com/mindee/doctr/workflows/doc-status/badge.svg)](https://mindee.github.io/doctr) [![Pypi](https://img.shields.io/badge/pypi-v0.9.0-blue.svg)](https://pypi.org/project/python-doctr/) [![Hugging Face Spaces](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/mindee/doctr) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/mindee/notebooks/blob/main/doctr/quicktour.ipynb)
303
309
 
304
310
 
305
311
  **Optical Character Recognition made seamless & accessible to anyone, powered by TensorFlow 2 & PyTorch**
@@ -309,7 +315,7 @@ What you can expect from this repository:
309
315
  - efficient ways to parse textual information (localize and identify each word) from your documents
310
316
  - guidance on how to integrate this in your current architecture
311
317
 
312
- ![OCR_example](docs/images/ocr.png)
318
+ ![OCR_example](https://github.com/mindee/doctr/raw/main/docs/images/ocr.png)
313
319
 
314
320
  ## Quick Tour
315
321
 
@@ -334,7 +340,7 @@ from doctr.io import DocumentFile
334
340
  pdf_doc = DocumentFile.from_pdf("path/to/your/doc.pdf")
335
341
  # Image
336
342
  single_img_doc = DocumentFile.from_images("path/to/your/img.jpg")
337
- # Webpage
343
+ # Webpage (requires `weasyprint` to be installed)
338
344
  webpage_doc = DocumentFile.from_url("https://www.yoursite.com")
339
345
  # Multiple page images
340
346
  multi_img_doc = DocumentFile.from_images(["path/to/page1.jpg", "path/to/page2.jpg"])
@@ -372,10 +378,11 @@ If both options are set to False, the predictor will always fit and return rotat
372
378
  To interpret your model's predictions, you can visualize them interactively as follows:
373
379
 
374
380
  ```python
381
+ # Display the result (requires matplotlib & mplcursors to be installed)
375
382
  result.show()
376
383
  ```
377
384
 
378
- ![Visualization sample](docs/images/doctr_example_script.gif)
385
+ ![Visualization sample](https://github.com/mindee/doctr/raw/main/docs/images/doctr_example_script.gif)
379
386
 
380
387
  Or even rebuild the original document from its predictions:
381
388
 
@@ -386,7 +393,7 @@ synthetic_pages = result.synthesize()
386
393
  plt.imshow(synthetic_pages[0]); plt.axis('off'); plt.show()
387
394
  ```
388
395
 
389
- ![Synthesis sample](docs/images/synthesized_sample.png)
396
+ ![Synthesis sample](https://github.com/mindee/doctr/raw/main/docs/images/synthesized_sample.png)
390
397
 
391
398
  The `ocr_predictor` returns a `Document` object with a nested structure (with `Page`, `Block`, `Line`, `Word`, `Artefact`).
392
399
  To get a better understanding of our document model, check our [documentation](https://mindee.github.io/doctr/modules/io.html#document-structure):
@@ -425,23 +432,13 @@ The KIE predictor results per page are in a dictionary format with each key repr
425
432
 
426
433
  ### If you are looking for support from the Mindee team
427
434
 
428
- [![Bad OCR test detection image asking the developer if they need help](docs/images/doctr-need-help.png)](https://mindee.com/product/doctr)
435
+ [![Bad OCR test detection image asking the developer if they need help](https://github.com/mindee/doctr/raw/main/docs/images/doctr-need-help.png)](https://mindee.com/product/doctr)
429
436
 
430
437
  ## Installation
431
438
 
432
439
  ### Prerequisites
433
440
 
434
- Python 3.8 (or higher) and [pip](https://pip.pypa.io/en/stable/) are required to install docTR.
435
-
436
- Since we use [weasyprint](https://weasyprint.org/), you will need extra dependencies if you are not running Linux.
437
-
438
- For MacOS users, you can install them as follows:
439
-
440
- ```shell
441
- brew install cairo pango gdk-pixbuf libffi
442
- ```
443
-
444
- For Windows users, those dependencies are included in GTK. You can find the latest installer over [here](https://github.com/tschoonj/GTK-for-Windows-Runtime-Environment-Installer/releases).
441
+ Python 3.9 (or higher) and [pip](https://pip.pypa.io/en/stable/) are required to install docTR.
445
442
 
446
443
  ### Latest release
447
444
 
@@ -460,6 +457,8 @@ We try to keep framework-specific dependencies to a minimum. You can install fra
460
457
  pip install "python-doctr[tf]"
461
458
  # for PyTorch
462
459
  pip install "python-doctr[torch]"
460
+ # optional dependencies for visualization, html, and contrib modules can be installed as follows:
461
+ pip install "python-doctr[torch,viz,html,contrib]"
463
462
  ```
464
463
 
465
464
  For MacBooks with M1 chip, you will need some additional packages or specific versions:
@@ -494,6 +493,7 @@ Credits where it's due: this repository is implementing, among others, architect
494
493
 
495
494
  - DBNet: [Real-time Scene Text Detection with Differentiable Binarization](https://arxiv.org/pdf/1911.08947.pdf).
496
495
  - LinkNet: [LinkNet: Exploiting Encoder Representations for Efficient Semantic Segmentation](https://arxiv.org/pdf/1707.03718.pdf)
496
+ - FAST: [FAST: Faster Arbitrarily-Shaped Text Detector with Minimalist Kernel Representation](https://arxiv.org/pdf/2111.02394.pdf)
497
497
 
498
498
  ### Text Recognition
499
499
 
@@ -513,7 +513,7 @@ The full package documentation is available [here](https://mindee.github.io/doct
513
513
 
514
514
  A minimal demo app is provided for you to play with our end-to-end OCR models!
515
515
 
516
- ![Demo app](docs/images/demo_update.png)
516
+ ![Demo app](https://github.com/mindee/doctr/raw/main/docs/images/demo_update.png)
517
517
 
518
518
  #### Live demo
519
519
 
@@ -553,11 +553,11 @@ USE_TORCH=1 streamlit run demo/app.py
553
553
  Instead of having your demo actually running Python, you would prefer to run everything in your web browser?
554
554
  Check out our [TensorFlow.js demo](https://github.com/mindee/doctr-tfjs-demo) to get started!
555
555
 
556
- ![TFJS demo](docs/images/demo_illustration_mini.png)
556
+ ![TFJS demo](https://github.com/mindee/doctr/raw/main/docs/images/demo_illustration_mini.png)
557
557
 
558
558
  ### Docker container
559
559
 
560
- [We offers Docker container support for easy testing and deployment](https://github.com/mindee/doctr/packages).
560
+ [We offer Docker container support for easy testing and deployment](https://github.com/mindee/doctr/pkgs/container/doctr).
561
561
 
562
562
  #### Using GPU with docTR Docker Images
563
563
 
@@ -646,9 +646,14 @@ Your API should now be running locally on your port 8002. Access your automatica
646
646
 
647
647
  ```python
648
648
  import requests
649
+
650
+ params = {"det_arch": "db_resnet50", "reco_arch": "crnn_vgg16_bn"}
651
+
649
652
  with open('/path/to/your/doc.jpg', 'rb') as f:
650
- data = f.read()
651
- response = requests.post("http://localhost:8002/ocr", files={'file': data}).json()
653
+ files = [ # application/pdf, image/jpeg, image/png supported
654
+ ("files", ("doc.jpg", f.read(), "image/jpeg")),
655
+ ]
656
+ print(requests.post("http://localhost:8002/ocr", params=params, files=files).json())
652
657
  ```
653
658
 
654
659
  ### Example notebooks
@@ -673,8 +678,8 @@ If you wish to cite this project, feel free to use this [BibTeX](http://www.bibt
673
678
 
674
679
  If you scrolled down to this section, you most likely appreciate open source. Do you feel like extending the range of our supported characters? Or perhaps submitting a paper implementation? Or contributing in any other way?
675
680
 
676
- You're in luck, we compiled a short guide (cf. [`CONTRIBUTING`](CONTRIBUTING.md)) for you to easily do so!
681
+ You're in luck, we compiled a short guide (cf. [`CONTRIBUTING`](https://mindee.github.io/doctr/contributing/contributing.html)) for you to easily do so!
677
682
 
678
683
  ## License
679
684
 
680
- Distributed under the Apache 2.0 License. See [`LICENSE`](LICENSE) for more information.
685
+ Distributed under the Apache 2.0 License. See [`LICENSE`](https://github.com/mindee/doctr?tab=Apache-2.0-1-ov-file#readme) for more information.
@@ -1,6 +1,10 @@
1
- doctr/__init__.py,sha256=m6lezpfDosfTVFssFVrN7aH0tDzM4h9OgCCi0Nevq8g,161
2
- doctr/file_utils.py,sha256=P6Ld5_rFSMwv1m91yhARdJgF7KIXWzgUJXUXaUiUNgc,3156
3
- doctr/version.py,sha256=XbvKmQHvfaLsOHsQx8r-8Tg2eqiGheb-FcQ7cNGg5ro,23
1
+ doctr/__init__.py,sha256=q-1tv1hf-BRaZtxsrbPVxYNL6ZtyIOSDvlZOSt85TmU,170
2
+ doctr/file_utils.py,sha256=3LUSkLXfMsxJgLY_Gah9Qlb7-l-Bxx1y6Wm9hlVXJig,3738
3
+ doctr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ doctr/version.py,sha256=BSHwM5KsOpEVgNNeYRIr89q04Tayoadwt508Blp8nAo,23
5
+ doctr/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ doctr/contrib/artefacts.py,sha256=xXkTkZVMrjalzffeBDOXktRhMn-EmUVEdURpDsV0-h8,5351
7
+ doctr/contrib/base.py,sha256=dKUcKvVMyPFvZp_-IekocFNG3JmCJ1cNt0V8BQ5zdV0,3426
4
8
  doctr/datasets/__init__.py,sha256=umI2ABbgWIKuhswl8RGaF6CefFiI8DdEGVb0Kbd8aZA,574
5
9
  doctr/datasets/cord.py,sha256=p9ObLgqV3uB7TYoS5Puag0q-JtFTPrXUztkxL36U69U,4746
6
10
  doctr/datasets/detection.py,sha256=H6inFO6rjdvU_Asm9UTod5r5bjjpmJJWGityv0RTJ8M,3607
@@ -10,8 +14,8 @@ doctr/datasets/ic03.py,sha256=2HEHvW9tLYFiSEaPeNM4vrqL3ICjth6LUUXPcHjrHjQ,5066
10
14
  doctr/datasets/ic13.py,sha256=5qjGMmotEOo_8N2gp0XUdZPW5t2gvVe-cTestlfD6Mc,4010
11
15
  doctr/datasets/iiit5k.py,sha256=7y4pv4WG-FdXCn7aXLsUodXnk63gRBR8325HfqqlQ3k,3936
12
16
  doctr/datasets/iiithws.py,sha256=MFWgIW5bNJSvxWU-USZvbYVHNlkBsnzzMaSGrbut-zQ,2778
13
- doctr/datasets/imgur5k.py,sha256=UrDisvRDFJpuD2utLwUDgqVQEZCdesbVIR6upoG1tu4,6705
14
- doctr/datasets/loader.py,sha256=px4IeA8ttqf61b6sRcUtQiXS_UBDhPmeiv6DZ7zuZTk,3044
17
+ doctr/datasets/imgur5k.py,sha256=CeSh2R_U-1iPTg-DZOgY5L6t4F-fX1WkqiMP0C6jivQ,6679
18
+ doctr/datasets/loader.py,sha256=77ErVBRQsXAhe4pTJstZ-wk4wOZSFdw9w-_OMv89IKg,2803
15
19
  doctr/datasets/mjsynth.py,sha256=Sybpaxiib8jDOc33OQgl2gGQ4XX8kKsnZaNokKmt08o,4063
16
20
  doctr/datasets/ocr.py,sha256=wSAU62NUdFgt52vxo65bXPsuKeVWArlAkD5kxWKypiM,2550
17
21
  doctr/datasets/orientation.py,sha256=PZfSQGfBSqzwRlg84L7BA7Lb2jseBvxkKqzh36TtFXk,1113
@@ -20,150 +24,150 @@ doctr/datasets/sroie.py,sha256=bAkPLmw9aVSu_MyEix_FKFW0pbYye1w16vIkumnQ4E8,3939
20
24
  doctr/datasets/svhn.py,sha256=Q4M84eRGWLWQ5Bsw0zvouhHTUQl46B9-pS06ZYKT5j8,5251
21
25
  doctr/datasets/svt.py,sha256=eos2IUqeM8AW98zJ4PjHQ-hM0hUiJ-cumFhctQrpZp4,4551
22
26
  doctr/datasets/synthtext.py,sha256=Q0WKA_UJtgjdBaHHQ888n6ltT-NBuf5kTYQv5SB40IQ,5387
23
- doctr/datasets/utils.py,sha256=_5gV_Ti3OfkOjIRsS3hud-V7RcNNVKfgx2AndyEVu6g,7551
24
- doctr/datasets/vocabs.py,sha256=uJ-y5qm76o5Wd-JZ023zmFRXTpGzb26Sn-gJt2FVOb0,3121
27
+ doctr/datasets/utils.py,sha256=xdKi9z0hSvW0TOS_Pb_VJabX_pyCgRmMvRxIZBXpjg8,7584
28
+ doctr/datasets/vocabs.py,sha256=TABYcaDr2ZviJJWaH2-DrZTLaQYUVTsUTAGgFia3c4k,3834
25
29
  doctr/datasets/wildreceipt.py,sha256=HvnAaxo9lLwC8UMUYYKKJo6HkG8xm2yIHopBsN5G1LA,4566
26
30
  doctr/datasets/datasets/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
27
31
  doctr/datasets/datasets/base.py,sha256=TUK8GMosZnkTBsJm8zOc7AIy3FUMIV2vOTu3YbTjnSQ,4874
28
- doctr/datasets/datasets/pytorch.py,sha256=ZMSJcYS3v_Mdzqd4OxW2AIZEf4K2T3nuEp7MbQuy2bo,1981
32
+ doctr/datasets/datasets/pytorch.py,sha256=M75erZOBP_Cg05Vk4D01yQZSyyqEbN0omHch1afe4pY,2039
29
33
  doctr/datasets/datasets/tensorflow.py,sha256=Ivx_T6o2ttHXjyUy5wi0LpsmKZYOVb7xL1fHKvRlE80,1975
30
34
  doctr/datasets/generator/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
31
- doctr/datasets/generator/base.py,sha256=TLSPPTUKIOdkXp5SooYDZbX0hyOZMBMY11Di5jTOLnc,5659
35
+ doctr/datasets/generator/base.py,sha256=SpzbEqVYUpPZr5NTgccXtpw_yD37WxJ0Jx4HDwa_EKk,5769
32
36
  doctr/datasets/generator/pytorch.py,sha256=HUmdHUm7rU84gXv18BeXdYTDBCHabtw21Xdpm-p02ik,2134
33
37
  doctr/datasets/generator/tensorflow.py,sha256=Yj9vgEjdNnOwrM4Ew2w5TfkEwNXgy6ACZuEnExZcUMs,2229
34
38
  doctr/io/__init__.py,sha256=kS7tKGFvzxOCWBOun-Y8n9CsziwRKNynjwpZEUUI03M,106
35
- doctr/io/elements.py,sha256=IGKU2TGQFeJYsv6mHt-mJAQRIwnmi5jJ6tj5ou-QRAY,23241
36
- doctr/io/html.py,sha256=PY3bJBn7M2V4fDaSS5RV_9cr-EC4SCDWOnm3VqiBM8U,715
37
- doctr/io/pdf.py,sha256=vCZFNB0jabldMG2yLu5_fa1OQY7-LGVyCgS2nBP3Hjk,1320
38
- doctr/io/reader.py,sha256=9PEGgvarm7IzxbQgpzev91CYODVrBSA9MNAP1di96gM,2540
39
+ doctr/io/elements.py,sha256=bGAIS34Kq_KMGl4zHjIloGCjsbyB6VcrkGq7fvQY71k,24854
40
+ doctr/io/html.py,sha256=cXDCMKztiFafCdPy_AMU5Ven52q1A0FJWXhPnJMLHGg,719
41
+ doctr/io/pdf.py,sha256=V2GAwPFvGAjBqhT85Y6uVejlLy0vn5S94_0ZJVPQLiE,1350
42
+ doctr/io/reader.py,sha256=68pr31K19Tej6UHAqhfAlD11paGX6h3IeSvEje8GaOg,2829
39
43
  doctr/io/image/__init__.py,sha256=SqJtZIvr9dIDPLW39kHIX_MxufCKbI54aX28VrjeauE,193
40
44
  doctr/io/image/base.py,sha256=g6kdcoIEQVN0dhFsVUA-gwGedhRhyKjFVrPu9QbR_UQ,1740
41
- doctr/io/image/pytorch.py,sha256=dVGXIU2ZgeGDLkOgJ55RneVf5wyJUv9CZbIDh9SVjqA,3254
42
- doctr/io/image/tensorflow.py,sha256=4bdeGDo13EFDImNHkxInTSGpzBU4WaXtxPg-hh4trU4,3207
43
- doctr/models/__init__.py,sha256=SNHAyfMOn_18tjh8nmdl4vv7XLW2JDXX4fdbiDKcZdA,148
44
- doctr/models/_utils.py,sha256=4whOjayC7ZZFd0rp84sAmnM8F4rLXYrlNFeGIQsuKys,5759
45
- doctr/models/builder.py,sha256=QcHEuTycC5oH2QAUqZJi1gO5AmFsNkMpAzrGgth4DtM,17890
45
+ doctr/io/image/pytorch.py,sha256=13F8tFXultegdF9yZqCMXSM9Jn4ojwT9YLYWMF5nZ6M,3310
46
+ doctr/io/image/tensorflow.py,sha256=47a-zW4VoAeaoihTsppFJlFyK_8dvGzjGF1GB3Ti0Ig,3213
47
+ doctr/models/__init__.py,sha256=yn_mXUL8B5L27Uaat1rLGRQHgLR8VLVxzBuPfNuN1YE,124
48
+ doctr/models/_utils.py,sha256=zt-wXy0OP8Mw9JhnCLPFhX5d5efdcijgPlLnVKBdRhw,7540
49
+ doctr/models/builder.py,sha256=9b6fCFDJvHo5XOyP8oz6PkrEjyGxyHUecEWYjdHv0IA,20665
46
50
  doctr/models/core.py,sha256=SMXYuX1o_Q2zrjcF-vzfqj7IkLKlDyzEOc-4HeiEZ8g,501
47
- doctr/models/zoo.py,sha256=6VeOSI_1y8ecvpmOSSLJpEc9Ut1LKqPAsgPQyOCqL_w,9322
48
- doctr/models/artefacts/__init__.py,sha256=ZPEGVgF1rk1JudUb_9EWijngdQRGsAthWdth28Hjb1U,43
49
- doctr/models/artefacts/barcode.py,sha256=46QPq7J0i9PG4qtYIsRbGhoJMiNzTht2TCOKDCJiPsU,2721
50
- doctr/models/artefacts/face.py,sha256=oN2tD6QO7bp79dib9IXK2Y4NB67AIMuoO92n6E1oKqI,1893
51
+ doctr/models/zoo.py,sha256=G52XurwqjVdLRHOZWrEu2QbmZQWsCPdZVIPu874gL_E,9318
51
52
  doctr/models/classification/__init__.py,sha256=HeErE29Bs1-91FtS9HqNghHe89XZGzI_11MO_E6GJ7s,154
52
- doctr/models/classification/zoo.py,sha256=ZlQATwhEMj3dwRyJUisPC0XByq6z2I5GdfeidDsm8DQ,2489
53
+ doctr/models/classification/zoo.py,sha256=HM2p1fFM83CLqrz7-Vlj-_oBEPNRpuC1bD_0AKquh0E,3503
53
54
  doctr/models/classification/magc_resnet/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
54
55
  doctr/models/classification/magc_resnet/pytorch.py,sha256=UY65c3_Ux2o4hOm_USEQYP7O69bj76qbUB-PCb_0Lng,5603
55
56
  doctr/models/classification/magc_resnet/tensorflow.py,sha256=4UDrEcom_2wcyE1QjwmT-u6rtpzQ5ViyB1U6HxpT_XI,6423
56
57
  doctr/models/classification/mobilenet/__init__.py,sha256=FBZ2YT2Cq3mj6vpDC3ff5TcMpagNWFhwxQ_brdsgBqo,172
57
- doctr/models/classification/mobilenet/pytorch.py,sha256=yio6IMHP658AnRVW_gtUjiT9EWr3Byf96YIG3Mp3nrw,8009
58
- doctr/models/classification/mobilenet/tensorflow.py,sha256=VauT18woqAl1UqI8mxt1xklvEV5MVK-fdLkAdRinJWA,14364
58
+ doctr/models/classification/mobilenet/pytorch.py,sha256=bko2qHGmGfJay2EZmEZlrzgnDah7vhLpibNYIxVFWqo,9318
59
+ doctr/models/classification/mobilenet/tensorflow.py,sha256=VUMq9auHRcDafvjhayCq4fV1fLS14Nxp_O6OEtA7R6A,15601
59
60
  doctr/models/classification/predictor/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
60
- doctr/models/classification/predictor/pytorch.py,sha256=Jah8NsP7eCJFw8-y2tJAxkhjmoqxx7WpW5-uUZ1I0sU,1883
61
- doctr/models/classification/predictor/tensorflow.py,sha256=PNOJGkyajRbuRrw2qrbZE0AbzFgXujVQF8gIRuEhhnk,1698
61
+ doctr/models/classification/predictor/pytorch.py,sha256=NEeFSUJ5QhUqtG9pDw5s3ZzR8wSORhicmxESPTVSErw,2288
62
+ doctr/models/classification/predictor/tensorflow.py,sha256=asJl1GiDFq8WJM_J56tx4xPR-Kqrnroc1ZGty5znkDg,2071
62
63
  doctr/models/classification/resnet/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
63
64
  doctr/models/classification/resnet/pytorch.py,sha256=VVkNit3HEezRfOPw8wfuiEEAUCEnYSauCvWaCFF3cwo,12442
64
65
  doctr/models/classification/resnet/tensorflow.py,sha256=jBGiL6Mucnq7JGkyIa4Y9A6BQz2ol88cm-eBxJjsTPo,13185
65
66
  doctr/models/classification/textnet/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
66
- doctr/models/classification/textnet/pytorch.py,sha256=69vbyqRktq-k-JLiKTjkPZCtkP2trIj4VFfnfLxvf6M,10163
67
- doctr/models/classification/textnet/tensorflow.py,sha256=XB1O6vw7Swf0zPgYaVzQd3mWcVMVZiYsWbh8I-WZSqo,9789
67
+ doctr/models/classification/textnet/pytorch.py,sha256=z2BwTM-7ClEzanHWXB5Uie-_X62k1OZZ2Y6m08V_zUM,10163
68
+ doctr/models/classification/textnet/tensorflow.py,sha256=RVg7Nic0cn8A3eswq-YauoqvnDrdxiuzvx_h4qAGubQ,9789
68
69
  doctr/models/classification/vgg/__init__.py,sha256=FBZ2YT2Cq3mj6vpDC3ff5TcMpagNWFhwxQ_brdsgBqo,172
69
70
  doctr/models/classification/vgg/pytorch.py,sha256=b_q9oWmtlazD4uk9DFYezWgsgAwwN-3ewEz15E2cJR4,3136
70
71
  doctr/models/classification/vgg/tensorflow.py,sha256=mVuyIXtX7iu622K0GwXkALOM7gzFtlGX9IABLP2NR2Y,4090
71
72
  doctr/models/classification/vit/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
72
73
  doctr/models/classification/vit/pytorch.py,sha256=770ZrCPX7LlVUjE9XNFrzcb2i_0lHStJ8Q4vXEhXEHs,6096
73
74
  doctr/models/classification/vit/tensorflow.py,sha256=TtglXtKAE6y_gfzk8DOhUwoQNIMhK86tmhCB1SbT-k0,5869
74
- doctr/models/detection/__init__.py,sha256=oSgdRo9kpFqWFa5d81yiAytttBpwiBkqHQOMs6Tkr1Q,85
75
+ doctr/models/detection/__init__.py,sha256=RqSz5beehLiqhW0PwFLFmCfTyMjofO-0umcQJLDMHjY,105
75
76
  doctr/models/detection/core.py,sha256=K2uQTIu3ttgxj7YF7i1a-X6djIGCSFjZnQQ57JQBDv0,3566
76
- doctr/models/detection/zoo.py,sha256=zKyTkjJezU0WiWns6NpwCuwzOCcJr9IK3GZJ6DPwmTI,2841
77
- doctr/models/detection/_utils/__init__.py,sha256=jDHErtF1nkN-uICx8prmdvmGTSoN6U27ZVmHLoqtcNo,131
77
+ doctr/models/detection/zoo.py,sha256=OJP8K3CKzLRmhaSe0CtvFPioXBcZcvf8__As_6xflFo,3332
78
+ doctr/models/detection/_utils/__init__.py,sha256=6f2JchSfAnB8ZShc4dvR2uZtWXEEIZ22vwt4yENoqXg,151
79
+ doctr/models/detection/_utils/base.py,sha256=56OnQhSmAdG2YRd6D-2tl_YXtZ60OyOyBoyJxuTNZJI,2526
78
80
  doctr/models/detection/_utils/pytorch.py,sha256=UZ-PK5Uw0dVN978JGj5MVtF7kLXTL4EtugCoq_VVkVk,1063
79
81
  doctr/models/detection/_utils/tensorflow.py,sha256=9D2ita4ZqJus2byLe7bkSIhyYExAiOLAGBbC7-oRZDU,979
80
82
  doctr/models/detection/differentiable_binarization/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
81
- doctr/models/detection/differentiable_binarization/base.py,sha256=ucjNmclcN0SfTBZxgucaGC1qWqI3UvBLHfMGzR6LsZI,16323
82
- doctr/models/detection/differentiable_binarization/pytorch.py,sha256=-bByMRDipo_0WIXuFPf9DUPfWduVgLd0UIn48GP3f94,15983
83
- doctr/models/detection/differentiable_binarization/tensorflow.py,sha256=EGN-kGq0VAEaJwDQJXquU3Cs8tAtcaFMY7rllHUHgDE,14598
83
+ doctr/models/detection/differentiable_binarization/base.py,sha256=fFnXH8iGLXFk4La5G19rqvId_7RDOh5H-v_IRyb1hA0,16432
84
+ doctr/models/detection/differentiable_binarization/pytorch.py,sha256=nYOLVLsLF4zrnXK9u6mTPue7X2JR7WQe2gUb_UMDI6I,15955
85
+ doctr/models/detection/differentiable_binarization/tensorflow.py,sha256=l4QltrgDMLK_eY0dxEaCDzrB8rlhVpwUmOAPNIzd_70,14506
86
+ doctr/models/detection/fast/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
87
+ doctr/models/detection/fast/base.py,sha256=Ydm8fzKwYO_NBMnGazAYg1hpzlXZcRWJ-oKGir36DsE,10927
88
+ doctr/models/detection/fast/pytorch.py,sha256=4FYCaMZ2vzr_j4Phu2bOXs73L_Cfvgu4LDE0Q7m8hz0,16143
89
+ doctr/models/detection/fast/tensorflow.py,sha256=ps0x_AYFW5GYA2SlVQip1S4x61o2mhlSNhhJdaALnF4,15797
84
90
  doctr/models/detection/linknet/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
85
- doctr/models/detection/linknet/base.py,sha256=51teowVwuJ_PKJnL7die_KWLNpdnZTIYVh1TXstfIOs,10508
91
+ doctr/models/detection/linknet/base.py,sha256=R12TMBNeOsY_UTjSFbPr7-FmLsMIJSwxdHc3e3pFLKw,10617
86
92
  doctr/models/detection/linknet/pytorch.py,sha256=sodWXaCDv1taRl3g6lgwxitvhU-ZszfN-OIofsorkp8,13810
87
93
  doctr/models/detection/linknet/tensorflow.py,sha256=PK3adzBG6wz_SA5lMrh0KBKpbDu-e3FaKwTZ8-ZaN-s,12914
88
94
  doctr/models/detection/predictor/__init__.py,sha256=lwmH917kRdbUUBsE02fELIuXQNRNePpIj3iK43ey6Bg,159
89
- doctr/models/detection/predictor/pytorch.py,sha256=bKSOe5Gfo5ctvqGAle3CyCKMP-zZpdIH-h-j0D7bBbA,2083
90
- doctr/models/detection/predictor/tensorflow.py,sha256=ZVpRrxsje91InWJrOSOfxdtTdvZg-0IXwEBVJBktBRA,1868
95
+ doctr/models/detection/predictor/pytorch.py,sha256=sNuMGvcKQeeOcW8QG-xWK0W59DohGYxXlKv0yK2HcNQ,2689
96
+ doctr/models/detection/predictor/tensorflow.py,sha256=W96CJhjYC3-MTcLA3FTH1lLX5DZ3ueYha2b_7A4GxNk,2448
91
97
  doctr/models/factory/__init__.py,sha256=cKPoH2V2157lLMTR2zsljG3_IQHziodqR-XK_LG0D_I,19
92
- doctr/models/factory/hub.py,sha256=iyktX-LE1wQmtvoFKmRHS2AofkloGBvni6TH0aF_xRI,7918
98
+ doctr/models/factory/hub.py,sha256=5IsoyiLfZO_QxM6-dK0-oaAleY8bMvvi9yP_n_W1FHs,7464
93
99
  doctr/models/kie_predictor/__init__.py,sha256=lwmH917kRdbUUBsE02fELIuXQNRNePpIj3iK43ey6Bg,159
94
- doctr/models/kie_predictor/base.py,sha256=XHnTOzaFiqJiwb6nA7osjFwEHfeTVi4FwfjRDIFJNzU,1784
95
- doctr/models/kie_predictor/pytorch.py,sha256=OW0BAVT11R7PKCminD8VbyZ6En12TyaE103zMrSeG4s,6940
96
- doctr/models/kie_predictor/tensorflow.py,sha256=JAbHyhEE-OEx1r3NIqWYTlhAb9ECY7ZfW5Jc4d-LwVw,6697
100
+ doctr/models/kie_predictor/base.py,sha256=YTGdKda8zqM1H9gB8B2zFEN7C8NjFA00I-o3fRtipvA,2115
101
+ doctr/models/kie_predictor/pytorch.py,sha256=TcWjWb51aoXdfRal4diRbSbJohBOwpKsDUpci6d3VfE,7990
102
+ doctr/models/kie_predictor/tensorflow.py,sha256=MJTP5BlSlG50emhofPPMTKl5MgRdRTGzRhg53tdsMoE,7626
97
103
  doctr/models/modules/__init__.py,sha256=pouP7obVTu4p6aHkyaqa1yHKbynpvT0Hgo-LO_1U2R4,83
98
104
  doctr/models/modules/layers/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
99
- doctr/models/modules/layers/pytorch.py,sha256=tUwSJ2bIETJYAGqc8UCJ7dbTOa0LkBfQo2dwB1ZXPb8,2929
100
- doctr/models/modules/layers/tensorflow.py,sha256=Zw4rMxob3tK7HCzbcYZMSHVp-2ha8z1YPV9MpYsuyag,3359
105
+ doctr/models/modules/layers/pytorch.py,sha256=UIidAIKfXQxlX9MbVWADLGrrPE7J496BMfgRHR73jMY,6853
106
+ doctr/models/modules/layers/tensorflow.py,sha256=etXoKXuIeFr_LD-L0x0fhVlL-cUrjL5vFTh4cmci2P8,7145
101
107
  doctr/models/modules/transformer/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
102
- doctr/models/modules/transformer/pytorch.py,sha256=Bn6KPvhBdtS2MlRQmQT7c_d63maRfwfMias3P8eJ9fA,7725
108
+ doctr/models/modules/transformer/pytorch.py,sha256=93wDIrV7odRORV_wOLFNsw-QSH_COjUcp9J55PPp_qA,7664
103
109
  doctr/models/modules/transformer/tensorflow.py,sha256=NTF-Q6ClUIMdSWDqus6kPZjOlKC3XcJ3HqUeyZTqtnU,9113
104
110
  doctr/models/modules/vision_transformer/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
105
111
  doctr/models/modules/vision_transformer/pytorch.py,sha256=D6BbqNPV_7OFRogM0iaiWbn_6uLe3Thbo6GKRSYpfTQ,3970
106
112
  doctr/models/modules/vision_transformer/tensorflow.py,sha256=PaDbTtCc5YGqZNd_NFMdxeq6oNxs0WtVGYwhLCjJWFY,4199
107
- doctr/models/obj_detection/__init__.py,sha256=7TJnvLcLYaQtnrXaiBS38qvELgSC-hW6jIhsIfNXob4,27
108
- doctr/models/obj_detection/faster_rcnn/__init__.py,sha256=LOFUrXC37tQ8hDYF_xTxiD11YEgnLsW2wY0_MJDKszk,144
109
- doctr/models/obj_detection/faster_rcnn/pytorch.py,sha256=xT1U-Wo0tJLcRXe7QOwoaDDYeJKRqHAM5-TsmSGDJG0,2855
110
113
  doctr/models/predictor/__init__.py,sha256=lwmH917kRdbUUBsE02fELIuXQNRNePpIj3iK43ey6Bg,159
111
- doctr/models/predictor/base.py,sha256=IL2WNF0kTkFKERAMAwur29ptDtvp7aYbc6WStTLvt9A,6688
112
- doctr/models/predictor/pytorch.py,sha256=Dwf23IXE_q6RL3rrsbvK9U1yeoeP27M1mwntdoR4lQs,5954
113
- doctr/models/predictor/tensorflow.py,sha256=pYaTv3y_ELgMfgmMCzL2lPzFu2VvNwsoTzaElshuBj0,5800
114
+ doctr/models/predictor/base.py,sha256=LY910Umd0u00rrZxQNoPDcizCdsMa2cnDEg6ZQEFFjg,8157
115
+ doctr/models/predictor/pytorch.py,sha256=nc2QJSdUmFLel2x6kTTgRFRNFDXNZnOKwJMSF0WmRWQ,6518
116
+ doctr/models/predictor/tensorflow.py,sha256=Foiji8uEXCLX62E1JWdvlWDX3psLAQtiNd4Fpl0vyuc,6243
114
117
  doctr/models/preprocessor/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
115
- doctr/models/preprocessor/pytorch.py,sha256=C6s07Xaky0NqCEpjfZB3-mgegkT16dwBXVRaFCfvAN0,4993
116
- doctr/models/preprocessor/tensorflow.py,sha256=i_Crf-ZGRzC2cMLQDg9P5aHEfmK_xtF1_HXzu-Ul-4M,4483
118
+ doctr/models/preprocessor/pytorch.py,sha256=blJVqP1Xsa5GBX4pWrmaHJetCjP08Im8fry7BzLks-U,4877
119
+ doctr/models/preprocessor/tensorflow.py,sha256=6ZXPxKjXQ8NRTC_FGZZ_6-mEV_5xdzmZhJzdgR2cdl4,4625
117
120
  doctr/models/recognition/__init__.py,sha256=902nfVyvjOuUGHDKSGZgoS0fKC52J3jcUJQJhIpvOIY,124
118
121
  doctr/models/recognition/core.py,sha256=dbg8SebgfK8CPHXR-7rzmCI9XMLXmWW0jLd1yLLv_34,1593
119
122
  doctr/models/recognition/utils.py,sha256=GhNehWmCjl3GJ1ZFneA3cBRrZZk36856uU5i727FaQg,3550
120
- doctr/models/recognition/zoo.py,sha256=MakzszAsbiAgAJS4AhA02F6dWG47qTc1DklNXoey8JQ,2505
123
+ doctr/models/recognition/zoo.py,sha256=GFe7TikjfjF5nxuINrFJP7jK3hqan44kjNWoIFyYylA,2506
121
124
  doctr/models/recognition/crnn/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
122
125
  doctr/models/recognition/crnn/pytorch.py,sha256=AE8Ey-Z5VZNGUldL-crbMdyKI__OUMBmn8nYC2790Pc,11802
123
126
  doctr/models/recognition/crnn/tensorflow.py,sha256=dcT1X_zLmEqPiWG628lQTe9WMmfEWubXgCWFYs1BhJo,11666
124
127
  doctr/models/recognition/master/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
125
128
  doctr/models/recognition/master/base.py,sha256=5yQ0mUaS_ZWmUUzTAobgAlNS3Vp90PFvrzAcQXUF758,1540
126
- doctr/models/recognition/master/pytorch.py,sha256=Endn_S7svrN27IGdXDgAXXlZ_p0_IpasjvPPiJuxSiI,12318
129
+ doctr/models/recognition/master/pytorch.py,sha256=-RpyO6mBW3ql-BjNjnh5T-EMCvxIHLIJSUkB1lzX7Uw,12260
127
130
  doctr/models/recognition/master/tensorflow.py,sha256=rbrPMz49ySW8Wpd72dBNOH8dvcoAl3NwBi2ID7qVkxA,12140
128
131
  doctr/models/recognition/parseq/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
129
132
  doctr/models/recognition/parseq/base.py,sha256=8MMqibB8zZLw2qU-iyx79Zpr4MdEtbnF3f3ikfLrBjU,1534
130
- doctr/models/recognition/parseq/pytorch.py,sha256=lXpjXfgRRLzyHdCJCoc_0xsNN8_67ywoBb6tgoYCnj0,19868
131
- doctr/models/recognition/parseq/tensorflow.py,sha256=YNvJoddq8jYtxsW-wEsw1-p0a8gnkxOCEgklATQ2M-0,21558
133
+ doctr/models/recognition/parseq/pytorch.py,sha256=PAojvRwtz1qzKzW3JI_tTm1pco7mPHuX-Y-lah5mZOk,19927
134
+ doctr/models/recognition/parseq/tensorflow.py,sha256=Y0DCm4p2Sjxv8_9zAYP_9rkBQZTMMtlQHoYq4hLk8-0,21632
132
135
  doctr/models/recognition/predictor/__init__.py,sha256=lwmH917kRdbUUBsE02fELIuXQNRNePpIj3iK43ey6Bg,159
133
136
  doctr/models/recognition/predictor/_utils.py,sha256=y6hDoGS8reluLmx8JmTxM2f1uhlYnjOouh0BOr6wNTA,3389
134
137
  doctr/models/recognition/predictor/pytorch.py,sha256=snMHU0GopDEJ9HDdzpVxuvfJxVL-91Le-rc_dSqKCA0,2785
135
138
  doctr/models/recognition/predictor/tensorflow.py,sha256=o4Mhbxf9BUofqTV863U7-Zi0H77imX3LfhqzYLc2m4k,2549
136
139
  doctr/models/recognition/sar/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
137
- doctr/models/recognition/sar/pytorch.py,sha256=IuZ2KQO-2Du6FKRoJQud90fwNEhTFQy7e8t7pZaCuQE,15102
138
- doctr/models/recognition/sar/tensorflow.py,sha256=wkOlGdqK8NA_PYLQhcrgiv3Rqmeoj_HAi0Ku29QD5ds,15249
140
+ doctr/models/recognition/sar/pytorch.py,sha256=pN68aLfuqWKN6dexxeMy3DFJq1YP-MWUsUFj4BBHtXs,15118
141
+ doctr/models/recognition/sar/tensorflow.py,sha256=GLChryUwWZKAc77MxwDwTS8wc9GUzQaxlVv5PpYPuGA,15045
139
142
  doctr/models/recognition/vitstr/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
140
143
  doctr/models/recognition/vitstr/base.py,sha256=Xt7hq45tq999boF0XgW62x_cX5wJXx7VLxWA9H06U_o,1488
141
- doctr/models/recognition/vitstr/pytorch.py,sha256=8IxKWHt2uy6yXCsT_JTiccFoPToYKENH0H3tP-yTmHI,9596
144
+ doctr/models/recognition/vitstr/pytorch.py,sha256=21N7PJbaYmO_mQKW8uS0MGXTtTyFr4QYWRsX6PTKhtU,9568
142
145
  doctr/models/recognition/vitstr/tensorflow.py,sha256=_8k6Jxd715uH8lsBqUCn4C_3tlgE75h_BXt4AlfYrk8,9671
143
146
  doctr/models/utils/__init__.py,sha256=zwLK6mpproUGFH-1PUNiQyoR9IrAAakj7RgOiTJaBjk,200
144
147
  doctr/models/utils/pytorch.py,sha256=bO8a432TzpHTuqAsFuAi2ld6NOT12E2rlghQlW4nAjg,5494
145
148
  doctr/models/utils/tensorflow.py,sha256=VccfK6cyPoWuWGWAWbl17MkLa1srtHU-HJCbb_MXePw,6322
146
149
  doctr/transforms/__init__.py,sha256=0VHbvUZ7llFd1e_7_JdWTaxYMCvaR6KbUJaYJequmQI,23
147
150
  doctr/transforms/functional/__init__.py,sha256=FBZ2YT2Cq3mj6vpDC3ff5TcMpagNWFhwxQ_brdsgBqo,172
148
- doctr/transforms/functional/base.py,sha256=mFb2XGEX6g-36k_FSMhjj1MvW4ZXaqxv1GROCSc1cUg,6969
149
- doctr/transforms/functional/pytorch.py,sha256=aonwJAzPECgYeEWiigAQfbJHP8my_60Ul0x8LGdD-TI,5015
151
+ doctr/transforms/functional/base.py,sha256=c2PYwU4ZDDDwTGqgaIdOTP71XZ7lo458yc3CimYxiWQ,6943
152
+ doctr/transforms/functional/pytorch.py,sha256=itjgP7LgIdnPNbIjPqtrZHcxGAlTFg8NP8yBtg9bEco,5007
150
153
  doctr/transforms/functional/tensorflow.py,sha256=35dYnCtA9A9SvjndEvckxD8rK_uZ1_4BTgBZ7WiBtGI,9959
151
154
  doctr/transforms/modules/__init__.py,sha256=a4GXc5YZWt26eeBKo2HqLmbDn1_qo-uko6GoPNrniC0,221
152
- doctr/transforms/modules/base.py,sha256=_WboS3OoaM3yVBEApGG36RE61v8rzgSKZOSSsdVXgOU,9126
153
- doctr/transforms/modules/pytorch.py,sha256=sPJDRoAgfd9XSjI7DKV-3uZrcD-t6TH4jvL4Mi5yBP4,8606
154
- doctr/transforms/modules/tensorflow.py,sha256=5rQ_NcerIlsUWdFicApbDOdvmKZDD9bbojvXuWBOTTE,17959
155
+ doctr/transforms/modules/base.py,sha256=fwaXQhjuR514-fl4FqVZnb_NsOxkRtE8Yh_hiE2uCTU,9970
156
+ doctr/transforms/modules/pytorch.py,sha256=RCQ1MT9M4bipp5ghIQnxArvg7SjnT7xzHgShtlZmJcA,10937
157
+ doctr/transforms/modules/tensorflow.py,sha256=z1bR_Qk_QKSqeJByKrMTUsJyEekuCjr-ik8NO66nyyo,20348
155
158
  doctr/utils/__init__.py,sha256=uQY9ibZ24V896fmihIsK23QOIZdKtk0HyKoCVJ_lLuM,95
156
159
  doctr/utils/common_types.py,sha256=KXG-4mvL1MPmkrjuhCs8vAfiaBmdGRmt2yQcNlgALM8,584
157
160
  doctr/utils/data.py,sha256=26iN_Ra1OJD_LHIEbefADMxU2yVtCpu3gYdhCW5K9B4,4280
158
- doctr/utils/fonts.py,sha256=Ugjac4WPEJLsAf4U8j0f6DIoOpER_w13jHZ_GyvD0Xs,1224
159
- doctr/utils/geometry.py,sha256=Cfdw0kdH_K3qFMoioGlKdDgrRhgD2DhxXjy_lhIbpVQ,15685
160
- doctr/utils/metrics.py,sha256=hYRRlIW-e8onLPsYvnJL9HzBtwZT3x-p_yu52INz4uw,25935
161
+ doctr/utils/fonts.py,sha256=QqtfTDNCEKPb1drUbpXEhVDxtHWhKCKcGHC1l_t2_iI,1336
162
+ doctr/utils/geometry.py,sha256=zMgWMshx5uUqqUPgv8DJGAso96qa7yHu_0UnbzqsGOA,16459
163
+ doctr/utils/metrics.py,sha256=rWZUZGDhIaR422riqH3E5Mhvy_D6QwjKIz55PnILWlI,20552
161
164
  doctr/utils/multithreading.py,sha256=iEM6o_qjutH-CxFTz7K1VQseYpVaHH3Hpw_yNDoQBSw,1989
165
+ doctr/utils/reconstitution.py,sha256=O-AaibQRlfhKxCAiqd_lYhXzgoRtFMQgdRwCtuQU1fI,4770
162
166
  doctr/utils/repr.py,sha256=3GdMquo1NtwNkQPoB-nmDm_AFmU3sLc4T3VfGck9uoQ,2111
163
- doctr/utils/visualization.py,sha256=iIO6mEqqVKvkxGpDQJomJmGeplCxAuwuS8Vur0vEtYg,17758
164
- python_doctr-0.8.0.dist-info/LICENSE,sha256=75RTSsXOsAYhGpxsHc9U41ep6GS7vrUPufeekgoeOXM,11336
165
- python_doctr-0.8.0.dist-info/METADATA,sha256=x22K1JVru3iqq9PwP6U-j1ZebiG_Y9VuUsjqX0kguws,32631
166
- python_doctr-0.8.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
167
- python_doctr-0.8.0.dist-info/top_level.txt,sha256=lCgp4pmjPI3HYph62XhfzA3jRwM715kGtJPmqIUJ9t8,6
168
- python_doctr-0.8.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
169
- python_doctr-0.8.0.dist-info/RECORD,,
167
+ doctr/utils/visualization.py,sha256=L6UXyxecH2NVSA_u-OL0_TJ0HGLD5ROAyEaL59I7buI,13277
168
+ python_doctr-0.9.0.dist-info/LICENSE,sha256=75RTSsXOsAYhGpxsHc9U41ep6GS7vrUPufeekgoeOXM,11336
169
+ python_doctr-0.9.0.dist-info/METADATA,sha256=KU5hLGiNSLrBVfS7SBrgNkr4vGIB-OSXaU-nPQf8fRM,33351
170
+ python_doctr-0.9.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
171
+ python_doctr-0.9.0.dist-info/top_level.txt,sha256=lCgp4pmjPI3HYph62XhfzA3jRwM715kGtJPmqIUJ9t8,6
172
+ python_doctr-0.9.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
173
+ python_doctr-0.9.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.42.0)
2
+ Generator: setuptools (72.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,2 +0,0 @@
1
- from .barcode import *
2
- from .face import *