paddlex-3.0.1-py3-none-any.whl → paddlex-3.0.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. paddlex/.version +1 -1
  2. paddlex/inference/models/base/predictor/base_predictor.py +2 -0
  3. paddlex/inference/models/common/static_infer.py +20 -14
  4. paddlex/inference/models/common/ts/funcs.py +19 -8
  5. paddlex/inference/models/formula_recognition/predictor.py +1 -1
  6. paddlex/inference/models/formula_recognition/processors.py +2 -2
  7. paddlex/inference/models/text_recognition/result.py +1 -1
  8. paddlex/inference/pipelines/layout_parsing/layout_objects.py +859 -0
  9. paddlex/inference/pipelines/layout_parsing/pipeline_v2.py +144 -205
  10. paddlex/inference/pipelines/layout_parsing/result_v2.py +13 -272
  11. paddlex/inference/pipelines/layout_parsing/setting.py +1 -0
  12. paddlex/inference/pipelines/layout_parsing/utils.py +108 -312
  13. paddlex/inference/pipelines/layout_parsing/xycut_enhanced/utils.py +302 -247
  14. paddlex/inference/pipelines/layout_parsing/xycut_enhanced/xycuts.py +156 -104
  15. paddlex/inference/pipelines/ocr/result.py +2 -2
  16. paddlex/inference/pipelines/pp_chatocr/pipeline_v4.py +1 -1
  17. paddlex/inference/serving/basic_serving/_app.py +47 -13
  18. paddlex/inference/serving/infra/utils.py +22 -17
  19. paddlex/inference/utils/hpi.py +60 -25
  20. paddlex/inference/utils/hpi_model_info_collection.json +627 -204
  21. paddlex/inference/utils/misc.py +20 -0
  22. paddlex/inference/utils/mkldnn_blocklist.py +36 -2
  23. paddlex/inference/utils/official_models.py +126 -5
  24. paddlex/inference/utils/pp_option.py +81 -21
  25. paddlex/modules/semantic_segmentation/dataset_checker/__init__.py +12 -2
  26. paddlex/ops/__init__.py +6 -3
  27. paddlex/utils/deps.py +2 -2
  28. paddlex/utils/device.py +4 -19
  29. paddlex/utils/download.py +10 -7
  30. paddlex/utils/flags.py +9 -0
  31. paddlex/utils/subclass_register.py +2 -2
  32. {paddlex-3.0.1.dist-info → paddlex-3.0.3.dist-info}/METADATA +307 -162
  33. {paddlex-3.0.1.dist-info → paddlex-3.0.3.dist-info}/RECORD +37 -35
  34. {paddlex-3.0.1.dist-info → paddlex-3.0.3.dist-info}/WHEEL +1 -1
  35. {paddlex-3.0.1.dist-info → paddlex-3.0.3.dist-info}/entry_points.txt +1 -0
  36. {paddlex-3.0.1.dist-info/licenses → paddlex-3.0.3.dist-info}/LICENSE +0 -0
  37. {paddlex-3.0.1.dist-info → paddlex-3.0.3.dist-info}/top_level.txt +0 -0
paddlex/.version CHANGED
@@ -1 +1 @@
1
- 3.0.1
1
+ 3.0.3
@@ -337,9 +337,11 @@ class BasePredictor(
337
337
  pp_option = PaddlePredictorOption(model_name=self.model_name)
338
338
  elif pp_option.model_name is None:
339
339
  pp_option.model_name = self.model_name
340
+ pp_option.reset_run_mode_by_default(model_name=self.model_name)
340
341
  if device_info:
341
342
  pp_option.device_type = device_info[0]
342
343
  pp_option.device_id = device_info[1]
344
+ pp_option.reset_run_mode_by_default(device_type=device_info[0])
343
345
  hpi_info = self.get_hpi_info()
344
346
  if hpi_info is not None:
345
347
  hpi_info = hpi_info.model_dump(exclude_unset=True)
@@ -33,7 +33,7 @@ from ...utils.hpi import (
33
33
  suggest_inference_backend_and_config,
34
34
  )
35
35
  from ...utils.model_paths import get_model_paths
36
- from ...utils.pp_option import PaddlePredictorOption
36
+ from ...utils.pp_option import PaddlePredictorOption, get_default_run_mode
37
37
  from ...utils.trt_config import DISABLE_TRT_HALF_OPS_CONFIG
38
38
 
39
39
  CACHE_DIR = ".cache"
@@ -407,15 +407,10 @@ class PaddleInfer(StaticInfer):
407
407
  assert self._option.device_type == "cpu"
408
408
  config.disable_gpu()
409
409
  if "mkldnn" in self._option.run_mode:
410
- try:
411
- config.enable_mkldnn()
412
- if "bf16" in self._option.run_mode:
413
- config.enable_mkldnn_bfloat16()
414
- except Exception:
415
- logging.warning(
416
- "MKL-DNN is not available. We will disable MKL-DNN."
417
- )
418
- config.set_mkldnn_cache_capacity(-1)
410
+ config.enable_mkldnn()
411
+ if "bf16" in self._option.run_mode:
412
+ config.enable_mkldnn_bfloat16()
413
+ config.set_mkldnn_cache_capacity(self._option.mkldnn_cache_capacity)
419
414
  else:
420
415
  if hasattr(config, "disable_mkldnn"):
421
416
  config.disable_mkldnn()
@@ -639,10 +634,19 @@ class HPInfer(StaticInfer):
639
634
  )
640
635
  backend_config = self._config.backend_config or {}
641
636
 
642
- if backend == "paddle" and not backend_config:
643
- logging.warning(
644
- "The Paddle Inference backend is selected with the default configuration. This may not provide optimal performance."
645
- )
637
+ if backend == "paddle":
638
+ if not backend_config:
639
+ is_default_config = True
640
+ elif backend_config.keys() != {"run_mode"}:
641
+ is_default_config = False
642
+ else:
643
+ is_default_config = backend_config["run_mode"] == get_default_run_mode(
644
+ self._config.pdx_model_name, self._config.device_type
645
+ )
646
+ if is_default_config:
647
+ logging.warning(
648
+ "The Paddle Inference backend is selected with the default configuration. This may not provide optimal performance."
649
+ )
646
650
 
647
651
  return backend, backend_config
648
652
 
@@ -683,6 +687,8 @@ class HPInfer(StaticInfer):
683
687
  return PaddleInfer(self._model_dir, self._model_file_prefix, option=pp_option)
684
688
 
685
689
  def _build_ui_runtime(self, backend, backend_config, ui_option=None):
690
+ # TODO: Validate the compatibility of backends with device types
691
+
686
692
  from ultra_infer import ModelFormat, Runtime, RuntimeOption
687
693
 
688
694
  if ui_option is None:
@@ -17,10 +17,11 @@ from typing import Callable, Dict, List, Optional, Union
17
17
 
18
18
  import numpy as np
19
19
  import pandas as pd
20
+ from packaging.version import Version
20
21
  from pandas.tseries import holiday as hd
21
22
  from pandas.tseries.offsets import DateOffset, Day, Easter
22
23
 
23
- from .....utils.deps import function_requires_deps, is_dep_available
24
+ from .....utils.deps import function_requires_deps, get_dep_version, is_dep_available
24
25
 
25
26
  if is_dep_available("chinese-calendar"):
26
27
  import chinese_calendar
@@ -496,13 +497,23 @@ def time_feature(
496
497
  # Extend the time series if no known_cov_numeric
497
498
  if not kcov:
498
499
  freq = freq if freq is not None else pd.infer_freq(tf_kcov[time_col])
499
- extend_time = pd.date_range(
500
- start=tf_kcov[time_col][-1],
501
- freq=freq,
502
- periods=extend_points + 1,
503
- closed="right",
504
- name=time_col,
505
- ).to_frame()
500
+ pd_version = get_dep_version("pandas")
501
+ if Version(pd_version) >= Version("1.4"):
502
+ extend_time = pd.date_range(
503
+ start=tf_kcov[time_col][-1],
504
+ freq=freq,
505
+ periods=extend_points + 1,
506
+ inclusive="right",
507
+ name=time_col,
508
+ ).to_frame()
509
+ else:
510
+ extend_time = pd.date_range(
511
+ start=tf_kcov[time_col][-1],
512
+ freq=freq,
513
+ periods=extend_points + 1,
514
+ closed="right",
515
+ name=time_col,
516
+ ).to_frame()
506
517
  tf_kcov = pd.concat([tf_kcov, extend_time])
507
518
 
508
519
  # Extract and add time features to known_cov_numeric
@@ -136,7 +136,7 @@ class FormulaRecPredictor(BasePredictor):
136
136
  }
137
137
 
138
138
  @register("DecodeImage")
139
- def build_readimg(self, channel_first, img_mode):
139
+ def build_readimg(self, channel_first, img_mode="RGB"):
140
140
  assert channel_first == False
141
141
  return "Read", ReadImage(format=img_mode)
142
142
 
@@ -365,7 +365,7 @@ class LaTeXOCRDecode(object):
365
365
  dec = [self.tokenizer.decode(tok) for tok in tokens]
366
366
  dec_str_list = [
367
367
  "".join(detok.split(" "))
368
- .replace("", " ")
368
+ .replace("Ġ", " ")
369
369
  .replace("[EOS]", "")
370
370
  .replace("[BOS]", "")
371
371
  .replace("[PAD]", "")
@@ -798,7 +798,7 @@ class UniMERNetDecode(object):
798
798
  for i in reversed(range(len(toks[b]))):
799
799
  if toks[b][i] is None:
800
800
  toks[b][i] = ""
801
- toks[b][i] = toks[b][i].replace("", " ").strip()
801
+ toks[b][i] = toks[b][i].replace("Ġ", " ").strip()
802
802
  if toks[b][i] in (
803
803
  [
804
804
  self.tokenizer.bos_token,
@@ -35,7 +35,7 @@ class TextRecResult(BaseCVResult):
35
35
 
36
36
  def _to_img(self):
37
37
  """Draw label on image"""
38
- image = Image.fromarray(self["input_img"])
38
+ image = Image.fromarray(self["input_img"][:, :, ::-1])
39
39
  rec_text = self["rec_text"]
40
40
  rec_score = self["rec_score"]
41
41
  image = image.convert("RGB")