paddlex 3.0.2__py3-none-any.whl → 3.0.3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
paddlex/.version CHANGED
@@ -1 +1 @@
- 3.0.2
+ 3.0.3
paddlex/inference/models/base/predictor/base_predictor.py CHANGED
@@ -337,9 +337,11 @@ class BasePredictor(
  pp_option = PaddlePredictorOption(model_name=self.model_name)
  elif pp_option.model_name is None:
  pp_option.model_name = self.model_name
+ pp_option.reset_run_mode_by_default(model_name=self.model_name)
  if device_info:
  pp_option.device_type = device_info[0]
  pp_option.device_id = device_info[1]
+ pp_option.reset_run_mode_by_default(device_type=device_info[0])
  hpi_info = self.get_hpi_info()
  if hpi_info is not None:
  hpi_info = hpi_info.model_dump(exclude_unset=True)
paddlex/inference/models/common/static_infer.py CHANGED
@@ -687,6 +687,8 @@ class HPInfer(StaticInfer):
  return PaddleInfer(self._model_dir, self._model_file_prefix, option=pp_option)

  def _build_ui_runtime(self, backend, backend_config, ui_option=None):
+ # TODO: Validate the compatibility of backends with device types
+
  from ultra_infer import ModelFormat, Runtime, RuntimeOption

  if ui_option is None:
paddlex/inference/pipelines/layout_parsing/result_v2.py CHANGED
@@ -435,8 +435,8 @@ class LayoutParsingResultV2(BaseCVResult, HtmlMixin, XlsxMixin, MarkdownMixin):

  markdown_content = ""
  last_label = None
- seg_start_flag = None
- seg_end_flag = None
+ seg_start_flag = True
+ seg_end_flag = True
  prev_block = None
  page_first_element_seg_start_flag = None
  page_last_element_seg_end_flag = None
@@ -468,6 +468,11 @@ class LayoutParsingResultV2(BaseCVResult, HtmlMixin, XlsxMixin, MarkdownMixin):
  else handle_func(block)
  )
  last_label = label
+ page_first_element_seg_start_flag = (
+ True
+ if page_first_element_seg_start_flag is None
+ else page_first_element_seg_start_flag
+ )
  page_last_element_seg_end_flag = seg_end_flag

  markdown_info["markdown_texts"] = markdown_content
paddlex/inference/serving/basic_serving/_app.py CHANGED
@@ -114,6 +114,7 @@ class PipelineWrapper(Generic[PipelineT]):
  if not self._closed:
  self._queue.put(None)
  await call_async(self._thread.join)
+ self._closed = True

  def _worker(self):
  while not self._closed:
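The fix marks the wrapper as closed after the worker thread has been joined, so a second close becomes a no-op and the `while not self._closed` loop observes shutdown. Below is a minimal, self-contained synchronous sketch of the same sentinel-plus-flag shutdown pattern; `WorkerSketch` is a hypothetical stand-in, not the serving app's actual wrapper:

```python
import queue
import threading


class WorkerSketch:
    """Hypothetical stand-in illustrating the sentinel + _closed shutdown pattern."""

    def __init__(self):
        self._queue = queue.Queue()
        self._closed = False
        self._thread = threading.Thread(target=self._worker, daemon=True)
        self._thread.start()

    def close(self):
        if not self._closed:
            self._queue.put(None)   # sentinel: tell the worker to stop
            self._thread.join()     # wait until it has exited
            self._closed = True     # make further close() calls no-ops

    def _worker(self):
        while not self._closed:
            item = self._queue.get()
            if item is None:        # sentinel received, leave the loop
                break
            # ... process item ...


w = WorkerSketch()
w.close()
w.close()  # safe: the second call returns immediately
```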
paddlex/inference/serving/infra/utils.py CHANGED
@@ -18,6 +18,7 @@ import io
  import mimetypes
  import re
  import tempfile
+ import threading
  import uuid
  from functools import partial
  from typing import Awaitable, Callable, List, Optional, Tuple, TypeVar, Union, overload
@@ -176,29 +177,33 @@ def base64_encode(data: bytes) -> str:
  return base64.b64encode(data).decode("ascii")


+ _lock = threading.Lock()
+
+
  @function_requires_deps("pypdfium2", "opencv-contrib-python")
  def read_pdf(
  bytes_: bytes, max_num_imgs: Optional[int] = None
  ) -> Tuple[List[np.ndarray], PDFInfo]:
  images: List[np.ndarray] = []
  page_info_list: List[PDFPageInfo] = []
- doc = pdfium.PdfDocument(bytes_)
- for page in doc:
- if max_num_imgs is not None and len(images) >= max_num_imgs:
- break
- # TODO: Do not always use zoom=2.0
- zoom = 2.0
- deg = 0
- image = page.render(scale=zoom, rotation=deg).to_pil()
- image = image.convert("RGB")
- image = np.array(image)
- image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
- images.append(image)
- page_info = PDFPageInfo(
- width=image.shape[1],
- height=image.shape[0],
- )
- page_info_list.append(page_info)
+ with _lock:
+ doc = pdfium.PdfDocument(bytes_)
+ for page in doc:
+ if max_num_imgs is not None and len(images) >= max_num_imgs:
+ break
+ # TODO: Do not always use zoom=2.0
+ zoom = 2.0
+ deg = 0
+ image = page.render(scale=zoom, rotation=deg).to_pil()
+ image = image.convert("RGB")
+ image = np.array(image)
+ image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
+ images.append(image)
+ page_info = PDFPageInfo(
+ width=image.shape[1],
+ height=image.shape[0],
+ )
+ page_info_list.append(page_info)
  pdf_info = PDFInfo(
  numPages=len(page_info_list),
  pages=page_info_list,
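The whole pdfium rendering loop is now serialized behind a module-level lock, a common way to call a library that is not safe to use from multiple threads at once. A self-contained sketch of the pattern follows; `render_pages` and `_do_render` are illustrative placeholders, not PaddleX APIs:

```python
import threading

# One lock per process guards every call into the non-thread-safe library.
_lock = threading.Lock()


def render_pages(data: bytes) -> list:
    results = []
    with _lock:  # only one thread renders at a time
        for page_index in range(3):
            results.append(_do_render(data, page_index))
    return results


def _do_render(data: bytes, page_index: int) -> str:
    # Placeholder for the real, non-thread-safe rendering call.
    return f"page-{page_index}: {len(data)} bytes"
```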
paddlex/inference/utils/hpi.py CHANGED
@@ -132,13 +132,25 @@ def suggest_inference_backend_and_config(
  available_backends = []
  if "paddle" in model_paths:
  available_backends.append("paddle")
- if is_built_with_openvino() and is_onnx_model_available:
+ if (
+ is_built_with_openvino()
+ and is_onnx_model_available
+ and hpi_config.device_type == "cpu"
+ ):
  available_backends.append("openvino")
- if is_built_with_ort() and is_onnx_model_available:
+ if (
+ is_built_with_ort()
+ and is_onnx_model_available
+ and hpi_config.device_type in ("cpu", "gpu")
+ ):
  available_backends.append("onnxruntime")
- if is_built_with_trt() and is_onnx_model_available:
+ if (
+ is_built_with_trt()
+ and is_onnx_model_available
+ and hpi_config.device_type == "gpu"
+ ):
  available_backends.append("tensorrt")
- if is_built_with_om() and "om" in model_paths:
+ if is_built_with_om() and "om" in model_paths and hpi_config.device_type == "npu":
  available_backends.append("om")

  if not available_backends:
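Each backend is now offered only when the target device can actually use it (OpenVINO on CPU, ONNX Runtime on CPU/GPU, TensorRT on GPU, OM on NPU). A standalone sketch of this gating, with the build checks replaced by plain booleans since the real `is_built_with_*` helpers live inside PaddleX/ultra_infer:

```python
def gated_backends(device_type: str, onnx_available: bool, built: dict) -> list:
    """Sketch: keep only backends that both exist in the build and fit the device."""
    backends = []
    if built.get("openvino") and onnx_available and device_type == "cpu":
        backends.append("openvino")
    if built.get("onnxruntime") and onnx_available and device_type in ("cpu", "gpu"):
        backends.append("onnxruntime")
    if built.get("tensorrt") and onnx_available and device_type == "gpu":
        backends.append("tensorrt")
    if built.get("om") and device_type == "npu":
        backends.append("om")
    return backends


print(gated_backends("gpu", True, {"onnxruntime": True, "tensorrt": True}))
# ['onnxruntime', 'tensorrt']
```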
@@ -188,20 +200,21 @@ def suggest_inference_backend_and_config(
  hpi_config.pdx_model_name
  ].copy()

- if not is_mkldnn_available():
- if "paddle_mkldnn" in supported_pseudo_backends:
- supported_pseudo_backends.remove("paddle_mkldnn")
+ if not (is_mkldnn_available() and hpi_config.device_type == "cpu"):
+ for pb in supported_pseudo_backends[:]:
+ if pb.startswith("paddle_mkldnn"):
+ supported_pseudo_backends.remove(pb)

  # XXX
  if not (
  USE_PIR_TRT
  and importlib.util.find_spec("tensorrt")
  and ctypes.util.find_library("nvinfer")
+ and hpi_config.device_type == "gpu"
  ):
- if "paddle_tensorrt" in supported_pseudo_backends:
- supported_pseudo_backends.remove("paddle_tensorrt")
- if "paddle_tensorrt_fp16" in supported_pseudo_backends:
- supported_pseudo_backends.remove("paddle_tensorrt_fp16")
+ for pb in supported_pseudo_backends[:]:
+ if pb.startswith("paddle_tensorrt"):
+ supported_pseudo_backends.remove(pb)

  supported_backends = []
  backend_to_pseudo_backends = defaultdict(list)
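The rewritten filters drop every pseudo-backend whose name starts with a given prefix rather than removing two hard-coded names. Iterating over a slice copy (`supported_pseudo_backends[:]`) is what makes removal during iteration safe, as this small sketch shows (the example values mirror the pseudo-backend names seen elsewhere in this diff):

```python
# Removing items while looping requires iterating over a copy,
# otherwise the list iterator skips elements after each removal.
supported_pseudo_backends = [
    "paddle", "paddle_tensorrt", "paddle_tensorrt_fp16", "onnxruntime",
]

for pb in supported_pseudo_backends[:]:  # [:] takes a shallow copy
    if pb.startswith("paddle_tensorrt"):
        supported_pseudo_backends.remove(pb)

print(supported_pseudo_backends)  # ['paddle', 'onnxruntime']
```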
@@ -227,12 +240,27 @@ def suggest_inference_backend_and_config(
  f"{repr(hpi_config.backend)} is not a supported inference backend.",
  )
  suggested_backend = hpi_config.backend
- pseudo_backends = backend_to_pseudo_backends[suggested_backend]
- pseudo_backend = pseudo_backends[0]
  else:
  # Prefer the first one.
  suggested_backend = supported_backends[0]
- pseudo_backend = supported_pseudo_backends[0]
+
+ pseudo_backends = backend_to_pseudo_backends[suggested_backend]
+
+ if hpi_config.backend_config is not None:
+ requested_base_pseudo_backend = None
+ if suggested_backend == "paddle":
+ if "run_mode" in hpi_config.backend_config:
+ if hpi_config.backend_config["run_mode"].startswith("mkldnn"):
+ requested_base_pseudo_backend = "paddle_mkldnn"
+ elif hpi_config.backend_config["run_mode"].startswith("trt"):
+ requested_base_pseudo_backend = "paddle_tensorrt"
+ if requested_base_pseudo_backend:
+ for pb in pseudo_backends:
+ if pb.startswith(requested_base_pseudo_backend):
+ break
+ else:
+ return None, "Unsupported backend configuration."
+ pseudo_backend = pseudo_backends[0]

  suggested_backend_config = {}
  if suggested_backend == "paddle":
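The new block relies on Python's for/else: the `else` branch runs only when the loop finishes without hitting `break`, i.e. when no supported pseudo-backend matches the requested prefix, and the function then rejects the configuration. A minimal sketch of that control flow (the function and values below are illustrative, not the real hpi.py code):

```python
def check_requested(pseudo_backends, requested_prefix):
    """Sketch of the for/else search used above."""
    for pb in pseudo_backends:
        if pb.startswith(requested_prefix):
            break  # a match exists; fall through and accept
    else:
        # Reached only if the loop never hit `break`.
        return None, "Unsupported backend configuration."
    return pseudo_backends[0], None


print(check_requested(["paddle", "paddle_mkldnn"], "paddle_tensorrt"))
# (None, 'Unsupported backend configuration.')
```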
paddlex/inference/utils/hpi_model_info_collection.json CHANGED
@@ -1992,7 +1992,6 @@
  "onnxruntime"
  ],
  "PP-OCRv4_server_seal_det": [
- "paddle_tensorrt",
  "tensorrt",
  "onnxruntime",
  "paddle"
@@ -2094,7 +2093,6 @@
  "onnxruntime"
  ],
  "PP-OCRv4_server_det": [
- "paddle_tensorrt_fp16",
  "tensorrt",
  "onnxruntime",
  "paddle"
paddlex/inference/utils/pp_option.py CHANGED
@@ -69,6 +69,7 @@ class PaddlePredictorOption(object):

  def __init__(self, model_name=None, **kwargs):
  super().__init__()
+ self._is_default_run_mode = True
  self._model_name = model_name
  self._cfg = {}
  self._init_option(**kwargs)
@@ -106,6 +107,10 @@ class PaddlePredictorOption(object):
  raise Exception(
  f"{k} is not supported to set! The supported option is: {self._get_settable_attributes()}"
  )
+
+ if "run_mode" in self._cfg:
+ self._is_default_run_mode = False
+
  for k, v in self._get_default_config().items():
  self._cfg.setdefault(k, v)

@@ -122,12 +127,16 @@ class PaddlePredictorOption(object):

  def _get_default_config(self):
  """get default config"""
- device_type, device_ids = parse_device(get_default_device())
+ if self.device_type is None:
+ device_type, device_ids = parse_device(get_default_device())
+ device_id = None if device_ids is None else device_ids[0]
+ else:
+ device_type, device_id = self.device_type, self.device_id

  default_config = {
  "run_mode": get_default_run_mode(self.model_name, device_type),
  "device_type": device_type,
- "device_id": None if device_ids is None else device_ids[0],
+ "device_id": device_id,
  "cpu_threads": 8,
  "delete_pass": [],
  "enable_new_ir": True if self.model_name not in NEWIR_BLOCKLIST else False,
@@ -148,9 +157,15 @@ class PaddlePredictorOption(object):
  self._cfg[k] = v
  self.changed = True

+ def reset_run_mode_by_default(self, model_name=None, device_type=None):
+ if self._is_default_run_mode:
+ model_name = model_name or self.model_name
+ device_type = device_type or self.device_type
+ self._update("run_mode", get_default_run_mode(model_name, device_type))
+
  @property
  def run_mode(self):
- return self._cfg["run_mode"]
+ return self._cfg.get("run_mode")

  @run_mode.setter
  def run_mode(self, run_mode: str):
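`reset_run_mode_by_default` only recomputes the run mode while `_is_default_run_mode` is still True; once the user assigns `run_mode` through the setter (see the next hunk), the flag flips and later resets become no-ops. A stripped-down sketch of that mechanism, using a hypothetical defaults table instead of the real `get_default_run_mode`:

```python
# Hypothetical defaults; the real values come from get_default_run_mode().
_DEFAULTS = {("SomeModel", "cpu"): "mkldnn", ("SomeModel", "gpu"): "paddle"}


class OptionSketch:
    def __init__(self):
        self._is_default_run_mode = True
        self._cfg = {}

    @property
    def run_mode(self):
        return self._cfg.get("run_mode")    # .get(): None instead of KeyError

    @run_mode.setter
    def run_mode(self, value):
        self._is_default_run_mode = False   # the user made an explicit choice
        self._cfg["run_mode"] = value

    def reset_run_mode_by_default(self, model_name, device_type):
        if self._is_default_run_mode:       # only touch auto-selected values
            self._cfg["run_mode"] = _DEFAULTS[(model_name, device_type)]


opt = OptionSketch()
opt.reset_run_mode_by_default("SomeModel", "cpu")
print(opt.run_mode)                          # mkldnn (auto-selected)
opt.run_mode = "trt_fp16"                    # explicit user choice
opt.reset_run_mode_by_default("SomeModel", "gpu")
print(opt.run_mode)                          # trt_fp16 (preserved)
```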
@@ -189,11 +204,12 @@ class PaddlePredictorOption(object):
  )
  run_mode = "paddle"

+ self._is_default_run_mode = False
  self._update("run_mode", run_mode)

  @property
  def device_type(self):
- return self._cfg["device_type"]
+ return self._cfg.get("device_type")

  @device_type.setter
  def device_type(self, device_type):
@@ -211,7 +227,7 @@ class PaddlePredictorOption(object):

  @property
  def device_id(self):
- return self._cfg["device_id"]
+ return self._cfg.get("device_id")

  @device_id.setter
  def device_id(self, device_id):
@@ -219,7 +235,7 @@ class PaddlePredictorOption(object):

  @property
  def cpu_threads(self):
- return self._cfg["cpu_threads"]
+ return self._cfg.get("cpu_threads")

  @cpu_threads.setter
  def cpu_threads(self, cpu_threads):
@@ -230,7 +246,7 @@ class PaddlePredictorOption(object):

  @property
  def delete_pass(self):
- return self._cfg["delete_pass"]
+ return self._cfg.get("delete_pass")

  @delete_pass.setter
  def delete_pass(self, delete_pass):
@@ -238,7 +254,7 @@ class PaddlePredictorOption(object):

  @property
  def enable_new_ir(self):
- return self._cfg["enable_new_ir"]
+ return self._cfg.get("enable_new_ir")

  @enable_new_ir.setter
  def enable_new_ir(self, enable_new_ir: bool):
@@ -247,7 +263,7 @@ class PaddlePredictorOption(object):

  @property
  def enable_cinn(self):
- return self._cfg["enable_cinn"]
+ return self._cfg.get("enable_cinn")

  @enable_cinn.setter
  def enable_cinn(self, enable_cinn: bool):
@@ -256,7 +272,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_cfg_setting(self):
- return self._cfg["trt_cfg_setting"]
+ return self._cfg.get("trt_cfg_setting")

  @trt_cfg_setting.setter
  def trt_cfg_setting(self, config: Dict):
@@ -268,7 +284,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_use_dynamic_shapes(self):
- return self._cfg["trt_use_dynamic_shapes"]
+ return self._cfg.get("trt_use_dynamic_shapes")

  @trt_use_dynamic_shapes.setter
  def trt_use_dynamic_shapes(self, trt_use_dynamic_shapes):
@@ -276,7 +292,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_collect_shape_range_info(self):
- return self._cfg["trt_collect_shape_range_info"]
+ return self._cfg.get("trt_collect_shape_range_info")

  @trt_collect_shape_range_info.setter
  def trt_collect_shape_range_info(self, trt_collect_shape_range_info):
@@ -284,7 +300,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_discard_cached_shape_range_info(self):
- return self._cfg["trt_discard_cached_shape_range_info"]
+ return self._cfg.get("trt_discard_cached_shape_range_info")

  @trt_discard_cached_shape_range_info.setter
  def trt_discard_cached_shape_range_info(self, trt_discard_cached_shape_range_info):
@@ -294,7 +310,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_dynamic_shapes(self):
- return self._cfg["trt_dynamic_shapes"]
+ return self._cfg.get("trt_dynamic_shapes")

  @trt_dynamic_shapes.setter
  def trt_dynamic_shapes(self, trt_dynamic_shapes: Dict[str, List[List[int]]]):
@@ -305,7 +321,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_dynamic_shape_input_data(self):
- return self._cfg["trt_dynamic_shape_input_data"]
+ return self._cfg.get("trt_dynamic_shape_input_data")

  @trt_dynamic_shape_input_data.setter
  def trt_dynamic_shape_input_data(
@@ -315,7 +331,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_shape_range_info_path(self):
- return self._cfg["trt_shape_range_info_path"]
+ return self._cfg.get("trt_shape_range_info_path")

  @trt_shape_range_info_path.setter
  def trt_shape_range_info_path(self, trt_shape_range_info_path: str):
@@ -324,7 +340,7 @@ class PaddlePredictorOption(object):

  @property
  def trt_allow_rebuild_at_runtime(self):
- return self._cfg["trt_allow_rebuild_at_runtime"]
+ return self._cfg.get("trt_allow_rebuild_at_runtime")

  @trt_allow_rebuild_at_runtime.setter
  def trt_allow_rebuild_at_runtime(self, trt_allow_rebuild_at_runtime):
@@ -332,7 +348,7 @@ class PaddlePredictorOption(object):

  @property
  def mkldnn_cache_capacity(self):
- return self._cfg["mkldnn_cache_capacity"]
+ return self._cfg.get("mkldnn_cache_capacity")

  @mkldnn_cache_capacity.setter
  def mkldnn_cache_capacity(self, capacity: int):
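Across all of these properties the getters switch from `self._cfg[...]` to `self._cfg.get(...)`, so reading an option before its default has been filled in yields None instead of raising KeyError, which is what lets `_get_default_config` consult `self.device_type` and `self.device_id` safely. A two-line illustration of the difference:

```python
cfg = {}
print(cfg.get("device_type"))   # None: safe to read before defaults are set
try:
    cfg["device_type"]          # the old accessor style
except KeyError as e:
    print("KeyError:", e)       # KeyError: 'device_type'
```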
paddlex/utils/download.py CHANGED
@@ -39,14 +39,14 @@ class _ProgressPrinter(object):
  str_ += "\n"
  self._last_time = 0
  if time.time() - self._last_time >= self._flush_intvl:
- sys.stdout.write(f"\r{str_}")
+ sys.stderr.write(f"\r{str_}")
  self._last_time = time.time()
- sys.stdout.flush()
+ sys.stderr.flush()


  def _download(url, save_path, print_progress):
  if print_progress:
- print(f"Connecting to {url} ...")
+ print(f"Connecting to {url} ...", file=sys.stderr)

  with requests.get(url, stream=True, timeout=15) as r:
  r.raise_for_status()
@@ -62,7 +62,10 @@ def _download(url, save_path, print_progress):
  total_length = int(total_length)
  if print_progress:
  printer = _ProgressPrinter()
- print(f"Downloading {os.path.basename(save_path)} ...")
+ print(
+ f"Downloading {os.path.basename(save_path)} ...",
+ file=sys.stderr,
+ )
  for data in r.iter_content(chunk_size=4096):
  dl += len(data)
  f.write(data)
@@ -95,17 +98,17 @@ def _extract_tar_file(file_path, extd_dir):
  try:
  f.extract(file, extd_dir)
  except KeyError:
- print(f"File {file} not found in the archive.")
+ print(f"File {file} not found in the archive.", file=sys.stderr)
  yield total_num, index
  except Exception as e:
- print(f"An error occurred: {e}")
+ print(f"An error occurred: {e}", file=sys.stderr)


  def _extract(file_path, extd_dir, print_progress):
  """extract"""
  if print_progress:
  printer = _ProgressPrinter()
- print(f"Extracting {os.path.basename(file_path)}")
+ print(f"Extracting {os.path.basename(file_path)}", file=sys.stderr)

  if zipfile.is_zipfile(file_path):
  handler = _extract_zip_file
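All progress and status messages in download.py now go to stderr, which keeps stdout free for actual program output (for example when a CLI command's stdout is piped or parsed). A small sketch of the same convention; `fetch_with_progress` is illustrative, not the real PaddleX downloader:

```python
import sys


def fetch_with_progress(name: str) -> str:
    # Status messages go to stderr so that stdout stays machine-readable.
    print(f"Downloading {name} ...", file=sys.stderr)
    sys.stderr.write("\r100%\n")
    sys.stderr.flush()
    return f"{name}: ok"  # the only thing the caller writes to stdout


print(fetch_with_progress("model.tar"))
```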
paddlex-3.0.2.dist-info/METADATA → paddlex-3.0.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: paddlex
- Version: 3.0.2
+ Version: 3.0.3
  Summary: Low-code development tool based on PaddlePaddle.
  Home-page: UNKNOWN
  Author: PaddlePaddle Authors
paddlex-3.0.2.dist-info/RECORD → paddlex-3.0.3.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- paddlex/.version,sha256=P8C_jX3b_nfrQHTAsD1lZ9eHSiXqQUSq3qZob81fb-s,6
+ paddlex/.version,sha256=22W3tlP3wzosH5_TgfX74ImjifBjVngxeFuvSy4Ul-k,6
  paddlex/__init__.py,sha256=gfGHiM_wthbQaWWyZ_Xwb7VwA4YxhJa_E7vsxbD7tVc,1703
  paddlex/__main__.py,sha256=9HXLNHbXfczSIc0uYKqD1cHpsjs86xLmiVoy0cB_aiI,1290
  paddlex/constants.py,sha256=s7As1cvezO0hsuKzkRo1XX5gTIc6Srx0mZqpR6v4H04,680
@@ -344,9 +344,9 @@ paddlex/inference/models/anomaly_detection/processors.py,sha256=OhPoGXkF5Wr1AFSe
  paddlex/inference/models/anomaly_detection/result.py,sha256=vEqBIw9cOgBZzBuoniao4AztdSkiAuqMJEGHQevMz4M,2368
  paddlex/inference/models/base/__init__.py,sha256=XmfOH2Zt6T28bilPm9aHPy88zlthnpz8zufxQuHU2nI,647
  paddlex/inference/models/base/predictor/__init__.py,sha256=euD01o3s0Zg9VlzA5spCak2TElkU4RFiSUxCEiatH8Y,652
- paddlex/inference/models/base/predictor/base_predictor.py,sha256=GZXWaOjLk0AAp7PKdbeTbNBGi0oTjqES2lkVLvJEeqg,14728
+ paddlex/inference/models/base/predictor/base_predictor.py,sha256=DWfmZDFgZkEUbdXBujFHQxL5bRgFFLkMvH3JxJHSdzc,14880
  paddlex/inference/models/common/__init__.py,sha256=3lMwinLaX3HHXCFUcppc8uV_5P-XGv0Nf5aWvZYcbqw,926
- paddlex/inference/models/common/static_infer.py,sha256=Kas7xfx_rKEqWKMPHGMJRJOd-yddrYsjl--CmlMAX9k,33945
+ paddlex/inference/models/common/static_infer.py,sha256=JwCnWYNqjWC3zc27E9KwAm5ZvlfITASY5Zpm-P-MFk8,34019
  paddlex/inference/models/common/tokenizer/__init__.py,sha256=WyNKDrT6xhWwHmgVVaWYig_TV2xOQIVUHdsmkKjLjs4,940
  paddlex/inference/models/common/tokenizer/bert_tokenizer.py,sha256=xPEENMCnZq-0ofMA5Ylxu9qcFybIc6fEOXqEue3ogDU,25274
  paddlex/inference/models/common/tokenizer/clip_tokenizer.py,sha256=g9YU0PmxppCoIi9mub_3AQDBt8kzrBblKrDig5UDTZE,22426
@@ -543,7 +543,7 @@ paddlex/inference/pipelines/layout_parsing/layout_objects.py,sha256=Qo9qu780UPX9
  paddlex/inference/pipelines/layout_parsing/pipeline.py,sha256=Qmqtf0Et30CFnNxNDtGiMGSYGxNhUezSZKIsyAloTiA,25847
  paddlex/inference/pipelines/layout_parsing/pipeline_v2.py,sha256=9SSs8NtgCM79YPrWIXbomROBK1IhUemZpoRWCwAUQhY,60939
  paddlex/inference/pipelines/layout_parsing/result.py,sha256=R6wHENrYfrAbJfwZ-bi-_ha4EaU4hzsRNuwqKiH9Mhc,8688
- paddlex/inference/pipelines/layout_parsing/result_v2.py,sha256=4-VtKZS5iIPFl9-9306KpN-Ml_JWTKGGV9t1tzBTCoE,18722
+ paddlex/inference/pipelines/layout_parsing/result_v2.py,sha256=IZdn7N64s6DQc3eftxiDhRiBn8nAXMa22KJDFhHq8lk,18903
  paddlex/inference/pipelines/layout_parsing/setting.py,sha256=k3X-IRYbj1QIH9WPpn7TjogUYRYnskx__Z2kdp-iU9Y,2451
  paddlex/inference/pipelines/layout_parsing/utils.py,sha256=7jCatSn08b_ftLX0-1VKrzdQw1a4Xl3pum1i90o0L0I,27062
  paddlex/inference/pipelines/layout_parsing/xycut_enhanced/__init__.py,sha256=VG_OmZclRXlQrnMEm7Ovu4Va31zWaQ1S1Tbqr-_-dvQ,653
@@ -597,7 +597,7 @@ paddlex/inference/pipelines/video_detection/__init__.py,sha256=5fwogRCREggvQgge-
  paddlex/inference/pipelines/video_detection/pipeline.py,sha256=k-QJZl2t-J2oboDfeWZ812P8TqVM1T3Lb4KsRV9xHrQ,3391
  paddlex/inference/serving/__init__.py,sha256=toJfQp9IogLuJwLzprv8lieB-EERPqU4B0FJv8ZoNec,685
  paddlex/inference/serving/basic_serving/__init__.py,sha256=_NwYgeYL6HTpLWAxf7aRwRP-rBfP-Fsaff3vZe0GUkg,739
- paddlex/inference/serving/basic_serving/_app.py,sha256=xgvF7P29bDJKcoETvVoxD-ccwcX24CsfmByErrjAa4o,8598
+ paddlex/inference/serving/basic_serving/_app.py,sha256=30XG3-E5ay7GZQB2-6A2xY3tWV-WqC0fa0FptJaAndQ,8630
  paddlex/inference/serving/basic_serving/_server.py,sha256=c_nqZFecjAIQxw2huCKzLWZ_BSO_pFh9xaUSGZ1WrTg,1484
  paddlex/inference/serving/basic_serving/_pipeline_apps/__init__.py,sha256=z5_SMYmGTeBJTCwbipHBzuitWjraIEpy63y4od9Wtbg,1707
  paddlex/inference/serving/basic_serving/_pipeline_apps/anomaly_detection.py,sha256=9eDaXVlXC5sEvmBGDvtE4Lv6bz0riZvT48nq7ccFCAE,2374
@@ -641,7 +641,7 @@ paddlex/inference/serving/infra/__init__.py,sha256=lEGWWikF7G1wkk5M--CU2QxJaqbTd
  paddlex/inference/serving/infra/config.py,sha256=-6StYn1pF_VQdCWsthZ5b86O-m31ye_KWp7j-mwc9N8,1152
  paddlex/inference/serving/infra/models.py,sha256=KXxvV39dGzrbGeHDacZMGvtlcKPF4PEkjtuov0Vt9y4,1980
  paddlex/inference/serving/infra/storage.py,sha256=8Pw5mhvornnF2NnwOblRX5N_FGMrCvCkz0WfmQX7bXM,5379
- paddlex/inference/serving/infra/utils.py,sha256=8Vz3xYVcoZ_QEJAHlhGEakfojqvvZuHEi0tCBfgklIY,8457
+ paddlex/inference/serving/infra/utils.py,sha256=l-VMB2U3hau30O4JE6OKYpyjXOzF2PWLbUgWwo1FK2E,8585
  paddlex/inference/serving/schemas/__init__.py,sha256=lEGWWikF7G1wkk5M--CU2QxJaqbTdD__ML8ZJiKyVI4,609
  paddlex/inference/serving/schemas/anomaly_detection.py,sha256=407q-QdNYiCHSjuGnbnndoFK6CXWN4Y8HfzVaIzE_8U,1189
  paddlex/inference/serving/schemas/doc_preprocessor.py,sha256=rCYnssIaIpnulPGFRTE15U5zl7l6ylWzqpMJl1QEvVI,1630
@@ -685,14 +685,14 @@ paddlex/inference/utils/__init__.py,sha256=lEGWWikF7G1wkk5M--CU2QxJaqbTdD__ML8ZJ
  paddlex/inference/utils/benchmark.py,sha256=l2ATIuPwAd1uD4rZgADbt4HKEiobeFLm3P-bIbKtD4s,12932
  paddlex/inference/utils/color_map.py,sha256=mU1FdhHuyiJBn_thBkSndiMJtfkn3i3rLtPAPNPFid0,2953
  paddlex/inference/utils/get_pipeline_path.py,sha256=LCjXho0ix4jgG-sJ1DyKIAFfC8Yv2cj6hBlA3eOFJIo,985
- paddlex/inference/utils/hpi.py,sha256=Ways_Jq68cv_IqScjsy-6XEmPDlPQFtWScuywqBbjSE,9580
- paddlex/inference/utils/hpi_model_info_collection.json,sha256=ysbGt7uZ6QftLdY1uQiUqkjCcdR1UbiGq2Ah0CXaNaE,51795
+ paddlex/inference/utils/hpi.py,sha256=q5AcymBdbui9R2ExXEku4L1O3MiksSpK7gyFN5kqvEI,10514
+ paddlex/inference/utils/hpi_model_info_collection.json,sha256=dcbnWTTWqCcjT0NMGPYK7IEUJ07A2Id4vokP2ZaUgV0,51740
  paddlex/inference/utils/misc.py,sha256=qFrRum5-0jxue1RuLkF3KOFZdRCn3Yoh2DERb_cHfAo,812
  paddlex/inference/utils/mkldnn_blocklist.py,sha256=pYSebu-UYAU4XnTC19SVWcfsvkpxLlhbvcjCZHLkHIw,1800
  paddlex/inference/utils/model_paths.py,sha256=uRsEO-uHEd4xKQZqNyU5T7Vp6QI6iYZ5anZMXXpmKAg,1803
  paddlex/inference/utils/new_ir_blocklist.py,sha256=wdN1jiRWGo-YQvzoMOLhsqDjAcqhhkwfPBZEJ3-ldAw,880
  paddlex/inference/utils/official_models.py,sha256=Wn3PxkmrB6Zk2s0YAL7P9AJIRa7Axn49Ewum9VU90KE,44631
- paddlex/inference/utils/pp_option.py,sha256=HZeS7y5kc3exLUQlGp5jeMx1JxsXwxuGYOdOSmfJfg0,13067
+ paddlex/inference/utils/pp_option.py,sha256=SZNUrTE3MrvtMZCo55CL5hdZ_3DfJCs5bOU_lJOBsoo,13769
  paddlex/inference/utils/trt_blocklist.py,sha256=vaFJs6gvkMncYKTL5R6C93Nq2eYAdeBNcmJ4-abSFa0,1465
  paddlex/inference/utils/trt_config.py,sha256=995uGRdKFyaU9EB4jLDiS85EjVLXmsCyL7gJIl3c8h0,12661
  paddlex/inference/utils/io/__init__.py,sha256=KE8WngZIEjrOPEGeN6AEwKU-yt5nReDRWIBcmiRPM5s,939
@@ -1072,7 +1072,7 @@ paddlex/utils/config.py,sha256=mMP-WHMBB98hW-paFSc8vDnR7h87UpAe6X34W6ggIlQ,6255
  paddlex/utils/custom_device_list.py,sha256=_9t81bfHijsjpk23_anibl8r484iYdhENsdETg0obj0,7216
  paddlex/utils/deps.py,sha256=HL78ZP8oX6urGDoK-KA8A0ExJvitAv7CbIAXqxzWz24,7905
  paddlex/utils/device.py,sha256=fETdmBa3R_8MYnrFOuM3xjul2mhXNsJR09KXdo4ZNJA,6236
- paddlex/utils/download.py,sha256=CC7TTOaOFXfcWCAg6CRJ-ECMIF9QVuvWFCOe39X2dhM,6402
+ paddlex/utils/download.py,sha256=rOmnb8obglI9dA7F86161xPQE5qOdbzg0LqcLEVs3wE,6558
  paddlex/utils/env.py,sha256=dVOm9yQgXOGd4xFOFAmBHcKMDXCq-cuv3VDilGv-iHk,1617
  paddlex/utils/file_interface.py,sha256=KfMl_NQIpRGxBL9bRd54OK60RkSesDBQFeMftdDlVt8,6523
  paddlex/utils/flags.py,sha256=ieTQVpVYsR2mSmFQFfXJT2zcMR4vxGhHfevBJiJg6ik,2890
@@ -1089,9 +1089,9 @@ paddlex/utils/errors/__init__.py,sha256=1ooQ6m7PWkmniQbq4xVMVubjBKlMbZzoIvoPlKBo
  paddlex/utils/errors/dataset_checker.py,sha256=hnLeqMyMLuQQVWhHsfakpcZAN4Tu2ULaqtlQ9BzC1pI,2200
  paddlex/utils/errors/others.py,sha256=5RwDvxq4_K4UHMFRldrbwoDMnW8mW2KATinIwac7v-4,4303
  paddlex/utils/fonts/__init__.py,sha256=o47S4aK6ZKt5pfCc1_paMMig4fcSwgWX4m1BN4ybQ2Q,3297
- paddlex-3.0.2.dist-info/LICENSE,sha256=rMw2yBesXt5HPQVzLhSvwRy25V71kZvIJrZT9jEWUEM,11325
- paddlex-3.0.2.dist-info/METADATA,sha256=l1M2CluJyeNTy7Njg1ELwzC7GHnPlUrPfGdUbrrvjNs,76648
- paddlex-3.0.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- paddlex-3.0.2.dist-info/entry_points.txt,sha256=65F9jsc5DpDHIv2DjPIVwnTf-zSokvgExdu3HKjhdfY,60
- paddlex-3.0.2.dist-info/top_level.txt,sha256=KWSxMIrEchP3dxsAjzSRR-jmnjW0YGHECxG9OA5YB_g,8
- paddlex-3.0.2.dist-info/RECORD,,
+ paddlex-3.0.3.dist-info/LICENSE,sha256=rMw2yBesXt5HPQVzLhSvwRy25V71kZvIJrZT9jEWUEM,11325
+ paddlex-3.0.3.dist-info/METADATA,sha256=s4W-3ZVs0Tms3in_3Q-JLdmCugqc6LXvmnXYSVsevdI,76648
+ paddlex-3.0.3.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ paddlex-3.0.3.dist-info/entry_points.txt,sha256=65F9jsc5DpDHIv2DjPIVwnTf-zSokvgExdu3HKjhdfY,60
+ paddlex-3.0.3.dist-info/top_level.txt,sha256=KWSxMIrEchP3dxsAjzSRR-jmnjW0YGHECxG9OA5YB_g,8
+ paddlex-3.0.3.dist-info/RECORD,,