xparse-client 0.2.9__py3-none-any.whl → 0.2.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
example/run_pipeline.py CHANGED
@@ -84,7 +84,7 @@ def run_with_config():
 
 def run_with_manual_setup():
     """Manually create the Source, Destination, and Pipeline"""
-    from xparse_client import ChunkConfig, EmbedConfig, ParseConfig, Stage, PipelineConfig, LocalDestination
+    from xparse_client import ChunkConfig, EmbedConfig, ParseConfig, Stage, PipelineConfig, LocalDestination, QdrantDestination
 
     # Create the S3 data source
     # source = S3Source(
@@ -128,14 +128,14 @@ def run_with_manual_setup():
     #     prefix='',
     #     region='cn-east-3'
     # )
-    # source = S3Source(
-    #     endpoint='https://s3.us-east-1.amazonaws.com',
-    #     access_key='AKIA6QUE3TVZADUWA4PO',
-    #     secret_key='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',
-    #     bucket='textin-xparse',
-    #     prefix='',
-    #     region='us-east-1'
-    # )
+    source = S3Source(
+        endpoint='https://s3.us-east-1.amazonaws.com',
+        access_key='AKIA6QUE3TVZADUWA4PO',
+        secret_key='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',
+        bucket='textin-test',
+        prefix='',
+        region='us-east-1'
+    )
     # source = S3Source(
     #     endpoint='http://127.0.0.1:9000',
     #     access_key='',
@@ -158,11 +158,11 @@ def run_with_manual_setup():
     #     username='',   # username, fill in as appropriate
    #     password=''    # password, fill in as appropriate
     # )
-    source = LocalSource(
-        directory='/Users/ke_wang/Documents/doc',
-        recursive=False,
-        pattern=['*']  # wildcards supported: *.pdf, *.docx, **/*.txt
-    )
+    # source = LocalSource(
+    #     directory='/Users/ke_wang/Documents/doc',
+    #     recursive=True,
+    #     pattern=['**/*.png']  # wildcards supported: *.pdf, *.docx, **/*.txt
+    # )
 
     # Create the Milvus destination
     # destination = MilvusDestination(
@@ -175,12 +175,12 @@ def run_with_manual_setup():
     #     output_dir='./result'
     # )
 
-    destination = MilvusDestination(
-        db_path='https://in03-5388093d0db1707.serverless.ali-cn-hangzhou.cloud.zilliz.com.cn',  # Zilliz connection URL
-        collection_name='textin_test_3_copy',  # collection name in the database
-        dimension=1024,  # vector dimension; must match what the embed API returns
-        api_key='872c3f5b3f3995c80dcda5c3d34f1f608815aef7671b6ee391ab37e40e79c892ce56d9c8c6565a03a3fd66da7e11b67f384c5c46'  # Zilliz Cloud API Key
-    )
+    # destination = MilvusDestination(
+    #     db_path='https://in03-5388093d0db1707.serverless.ali-cn-hangzhou.cloud.zilliz.com.cn',  # Zilliz connection URL
+    #     collection_name='textin_test_3_copy',  # collection name in the database
+    #     dimension=1024,  # vector dimension; must match what the embed API returns
+    #     api_key='872c3f5b3f3995c80dcda5c3d34f1f608815aef7671b6ee391ab37e40e79c892ce56d9c8c6565a03a3fd66da7e11b67f384c5c46'  # Zilliz Cloud API Key
+    # )
 
     # destination = S3Destination(
     #     endpoint='https://cos.ap-shanghai.myqcloud.com',
@@ -190,12 +190,19 @@ def run_with_manual_setup():
     #     prefix='result',
     #     region='ap-shanghai'
     # )
+
+    destination = QdrantDestination(
+        url='https://1325db22-7dd8-4fc9-930b-f969d4963b3d.us-east-1-1.aws.cloud.qdrant.io:6333',
+        collection_name='textin1',
+        dimension=1024,
+        api_key='eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhY2Nlc3MiOiJtIn0.TGnFB1pAD7c7IqSOvTpgCPpHXSnnoKhWEQ5pQ8DrBnI',
+    )
 
     # Create the configuration using the new stages format
     stages = [
         Stage(
             type='parse',
-            config=ParseConfig(provider='textin')
+            config=ParseConfig(provider='paddle')
         ),
         Stage(
             type='chunk',
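
In short, the example now writes embeddings to Qdrant instead of Milvus and switches the parse provider to `paddle`. A minimal sketch of the same destination setup, assuming the endpoint and key come from hypothetical `QDRANT_URL` / `QDRANT_API_KEY` environment variables rather than being hardcoded as in the shipped example:

```python
import os

from xparse_client import QdrantDestination

# Same constructor the example calls; the environment variable names are illustrative only.
destination = QdrantDestination(
    url=os.environ['QDRANT_URL'],              # e.g. https://<cluster>.cloud.qdrant.io:6333
    collection_name='textin1',
    dimension=1024,                            # must match the embed API output dimension
    api_key=os.environ.get('QDRANT_API_KEY'),  # may be None for a local, unauthenticated Qdrant
)
```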
xparse_client/__init__.py CHANGED
@@ -10,7 +10,7 @@ logging.basicConfig(
 
 from .pipeline.config import ParseConfig, ChunkConfig, EmbedConfig, Stage, PipelineStats, PipelineConfig
 from .pipeline.sources import Source, S3Source, LocalSource, FtpSource, SmbSource
-from .pipeline.destinations import Destination, MilvusDestination, LocalDestination, S3Destination
+from .pipeline.destinations import Destination, MilvusDestination, QdrantDestination, LocalDestination, S3Destination
 from .pipeline.pipeline import Pipeline, create_pipeline_from_config
 
 __all__ = [
@@ -27,6 +27,7 @@ __all__ = [
     'SmbSource',
     'Destination',
     'MilvusDestination',
+    'QdrantDestination',
     'LocalDestination',
     'S3Destination',
     'Pipeline',
xparse_client/pipeline/destinations.py CHANGED
@@ -13,7 +13,8 @@ from typing import List, Dict, Any
 
 from botocore.config import Config
 from pymilvus import MilvusClient
-
+from qdrant_client import QdrantClient
+from qdrant_client.models import Distance, VectorParams, PointStruct, PayloadSchemaType
 
 logger = logging.getLogger(__name__)
 
@@ -127,7 +128,7 @@ class MilvusDestination(Destination):
                         print(f"  ✓ Deleted existing records: record_id={record_id}, removed {deleted_count} rows")
                         logger.info(f"Deleted existing Milvus records: record_id={record_id}, removed {deleted_count} rows")
                     else:
-                        print(f"  → No existing records found: record_id={record_id}")
+                        print(f"  → Ready to write records: record_id={record_id}")
                 except Exception as e:
                     print(f"  ! Failed to delete existing records: {str(e)}")
                     logger.warning(f"Failed to delete existing Milvus records: record_id={record_id}, {str(e)}")
@@ -296,9 +297,190 @@ class S3Destination(Destination):
             return False
 
 
+class QdrantDestination(Destination):
+    """Qdrant vector database destination"""
+
+    def __init__(self, url: str, collection_name: str, dimension: int, api_key: str = None, prefer_grpc: bool = False):
+        """Initialize the Qdrant destination
+
+        Args:
+            url: Qdrant server address (e.g. 'http://localhost:6333' or 'https://xxx.qdrant.io')
+            collection_name: collection name
+            dimension: vector dimension
+            api_key: API key (optional, used for Qdrant Cloud)
+            prefer_grpc: whether to prefer gRPC (default False, i.e. HTTP)
+        """
+
+        self.url = url
+        self.collection_name = collection_name
+        self.dimension = dimension
+
+        client_kwargs = {'url': url}
+        if api_key:
+            client_kwargs['api_key'] = api_key
+        if prefer_grpc:
+            client_kwargs['prefer_grpc'] = True
+
+        self.client = QdrantClient(**client_kwargs)
+
+        # Check for, or create, the collection
+        try:
+            collections = self.client.get_collections()
+            collection_exists = any(col.name == collection_name for col in collections.collections)
+
+            if not collection_exists:
+                self.client.create_collection(
+                    collection_name=collection_name,
+                    vectors_config=VectorParams(
+                        size=dimension,
+                        distance=Distance.COSINE
+                    )
+                )
+                # Create an index on record_id for filtered queries
+                try:
+                    self.client.create_payload_index(
+                        collection_name=collection_name,
+                        field_name="record_id",
+                        field_schema=PayloadSchemaType.KEYWORD
+                    )
+                    print(f"✓ Qdrant collection created: {collection_name} (dimension: {dimension})")
+                except Exception as e:
+                    logger.warning(f"Failed to create record_id index (it may already exist): {str(e)}")
+                    print(f"✓ Qdrant collection created: {collection_name} (dimension: {dimension})")
+            else:
+                print(f"✓ Qdrant collection exists: {collection_name}")
+                # Make sure the record_id index exists (create it if missing)
+                try:
+                    self.client.create_payload_index(
+                        collection_name=collection_name,
+                        field_name="record_id",
+                        field_schema=PayloadSchemaType.KEYWORD
+                    )
+                except Exception as e:
+                    # The index may already exist; ignore the error
+                    logger.debug(f"record_id index may already exist: {str(e)}")
+
+            logger.info(f"Connected to Qdrant: {url}/{collection_name}")
+        except Exception as e:
+            print(f"✗ Failed to connect to Qdrant: {str(e)}")
+            logger.error(f"Failed to connect to Qdrant: {str(e)}")
+            raise
+
+    def write(self, data: List[Dict[str, Any]], metadata: Dict[str, Any]) -> bool:
+        try:
+            # If metadata carries a record_id, first delete existing records with the same record_id
+            record_id = metadata.get('record_id')
+            if record_id:
+                try:
+                    # Find and delete all records with the same record_id
+                    # Use a dict-style filter (better compatibility)
+                    scroll_result = self.client.scroll(
+                        collection_name=self.collection_name,
+                        scroll_filter={
+                            "must": [
+                                {
+                                    "key": "record_id",
+                                    "match": {"value": record_id}
+                                }
+                            ]
+                        },
+                        limit=10000  # assume at most 10000 records are deleted per call
+                    )
+
+                    if scroll_result[0]:  # records exist
+                        point_ids = [point.id for point in scroll_result[0]]
+                        self.client.delete(
+                            collection_name=self.collection_name,
+                            points_selector=point_ids
+                        )
+                        print(f"  ✓ Deleted existing records: record_id={record_id}, removed {len(point_ids)} rows")
+                        logger.info(f"Deleted existing Qdrant records: record_id={record_id}, removed {len(point_ids)} rows")
+                    else:
+                        print(f"  → Ready to write records: record_id={record_id}")
+                except Exception as e:
+                    print(f"  ! Failed to delete existing records: {str(e)}")
+                    logger.warning(f"Failed to delete existing Qdrant records: record_id={record_id}, {str(e)}")
+                    # Continue with the write; do not abort because the delete failed
+            else:
+                print(f"  → No record_id")
+                logger.warning(f"No record_id")
+                return False
+
+            points = []
+            for item in data:
+                # Get the element-level metadata
+                element_metadata = item.get('metadata', {})
+
+                if 'embeddings' in item and item['embeddings']:
+                    element_id = item.get('element_id') or item.get('id') or str(uuid.uuid4())
+
+                    # Build the payload (metadata)
+                    payload = {
+                        'text': item.get('text', ''),
+                        'record_id': record_id,
+                    }
+
+                    # Merge file-level metadata with element-level metadata
+                    # File-level metadata takes precedence
+                    merged_metadata = {**element_metadata, **metadata}
+
+                    # Add metadata fields to the payload
+                    # Skip fixed fields that already exist to avoid conflicts
+                    fixed_fields = {'embeddings', 'text', 'element_id', 'record_id', 'created_at', 'metadata'}
+                    for key, value in merged_metadata.items():
+                        if key not in fixed_fields:
+                            # Special-case the data_source field: flatten it recursively if it is a dict
+                            if key == 'data_source' and isinstance(value, dict):
+                                # Recursively flatten the data_source dict, including nested dicts
+                                flattened = _flatten_dict(value, 'data_source', fixed_fields)
+                                payload.update(flattened)
+                            elif key == 'coordinates' and isinstance(value, list):
+                                payload[key] = value
+                            elif isinstance(value, (dict, list)):
+                                # Qdrant supports JSON-style payloads
+                                payload[key] = value
+                            else:
+                                payload[key] = value
+
+                    # Create the Point (an id is required)
+                    # Qdrant point ids may be integers or UUID strings
+                    # If element_id is already a UUID, use it directly; otherwise derive a stable UUID5 from element_id
+                    try:
+                        # Try to parse element_id as a UUID
+                        point_id = str(uuid.UUID(element_id))
+                    except (ValueError, TypeError):
+                        # Not a valid UUID: derive a stable UUID5 from element_id
+                        point_id = str(uuid.uuid5(uuid.NAMESPACE_URL, str(element_id)))
+
+                    point = PointStruct(
+                        id=point_id,
+                        vector=item['embeddings'],
+                        payload=payload
+                    )
+                    points.append(point)
+
+            if not points:
+                print(f"  ! Warning: no valid vector data")
+                return False
+
+            # Bulk insert
+            self.client.upsert(
+                collection_name=self.collection_name,
+                points=points
+            )
+            print(f"  ✓ Wrote {len(points)} points to Qdrant")
+            logger.info(f"Wrote {len(points)} points to Qdrant")
+            return True
+        except Exception as e:
+            print(f"  ✗ Failed to write to Qdrant: {str(e)}")
+            logger.error(f"Failed to write to Qdrant: {str(e)}")
+            return False
+
+
 __all__ = [
     'Destination',
     'MilvusDestination',
+    'QdrantDestination',
     'LocalDestination',
     'S3Destination',
 ]
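
A minimal standalone sketch of driving the new destination's `write()` path directly, assuming a local Qdrant instance and a single fabricated element shaped like the records the pipeline emits (text, embeddings, element-level metadata) plus a file-level metadata dict carrying the `record_id`:

```python
from xparse_client import QdrantDestination

# Local Qdrant, no api_key needed (assumes something like `docker run -p 6333:6333 qdrant/qdrant`).
destination = QdrantDestination(
    url='http://localhost:6333',
    collection_name='demo',
    dimension=4,  # toy dimension; in a real pipeline this matches the embed API (e.g. 1024)
)

# One fabricated element: the keys mirror what write() reads above.
data = [{
    'element_id': 'demo-0001',
    'text': 'hello qdrant',
    'embeddings': [0.1, 0.2, 0.3, 0.4],
    'metadata': {'page_number': 1},
}]

# File-level metadata; record_id is required, otherwise write() returns False.
ok = destination.write(data, metadata={'record_id': 'doc-001', 'filename': 'demo.pdf'})
print(ok)
```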
xparse_client/pipeline/pipeline.py CHANGED
@@ -12,7 +12,7 @@ import requests
 
 from .config import ParseConfig, ChunkConfig, EmbedConfig, Stage, PipelineStats, PipelineConfig
 from .sources import Source, S3Source, LocalSource, FtpSource, SmbSource
-from .destinations import Destination, MilvusDestination, LocalDestination, S3Destination
+from .destinations import Destination, MilvusDestination, QdrantDestination, LocalDestination, S3Destination
 
 
 logger = logging.getLogger(__name__)
@@ -145,6 +145,14 @@ class Pipeline:
                 'dimension': self.destination.dimension
             })
             # api_key and token are not stored on the object and cannot be recovered
+        elif isinstance(self.destination, QdrantDestination):
+            config['destination'].update({
+                'url': self.destination.url,
+                'collection_name': self.destination.collection_name,
+                'dimension': self.destination.dimension,
+                'prefer_grpc': getattr(self.destination, 'prefer_grpc', False)
+            })
+            # api_key is not stored on the object and cannot be recovered
         elif isinstance(self.destination, LocalDestination):
             config['destination'].update({
                 'output_dir': str(self.destination.output_dir)
@@ -503,6 +511,14 @@ def create_pipeline_from_config(config: Dict[str, Any]) -> Pipeline:
             api_key=dest_config.get('api_key'),
             token=dest_config.get('token')
         )
+    elif dest_config['type'] == 'qdrant':
+        destination = QdrantDestination(
+            url=dest_config['url'],
+            collection_name=dest_config['collection_name'],
+            dimension=dest_config['dimension'],
+            api_key=dest_config.get('api_key'),
+            prefer_grpc=dest_config.get('prefer_grpc', False)
+        )
     elif dest_config['type'] == 'local':
         destination = LocalDestination(
             output_dir=dest_config['output_dir']
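
Since the exported config never contains the Qdrant `api_key` (see the export branch above), it has to be re-supplied before a saved config is rebuilt. A sketch of that destination block, with placeholder values and the secret re-injected before handing the full config back to `create_pipeline_from_config`:

```python
# Hypothetical destination block as it would appear in a saved pipeline config;
# the keys are exactly the ones the 'qdrant' branch above reads.
saved_destination = {
    'type': 'qdrant',
    'url': 'https://<cluster>.cloud.qdrant.io:6333',  # placeholder cluster URL
    'collection_name': 'textin1',
    'dimension': 1024,
    'prefer_grpc': False,
}
saved_destination['api_key'] = 'your-api-key'  # re-supply the secret before use

# The surrounding config (source / stages blocks) is outside this diff; once assembled,
# it is passed to create_pipeline_from_config(), whose 'qdrant' branch builds the destination.
```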
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: xparse-client
-Version: 0.2.9
+Version: 0.2.10
 Summary: Next-generation document processing AI infra for Agents and RAG
 License-Expression: MIT
 Project-URL: Homepage, https://gitlab.intsig.net/xparse1/xparse-pipeline
@@ -13,6 +13,7 @@ Requires-Dist: boto3
 Requires-Dist: pymilvus[milvus_lite]
 Requires-Dist: requests
 Requires-Dist: pysmb
+Requires-Dist: qdrant-client
 Dynamic: license-file
 
 # xParse
@@ -24,7 +25,7 @@ xParse's synchronous pipeline implementation, supporting multiple data sources and outputs.
 ## 🌟 Features
 
 - **Flexible data sources**: supports S3-compatible object storage, the local filesystem, and FTP/SMB filesystems
-- **Flexible outputs**: supports Milvus/Zilliz vector databases, S3-compatible object storage, and the local filesystem
+- **Flexible outputs**: supports Milvus/Zilliz/Qdrant vector databases, S3-compatible object storage, and the local filesystem
 - **Unified Pipeline API**: completes the full parse → chunk → embed flow in a single call to `/api/xparse/pipeline`
 - **Configurable processing**: flexible configuration of parse, chunk, and embed parameters
 - **Detailed statistics**: returns processing stats for each stage
@@ -51,7 +52,7 @@ xParse's synchronous pipeline implementation, supporting multiple data sources and outputs.
 │  [embeddings + stats]
 
 ┌──────────────┐
-│ Destination  │  Destination (Milvus/Zilliz/local)
+│ Destination  │  Destination (Milvus/Zilliz/Qdrant/local)
 └──────────────┘
 ```
 
@@ -69,7 +70,7 @@ pip install --upgrade xparse-client
 
 #### Code-based configuration
 ```python
-from xparse_client import ParseConfig, ChunkConfig, EmbedConfig, Stage, Pipeline, S3Source, MilvusDestination
+from xparse_client import ParseConfig, ChunkConfig, EmbedConfig, Stage, Pipeline, S3Source, MilvusDestination, QdrantDestination
 
 # Create the configuration using the new stages format
 stages = [
@@ -354,6 +355,28 @@ destination = MilvusDestination(
 )
 ```
 
+#### Qdrant vector store
+
+```python
+destination = QdrantDestination(
+    url='http://localhost:6333',       # Qdrant server address (local or cloud)
+    collection_name='my_collection',   # collection name
+    dimension=1024,                    # vector dimension; must match what the embed API returns
+    api_key='your-api-key',            # optional, Qdrant Cloud API Key
+    prefer_grpc=False                  # optional, whether to prefer gRPC (default False)
+)
+```
+
+**Qdrant Cloud example:**
+```python
+destination = QdrantDestination(
+    url='https://xxxxxxx.us-east-1-0.aws.cloud.qdrant.io',
+    collection_name='my_collection',
+    dimension=1024,
+    api_key='your-api-key'
+)
+```
+
 #### Local filesystem destination
 
 Writes `json` files to the configured local directory.
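
For the Qdrant destination documented above, a run can be spot-checked directly with `qdrant-client`; a minimal sketch, assuming a local instance, the `my_collection` name from the example, and a placeholder record id:

```python
from qdrant_client import QdrantClient

# record_id is indexed by QdrantDestination, so the same dict-style filter it uses
# for deduplication also works for ad-hoc inspection.
client = QdrantClient(url='http://localhost:6333')
points, _next_page = client.scroll(
    collection_name='my_collection',
    scroll_filter={"must": [{"key": "record_id", "match": {"value": "your-record-id"}}]},
    limit=5,
    with_payload=True,
)
for point in points:
    print(point.id, point.payload.get('text', '')[:80])
```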
@@ -0,0 +1,13 @@
+example/run_pipeline.py,sha256=d4pPDqjiC9dPNh6nmArPOF7fPMY0a-jcvdgtNuV-_kM,15795
+example/run_pipeline_test.py,sha256=pxsNiq_LmP6M4R7tTuja0u-Lu7fW-wIBU1uBf0-agQI,14845
+xparse_client/__init__.py,sha256=C2XLxkCoONl6_B1FmDhWRw84TqOL4pZF20br-K26SSY,1721
+xparse_client/pipeline/__init__.py,sha256=TVlb2AGCNKP0jrv3p4ZLZCPKp68hTVMFi00DTdi6QAo,49
+xparse_client/pipeline/config.py,sha256=FFYq2a0dBWBEj70s2aInXOiQ5MwwHimd6SI2_tkp52w,4138
+xparse_client/pipeline/destinations.py,sha256=9UyZ8Ygjoe4yAq6-VZNZBoNYRbb3mahify3c1AdOHMY,20775
+xparse_client/pipeline/pipeline.py,sha256=ZspagUjiL5wnzGJq6A7riOU8qGXJMtg1fqPm9H09mkk,27272
+xparse_client/pipeline/sources.py,sha256=D-kLrSQ-qsFFFq7JC4sL3Y3Q3Q87Wcpv9R5K85YkDjE,22144
+xparse_client-0.2.10.dist-info/licenses/LICENSE,sha256=ckIP-MbocsP9nqYnta5KgfAicYF196B5TNdHIR6kOO0,1075
+xparse_client-0.2.10.dist-info/METADATA,sha256=gIY_PxB1pTxSlKJZjU7z1Iua6ZMtAfMfHFeztWp2zIw,28785
+xparse_client-0.2.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+xparse_client-0.2.10.dist-info/top_level.txt,sha256=bfX8BWo1sEEQVsI4Ql4Uu80vrfEh5zfajU9YqFTzxMo,22
+xparse_client-0.2.10.dist-info/RECORD,,
@@ -1,13 +0,0 @@
-example/run_pipeline.py,sha256=xZ8TLofrK7naEwBe-tiuotcQ8yKWUES_k9iCQcIOIYo,15446
-example/run_pipeline_test.py,sha256=pxsNiq_LmP6M4R7tTuja0u-Lu7fW-wIBU1uBf0-agQI,14845
-xparse_client/__init__.py,sha256=je1ena3HwLL4CRtLU4r6EAzoOIJthlPjTwshxZnzQDM,1677
-xparse_client/pipeline/__init__.py,sha256=TVlb2AGCNKP0jrv3p4ZLZCPKp68hTVMFi00DTdi6QAo,49
-xparse_client/pipeline/config.py,sha256=FFYq2a0dBWBEj70s2aInXOiQ5MwwHimd6SI2_tkp52w,4138
-xparse_client/pipeline/destinations.py,sha256=F0z1AgVIBOn0m32i4l7LCMkJE0IbBdlpykO_at_wLaE,11931
-xparse_client/pipeline/pipeline.py,sha256=IRTxN4YUJi9Wrm1G1ysGvcwsPsGh0inbquBH3nWYmAA,26477
-xparse_client/pipeline/sources.py,sha256=D-kLrSQ-qsFFFq7JC4sL3Y3Q3Q87Wcpv9R5K85YkDjE,22144
-xparse_client-0.2.9.dist-info/licenses/LICENSE,sha256=ckIP-MbocsP9nqYnta5KgfAicYF196B5TNdHIR6kOO0,1075
-xparse_client-0.2.9.dist-info/METADATA,sha256=Faj3fvt9Fc-EW9yFDewhpkqGVo_qSvL5N-tq1aIkkyk,28086
-xparse_client-0.2.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-xparse_client-0.2.9.dist-info/top_level.txt,sha256=bfX8BWo1sEEQVsI4Ql4Uu80vrfEh5zfajU9YqFTzxMo,22
-xparse_client-0.2.9.dist-info/RECORD,,