bisheng-langchain 0.2.2.1__py3-none-any.whl → 0.2.2.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
--- a/bisheng_langchain/chains/router/rule_router.py
+++ b/bisheng_langchain/chains/router/rule_router.py
@@ -1,3 +1,4 @@
+import asyncio
 from typing import Any, Callable, Dict, List, Union
 
 from langchain.callbacks.manager import Callbacks
@@ -41,7 +42,12 @@ class RuleBasedRouter(RouterChain):
         inputs: Union[Dict[str, Any], Any],
         callbacks: Callbacks = None,
     ) -> Route:
-        result = await self.rule_function(inputs)
+        """Route the inputs to the next chain based on the rule function."""
+        # If the rule function is a coroutine function, await it
+        if asyncio.iscoroutinefunction(self.rule_function):
+            result = await self.rule_function(inputs)
+        else:
+            result = self.rule_function(inputs)
         if not result.get('destination') or not result:
             return Route(None, result['next_inputs'])
         return Route(result['destination'], result['next_inputs'])
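The hunk above lets `rule_function` be either a plain function or a coroutine function, awaiting it only in the async case. A minimal, self-contained sketch of the same dispatch pattern (`dispatch`, `sync_rule`, and `async_rule` are illustrative names, not part of the package):

```python
import asyncio


async def dispatch(rule_function, inputs):
    # Await only when the rule is a coroutine function, as _aroute does above.
    if asyncio.iscoroutinefunction(rule_function):
        return await rule_function(inputs)
    return rule_function(inputs)


def sync_rule(inputs):
    return {'destination': 'chain_a', 'next_inputs': inputs}


async def async_rule(inputs):
    return {'destination': 'chain_b', 'next_inputs': inputs}


async def main():
    print(await dispatch(sync_rule, {'q': 1}))   # routed via the sync branch
    print(await dispatch(async_rule, {'q': 1}))  # routed via the await branch


asyncio.run(main())
```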
--- a/bisheng_langchain/chat_models/host_llm.py
+++ b/bisheng_langchain/chat_models/host_llm.py
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import json
+import logging
 import sys
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Union
 
@@ -13,11 +14,10 @@ from langchain.schema import ChatGeneration, ChatResult
 from langchain.schema.messages import (AIMessage, BaseMessage, ChatMessage, FunctionMessage,
                                        HumanMessage, SystemMessage)
 from langchain.utils import get_from_dict_or_env
+from langchain_core.language_models.llms import create_base_retry_decorator
 from langchain_core.pydantic_v1 import Field, root_validator
-from loguru import logger
+
 # from requests.exceptions import HTTPError
-from tenacity import (before_sleep_log, retry, retry_if_exception_type, stop_after_attempt,
-                      wait_exponential)
 
 # from .interface import MinimaxChatCompletion
 # from .interface.types import ChatInput
@@ -25,6 +25,8 @@ from tenacity import (before_sleep_log, retry, retry_if_exception_type, stop_aft
 if TYPE_CHECKING:
     import tiktoken
 
+logger = logging.getLogger(__name__)
+
 
 def _import_tiktoken() -> Any:
     try:
@@ -36,19 +38,15 @@ def _import_tiktoken() -> Any:
     return tiktoken
 
 
-def _create_retry_decorator(llm: BaseHostChatLLM) -> Callable[[Any], Any]:
+def _create_retry_decorator(
+    llm: BaseHostChatLLM,
+    run_manager: Optional[Union[AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun]] = None,
+) -> Callable[[Any], Any]:
 
-    min_seconds = 1
-    max_seconds = 20
-    # Wait 2^x * 1 second between each retry starting with
-    # 4 seconds, then up to 10 seconds, then 10 seconds afterwards
-    return retry(
-        reraise=True,
-        stop=stop_after_attempt(llm.max_retries),
-        wait=wait_exponential(multiplier=1, min=min_seconds, max=max_seconds),
-        retry=(retry_if_exception_type(Exception)),
-        before_sleep=before_sleep_log(logger, logger.level('WARNING')),
-    )
+    errors = [requests.exceptions.ReadTimeout, ValueError]
+    return create_base_retry_decorator(error_types=errors,
+                                       max_retries=llm.max_retries,
+                                       run_manager=run_manager)
 
 
 def _convert_dict_to_message(_dict: Mapping[str, Any]) -> BaseMessage:
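The hand-rolled tenacity decorator is replaced by `create_base_retry_decorator` from langchain_core, which retries only the listed error types and can report retries through a run manager. A usage sketch under the assumption that `langchain-core` and `requests` are installed (`flaky_call` is a hypothetical stand-in for the guarded HTTP request):

```python
import requests
from langchain_core.language_models.llms import create_base_retry_decorator

retry_decorator = create_base_retry_decorator(
    error_types=[requests.exceptions.ReadTimeout, ValueError],
    max_retries=3,
)

attempts = {'n': 0}


@retry_decorator
def flaky_call() -> str:
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise ValueError('transient failure')  # retried with backoff
    return 'ok'


print(flaky_call())  # succeeds on the third attempt
```

Note that unlike the old decorator, which retried on any `Exception`, only the listed error types are now retried.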
@@ -215,7 +213,7 @@ class BaseHostChatLLM(BaseChatModel):
         # print('messages:', messages)
         # print('functions:', kwargs.get('functions', []))
         if self.verbose:
-            logger.info(f'payload={params}')
+            logger.info('payload=%s', json.dumps(params, indent=2))
         try:
             resp = self.client.post(url=self.host_base_url, json=params)
             if resp.text.startswith('data:'):
@@ -275,7 +273,7 @@ class BaseHostChatLLM(BaseChatModel):
         try:
             async with self.client.apost(url=self.host_base_url, json=kwargs) as response:
                 if response.status != 200:
-                    raise ValueError(f'Error: {response.status}')
+                    raise ValueError(f'Error: {response.status} content: {response.text}')
                 async for txt in response.content.iter_any():
                     if b'\n' in txt:
                         for txt_ in txt.split(b'\n'):
@@ -297,7 +295,7 @@ class BaseHostChatLLM(BaseChatModel):
                     if text.startswith('{'):
                         yield (is_error, response[len('data:'):])
                     else:
-                        logger.info('agenerate_no_json text={}', text)
+                        logger.info('agenerate_no_json text=%s', text)
                     if is_error:
                         break
             elif response.startswith('{'):
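Both logging fixes above follow from the switch from loguru to the stdlib `logging` module: the stdlib interpolates printf-style `%s` placeholders lazily, while `{}` braces are loguru syntax and are not interpolated by the stdlib. A minimal illustration:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

text = 'hello'
# stdlib logging formats '%s' placeholders only when the record is emitted:
logger.info('agenerate_no_json text=%s', text)
# '{}' is loguru's placeholder syntax; under stdlib logging it would not be
# interpolated, which is why the hunk above rewrites it to '%s'.
```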
@@ -315,7 +313,7 @@ class BaseHostChatLLM(BaseChatModel):
         """Generate chat completion with retry."""
         message_dicts, params = self._create_message_dicts(messages, stop)
         params = {**params, **kwargs}
-        if self.streaming:
+        if self.streaming and 'infer' not in self.host_base_url:
             inner_completion = ''
             role = 'assistant'
             params['stream'] = True
--- a/bisheng_langchain/document_loaders/elem_pdf.py
+++ b/bisheng_langchain/document_loaders/elem_pdf.py
@@ -2,22 +2,15 @@
 """Loads PDF with semantic splitter."""
 import io
 import json
-import logging
 import os
 import re
-import tempfile
 import time
-from abc import ABC
 from collections import Counter
 from copy import deepcopy
-from pathlib import Path
-from typing import Any, Iterator, List, Mapping, Optional, Union
-from urllib.parse import urlparse
+from typing import List, Optional, Union
 
 import fitz
 import numpy as np
-import pypdfium2
-import requests
 from bisheng_langchain.document_loaders.parsers import LayoutParser
 from langchain.docstore.document import Document
 from langchain.document_loaders.blob_loaders import Blob
@@ -72,8 +65,7 @@ def order_by_tbyx(block_info, th=10):
     for i in range(len(res) - 1):
         for j in range(i, 0, -1):
             # restore the order using the
-            if (abs(res[j + 1][1] - res[j][1]) < th
-                    and (res[j + 1][0] < res[j][0])):
+            if (abs(res[j + 1][1] - res[j][1]) < th and (res[j + 1][0] < res[j][0])):
                 tmp = deepcopy(res[j])
                 res[j] = deepcopy(res[j + 1])
                 res[j + 1] = deepcopy(tmp)
@@ -207,8 +199,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
                  html_output_file: str = None,
                  verbose: bool = False) -> None:
         """Initialize with a file path."""
-        self.layout_parser = LayoutParser(api_key=layout_api_key,
-                                          api_base_url=layout_api_url)
+        self.layout_parser = LayoutParser(api_key=layout_api_key, api_base_url=layout_api_url)
         self.with_columns = with_columns
         self.is_join_table = is_join_table
         self.support_rotate = support_rotate
@@ -286,8 +277,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
         texts = []
         for b in blocks:
             if b[-1] != IMG_BLOCK_TYPE:
-                text = re.sub(RE_MULTISPACE_INCLUDING_NEWLINES, ' ', b[4]
-                              or '').strip()
+                text = re.sub(RE_MULTISPACE_INCLUDING_NEWLINES, ' ', b[4] or '').strip()
                 if text:
                     texts.append(text)
                     text_ploys.append(Rect(b[0], b[1], b[2], b[3]))
@@ -301,8 +291,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
             layout_info = json.loads(layout.page_content)
             for info in layout_info:
                 bbs = info['bbox']
-                coords = ((bbs[0], bbs[1]), (bbs[2], bbs[3]), (bbs[4], bbs[5]),
-                          (bbs[6], bbs[7]))
+                coords = ((bbs[0], bbs[1]), (bbs[2], bbs[3]), (bbs[4], bbs[5]), (bbs[6], bbs[7]))
                 semantic_polys.append(Polygon(coords))
                 semantic_labels.append(info['category_id'])
 
@@ -350,8 +339,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
                 rs = text_rects[ind]
                 ord_ind = np.min(ori_orders)
                 mask[ind] = 1
-                new_block_info.append(
-                    (rect[0], rect[1], rect[2], rect[3], ts, rs, ord_ind))
+                new_block_info.append((rect[0], rect[1], rect[2], rect[3], ts, rs, ord_ind))
 
             elif np.all(mask[start:end] == 0):
                 rect = merge_rects(text_rects[start:end])
@@ -367,16 +355,14 @@ class PDFWithSemanticLoader(BasePDFLoader):
                 rs = rs[arg_ind]
 
                 mask[start:end] = 1
-                new_block_info.append(
-                    (rect[0], rect[1], rect[2], rect[3], ts, rs, ord_ind))
+                new_block_info.append((rect[0], rect[1], rect[2], rect[3], ts, rs, ord_ind))
 
         for i in range(texts_cnt):
             if mask[i] == 0:
                 b = blocks[i]
                 r = np.asarray([b[0], b[1], b[2], b[3]])
                 ord_ind = b[-2]
-                new_block_info.append(
-                    (b[0], b[1], b[2], b[3], [texts[i]], [r], ord_ind))
+                new_block_info.append((b[0], b[1], b[2], b[3], [texts[i]], [r], ord_ind))
 
         if self.with_columns:
             new_blocks = sorted(new_block_info, key=lambda x: x[-1])
@@ -430,8 +416,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
         for label, b in zip(texts_labels, new_blocks):
             if label in effective_class_inds:
                 text = join_lines(b[4], label == TABLE_ID)
-                filtered_blocks.append(
-                    (b[0], b[1], b[2], b[3], text, b[5], label))
+                filtered_blocks.append((b[0], b[1], b[2], b[3], text, b[5], label))
 
         # print('---filtered_blocks---')
         # for b in filtered_blocks:
@@ -539,8 +524,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
                 if (c0 > LINE_FULL_THRESHOLD and c1 < START_THRESHOLD
                         and c2 < SIMI_HEIGHT_THRESHOLD):
                     new_text = join_lines([b0[4], b1[4]])
-                    new_block = (b0[0], b0[1], b0[2], b0[3], new_text, b0[5],
-                                 b0[6])
+                    new_block = (b0[0], b0[1], b0[2], b0[3], new_text, b0[5], b0[6])
                     groups[i - 1][-1] = new_block
                     groups[i].pop(0)
 
@@ -549,8 +533,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
                 c0 = (r1_w - r0_w) / r1_h
                 if c0 < SIMI_WIDTH_THRESHOLD:
                     new_text = join_lines([b0[4], b1[4]], True)
-                    new_block = (b0[0], b0[1], b0[2], b0[3], new_text, b0[5],
-                                 b0[6])
+                    new_block = (b0[0], b0[1], b0[2], b0[3], new_text, b0[5], b0[6])
                     groups[i - 1][-1] = new_block
                     groups[i].pop(0)
 
@@ -559,10 +542,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
         return groups
 
     def save_to_html(self, groups, output_file):
-        styles = [
-            'style="background-color: #EBEBEB;"',
-            'style="background-color: #ABBAEA;"'
-        ]
+        styles = ['style="background-color: #EBEBEB;"', 'style="background-color: #ABBAEA;"']
         idx = 0
         table_style = 'style="border:1px solid black;"'
 
@@ -578,8 +558,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
                     rows = b[4].split('\n')
                     content = []
                     for r in rows:
-                        content.append(
-                            f'<tr><td {table_style}>{r}</td></tr>')
+                        content.append(f'<tr><td {table_style}>{r}</td></tr>')
                     elem_text = '\n'.join(content)
                     text = f'<table {table_style}>{elem_text}</table>'
                 else:
@@ -610,6 +589,7 @@ class PDFWithSemanticLoader(BasePDFLoader):
 
     def load(self) -> List[Document]:
         """Load given path as pages."""
+        import pypdfium2
         blob = Blob.from_path(self.file_path)
         start = self.start
         groups = []
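Moving `import pypdfium2` into `load()` defers the optional dependency until a PDF is actually loaded, matching the removal of `Requires-Dist: pypdfium2` from METADATA below. A sketch of the deferred-import pattern (`LazyPdfLoader` is illustrative, not part of the package; the calls assume pypdfium2 4.x):

```python
from typing import List


class LazyPdfLoader:
    """Illustrates the deferred-import pattern used by load() above."""

    def __init__(self, file_path: str) -> None:
        self.file_path = file_path

    def load(self) -> List[str]:
        # An ImportError now surfaces here, at call time, rather than when
        # the module (or a package that re-exports it) is first imported.
        import pypdfium2
        pdf = pypdfium2.PdfDocument(self.file_path)
        return [page.get_textpage().get_text_range() for page in pdf]
```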
--- a/bisheng_langchain/retrievers/mix_es_vector.py
+++ b/bisheng_langchain/retrievers/mix_es_vector.py
@@ -49,13 +49,18 @@ class MixEsVectorRetriever(BaseRetriever):
         elif self.combine_strategy == 'vector_front':
             return vector_docs + keyword_docs
         elif self.combine_strategy == 'mix':
-            combine_docs = []
+            combine_docs_dict = {}
             min_len = min(len(keyword_docs), len(vector_docs))
             for i in range(min_len):
-                combine_docs.append(keyword_docs[i])
-                combine_docs.append(vector_docs[i])
-            combine_docs.extend(keyword_docs[min_len:])
-            combine_docs.extend(vector_docs[min_len:])
+                combine_docs_dict[keyword_docs[i].page_content] = keyword_docs[i]
+                combine_docs_dict[vector_docs[i].page_content] = vector_docs[i]
+            for doc in keyword_docs[min_len:]:
+                combine_docs_dict[doc.page_content] = doc
+            for doc in vector_docs[min_len:]:
+                combine_docs_dict[doc.page_content] = doc
+
+            # Convert the dict values back to a list
+            combine_docs = list(combine_docs_dict.values())
             return combine_docs
         else:
             raise ValueError(f'Expected combine_strategy to be one of '
@@ -88,13 +93,18 @@ class MixEsVectorRetriever(BaseRetriever):
         elif self.combine_strategy == 'vector_front':
             return vector_docs + keyword_docs
         elif self.combine_strategy == 'mix':
-            combine_docs = []
+            combine_docs_dict = {}
             min_len = min(len(keyword_docs), len(vector_docs))
             for i in range(min_len):
-                combine_docs.append(keyword_docs[i])
-                combine_docs.append(vector_docs[i])
-            combine_docs.extend(keyword_docs[min_len:])
-            combine_docs.extend(vector_docs[min_len:])
+                combine_docs_dict[keyword_docs[i].page_content] = keyword_docs[i]
+                combine_docs_dict[vector_docs[i].page_content] = vector_docs[i]
+            for doc in keyword_docs[min_len:]:
+                combine_docs_dict[doc.page_content] = doc
+            for doc in vector_docs[min_len:]:
+                combine_docs_dict[doc.page_content] = doc
+
+            # Convert the dict values back to a list
+            combine_docs = list(combine_docs_dict.values())
             return combine_docs
         else:
             raise ValueError(f'Expected combine_strategy to be one of '
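Both the sync and async `'mix'` strategies now deduplicate by `page_content` while preserving the interleaved keyword/vector ranking, relying on dicts keeping insertion order (Python 3.7+). A minimal sketch with plain strings standing in for `Document` objects:

```python
keyword_docs = ['a', 'b', 'c']
vector_docs = ['b', 'd']

combined = {}
min_len = min(len(keyword_docs), len(vector_docs))
for i in range(min_len):
    # Interleave: keyword hit first, then the vector hit of the same rank.
    combined[keyword_docs[i]] = keyword_docs[i]
    combined[vector_docs[i]] = vector_docs[i]
for doc in keyword_docs[min_len:] + vector_docs[min_len:]:
    combined[doc] = doc

# 'b' appears once, at its first (highest-ranked) position.
print(list(combined.values()))  # ['a', 'b', 'd', 'c']
```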
--- a/bisheng_langchain/vectorstores/elastic_keywords_search.py
+++ b/bisheng_langchain/vectorstores/elastic_keywords_search.py
@@ -3,16 +3,16 @@ from __future__ import annotations
 
 import uuid
 from abc import ABC
-from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, Callable
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Tuple
 
 import jieba.analyse
+from langchain.chains.llm import LLMChain
 from langchain.docstore.document import Document
 from langchain.embeddings.base import Embeddings
-from langchain.utils import get_from_dict_or_env
-from langchain.vectorstores.base import VectorStore
-from langchain.chains.llm import LLMChain
 from langchain.llms.base import BaseLLM
 from langchain.prompts.prompt import PromptTemplate
+from langchain.utils import get_from_dict_or_env
+from langchain.vectorstores.base import VectorStore
 
 if TYPE_CHECKING:
     from elasticsearch import Elasticsearch  # noqa: F401
@@ -23,7 +23,7 @@ def _default_text_mapping() -> Dict:
 
 
 DEFAULT_PROMPT = PromptTemplate(
-    input_variables=["question"],
+    input_variables=['question'],
     template="""分析给定Question,提取Question中包含的KeyWords,输出列表形式
 
 Examples:
@@ -105,6 +105,7 @@ class ElasticKeywordsSearch(VectorStore, ABC):
         self,
         elasticsearch_url: str,
         index_name: str,
+        drop_old: Optional[bool] = False,
         *,
         ssl_verify: Optional[Dict[str, Any]] = None,
         llm_chain: Optional[LLMChain] = None,
@@ -117,6 +118,7 @@ class ElasticKeywordsSearch(VectorStore, ABC):
                 'Please install it with `pip install elasticsearch`.')
         self.index_name = index_name
         self.llm_chain = llm_chain
+        self.drop_old = drop_old
         _ssl_verify = ssl_verify or {}
         try:
             self.client = elasticsearch.Elasticsearch(elasticsearch_url, **_ssl_verify)
@@ -155,6 +157,9 @@ class ElasticKeywordsSearch(VectorStore, ABC):
         # check to see if the index already exists
         try:
             self.client.indices.get(index=self.index_name)
+            if texts and self.drop_old:
+                self.client.indices.delete(index=self.index_name)
+                self.create_index(self.client, self.index_name, mapping)
         except NotFoundError:
             # TODO would be nice to create index before embedding,
             # just to save expensive steps for last
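With `drop_old=True`, re-ingesting into an index that already exists now deletes and recreates it rather than appending duplicates. A hypothetical usage sketch based on the constructor shown above (assumes the class is importable as below and an Elasticsearch instance is reachable):

```python
from bisheng_langchain.vectorstores import ElasticKeywordsSearch

es = ElasticKeywordsSearch(
    'http://localhost:9200',
    'doc_keywords',
    drop_old=True,  # an existing index is dropped and recreated on re-ingest
)
es.add_texts(['first document', 'second document'])
```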
@@ -277,7 +282,10 @@ class ElasticKeywordsSearch(VectorStore, ABC):
             vectorsearch = cls(elasticsearch_url, index_name, llm_chain=llm_chain, **kwargs)
         else:
             vectorsearch = cls(elasticsearch_url, index_name, **kwargs)
-        vectorsearch.add_texts(texts, metadatas=metadatas, ids=ids, refresh_indices=refresh_indices)
+        vectorsearch.add_texts(texts,
+                               metadatas=metadatas,
+                               ids=ids,
+                               refresh_indices=refresh_indices)
         return vectorsearch
 
     def create_index(self, client: Any, index_name: str, mapping: Dict) -> None:
--- a/bisheng_langchain/vectorstores/milvus.py
+++ b/bisheng_langchain/vectorstores/milvus.py
@@ -1,9 +1,8 @@
-
 """Wrapper around the Milvus vector database."""
 from __future__ import annotations
 
 import logging
-from typing import Any, Iterable, List, Optional, Tuple, Union, Callable
+from typing import Any, Callable, Iterable, List, Optional, Tuple, Union
 from uuid import uuid4
 
 import numpy as np
@@ -880,7 +879,7 @@ class Milvus(MilvusLangchain):
         )
         vector_db.add_texts(texts=texts, metadatas=metadatas)
         return vector_db
-
+
     @staticmethod
     def _relevance_score_fn(distance: float) -> float:
         """Normalize the distance to a score on a scale [0, 1]."""
--- a/bisheng_langchain-0.2.2.1.dist-info/METADATA
+++ b/bisheng_langchain-0.2.2.3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: bisheng-langchain
-Version: 0.2.2.1
+Version: 0.2.2.3
 Summary: bisheng langchain modules
 Home-page: https://github.com/dataelement/bisheng
 Author: DataElem
@@ -26,7 +26,6 @@ Requires-Dist: bisheng-pyautogen
 Requires-Dist: jieba ==0.42.1
 Requires-Dist: pydantic ==1.10.13
 Requires-Dist: pymupdf ==1.23.8
-Requires-Dist: pypdfium2 ==4.25.0
 Requires-Dist: shapely ==2.0.2
 Requires-Dist: filetype ==1.2.0
 
--- a/bisheng_langchain-0.2.2.1.dist-info/RECORD
+++ b/bisheng_langchain-0.2.2.3.dist-info/RECORD
@@ -23,9 +23,9 @@ bisheng_langchain/chains/retrieval/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQe
 bisheng_langchain/chains/retrieval/retrieval_chain.py,sha256=7VLJ-IPVjKfmAVgVET4cvKCO9DCMxwsGgVhW-wz5RZM,3050
 bisheng_langchain/chains/router/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bisheng_langchain/chains/router/multi_rule.py,sha256=BiFryj3-7rOxfttD-MyOkKWLCSGB9LVYd2rjOsIfQC8,375
-bisheng_langchain/chains/router/rule_router.py,sha256=QWLKqJ9ZCQb9E3oh6pd0C6YZRhHyoJSmDFkx31LNpME,1563
+bisheng_langchain/chains/router/rule_router.py,sha256=R2YRUnwn7s_7DbsSn27uPn4cIV0D-5iXEORXir0tNGM,1835
 bisheng_langchain/chat_models/__init__.py,sha256=A3_KoMRp96UqHwwYX4Mt60peVNjCsuUHAixqaV44BP4,492
-bisheng_langchain/chat_models/host_llm.py,sha256=5DWIOHFwnRx_wQzJmZRqE4xy2kOKKOq7VKdKm3mr6uU,21692
+bisheng_langchain/chat_models/host_llm.py,sha256=phIBLY7AoGPIIxl9saFV2XAcRvgaJ8XUJA06bqiN8Uw,21638
 bisheng_langchain/chat_models/minimax.py,sha256=JLs_f6vWD9beZYUtjD4FG28G8tZHrGUAWOwdLIuJomw,13901
 bisheng_langchain/chat_models/proxy_llm.py,sha256=wzVBZik9WC3-f7kyQ1eu3Ooibqpcocln08knf5lV1Nw,17082
 bisheng_langchain/chat_models/qwen.py,sha256=jGx_tW-LPxfegE6NvY6wID8ps2SsP813atjXnc04C-s,18841
@@ -44,7 +44,7 @@ bisheng_langchain/document_loaders/__init__.py,sha256=LuQ-zMYxde2FeiEcvVtjQqnHoz
 bisheng_langchain/document_loaders/custom_kv.py,sha256=sUKeK0e8-cCmKyj1FsR7SzBNWjo5zRwHWVS5tKVxuPs,6656
 bisheng_langchain/document_loaders/elem_html.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bisheng_langchain/document_loaders/elem_image.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bisheng_langchain/document_loaders/elem_pdf.py,sha256=64kUITkrTVJe9CH6IAVSdDVcn2Ekx2PM-jT0cdClXlo,22716
+bisheng_langchain/document_loaders/elem_pdf.py,sha256=K-TXILGNFLFjavhun_MFbUF4t2_WGA3Z-kbnr75lmW8,22243
 bisheng_langchain/document_loaders/elem_unstrcutured_loader.py,sha256=N2jCmizi9gIsAO38zWL1zOzfNhoo_XmNIX7fteSeR0Q,4883
 bisheng_langchain/document_loaders/universal_kv.py,sha256=dJF_GQGKBMUjB_kX9CSp7xZRhXgwVuGPbMIzJwPh-C0,4063
 bisheng_langchain/document_loaders/parsers/__init__.py,sha256=OOM_FJkwaU-zNS58fASw0TH8FNT6VXKb0VrvisgdrII,171
@@ -62,14 +62,14 @@ bisheng_langchain/input_output/__init__.py,sha256=sW_GB7MlrHYsqY1Meb_LeimQqNsMz1
 bisheng_langchain/input_output/input.py,sha256=I5YDmgbvvj1o2lO9wi8LE37wM0wP5jkhUREU32YrZMQ,1094
 bisheng_langchain/input_output/output.py,sha256=6U-az6-Cwz665C2YmcH3SYctWVjPFjmW8s70CA_qphk,11585
 bisheng_langchain/retrievers/__init__.py,sha256=TcyK31IMgFJcYaOCLd9O6qFzXt1VMbtLs-g4C6ml_3w,117
-bisheng_langchain/retrievers/mix_es_vector.py,sha256=QbFuOYSdbhUE0j40E0ATmIt8T2iCtTc50xCll-PgRfs,3933
+bisheng_langchain/retrievers/mix_es_vector.py,sha256=dSrrsuMPSgGiu181EOzACyIKiDXR0qNBQz_914USD3E,4465
 bisheng_langchain/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bisheng_langchain/utils/requests.py,sha256=20ooDlMDMkXig--rSyRqbnAlbGLscBvvkHzFk2AmyGM,8517
 bisheng_langchain/vectorstores/__init__.py,sha256=zCZgDe7LyQ0iDkfcm5UJ5NxwKQSRHnqrsjx700Fy11M,213
-bisheng_langchain/vectorstores/elastic_keywords_search.py,sha256=EeJv6QID5JuCxsD6vALG4-PwMgWu3mg-XebT8vXeqvU,12317
-bisheng_langchain/vectorstores/milvus.py,sha256=adonwEleUo5etgJmSNk8mfBX-ZYADjVp89RiUlrB_cA,34429
+bisheng_langchain/vectorstores/elastic_keywords_search.py,sha256=gt_uw_fSMcEZWxbiA3V0RyA-utLOZlUY-qxdwnsfZks,12664
+bisheng_langchain/vectorstores/milvus.py,sha256=44ZbDsIxdsbUnHOpEpCdrW5zvWnYvDdAVoDKjCFoyYI,34424
 bisheng_langchain/vectorstores/retriever.py,sha256=hj4nAAl352EV_ANnU2OHJn7omCH3nBK82ydo14KqMH4,4353
-bisheng_langchain-0.2.2.1.dist-info/METADATA,sha256=bFp2yoHTJj1zyuuGOoaJajyKBpJ0N6xBxqEwbCBGNwI,2333
-bisheng_langchain-0.2.2.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-bisheng_langchain-0.2.2.1.dist-info/top_level.txt,sha256=Z6pPNyCo4ihyr9iqGQbH8sJiC4dAUwA_mAyGRQB5_Fs,18
-bisheng_langchain-0.2.2.1.dist-info/RECORD,,
+bisheng_langchain-0.2.2.3.dist-info/METADATA,sha256=9UgVujQ3Ep2eBWGmbeoe8r-dPNqOjPxXHa0_WYSz7pw,2299
+bisheng_langchain-0.2.2.3.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+bisheng_langchain-0.2.2.3.dist-info/top_level.txt,sha256=Z6pPNyCo4ihyr9iqGQbH8sJiC4dAUwA_mAyGRQB5_Fs,18
+bisheng_langchain-0.2.2.3.dist-info/RECORD,,