bisheng-langchain 0.3.0__py3-none-any.whl → 0.3.0rc0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,9 +1,8 @@
  """Chain that runs an arbitrary python function."""
  import logging
- import os
  from typing import Callable, Dict, Optional

- import openai
+ import httpx
  from autogen import AssistantAgent
  from langchain.base_language import BaseLanguageModel

@@ -45,15 +44,6 @@ Reply "TERMINATE" in the end when everything is done.
  ):
  is_termination_msg = (is_termination_msg if is_termination_msg is not None else
  (lambda x: x.get('content') == 'TERMINATE'))
- if openai_proxy:
- openai.proxy = {'https': openai_proxy, 'http': openai_proxy}
- else:
- openai.proxy = None
- if openai_api_base:
- openai.api_base = openai_api_base
- else:
- openai.api_base = os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1')
-
  config_list = [
  {
  'model': model_name,
@@ -63,17 +53,24 @@ Reply "TERMINATE" in the end when everything is done.
  'api_version': api_version,
  },
  ]
- llm_config = {
- 'seed': 42, # change the seed for different trials
- 'temperature': temperature,
- 'config_list': config_list,
- 'request_timeout': 120,
- }
+ if openai_proxy:
+ config_list[0]['http_client'] = httpx.Client(proxies=openai_proxy)
+ config_list[0]['http_async_client'] = httpx.AsyncClient(proxies=openai_proxy)
+
+ if llm:
+ llm_config = llm
+
+ else:
+ llm_config = {
+ 'seed': 42, # change the seed for different trials
+ 'temperature': temperature,
+ 'config_list': config_list,
+ 'request_timeout': 120,
+ }

  super().__init__(
  name,
  llm_config=llm_config,
- llm=llm,
  system_message=system_message,
  is_termination_msg=is_termination_msg,
  max_consecutive_auto_reply=None,
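The change above replaces module-level proxy configuration (openai.proxy / openai.api_base, which newer openai client releases no longer expose) with per-entry httpx clients attached to the autogen config. A minimal sketch of the same pattern, separate from the package code; the 'base_url' key name is an assumption (some autogen releases use 'api_base'), and httpx 0.28+ renames the proxies= argument to proxy=:

    from typing import Optional

    import httpx


    def build_config_list(model_name: str, api_key: str, api_base: str,
                          proxy: Optional[str] = None) -> list:
        """Illustrative helper: one autogen-style config entry with an optional proxy."""
        entry = {
            'model': model_name,
            'api_key': api_key,
            'base_url': api_base,  # assumed key name; older autogen releases use 'api_base'
        }
        if proxy:
            # The proxy is scoped to this entry's HTTP clients instead of global openai state.
            entry['http_client'] = httpx.Client(proxies=proxy)
            entry['http_async_client'] = httpx.AsyncClient(proxies=proxy)
        return [entry]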
@@ -19,7 +19,6 @@ class AutoGenCustomRole(ConversableAgent):
  human_input_mode='NEVER',
  code_execution_config=False,
  llm_config=False,
- llm=None,
  **kwargs)
  self.func = func
  self.coroutine = coroutine
@@ -1,9 +1,8 @@
  """Chain that runs an arbitrary python function."""
  import logging
- import os
  from typing import List, Optional

- import openai
+ import httpx
  from autogen import Agent, GroupChat, GroupChatManager
  from langchain.base_language import BaseLanguageModel

@@ -20,6 +19,7 @@ class AutoGenGroupChatManager(GroupChatManager):
  self,
  agents: List[Agent],
  max_round: int = 50,
+ llm: Optional[BaseLanguageModel] = None,
  model_name: Optional[str] = 'gpt-4-0613',
  openai_api_key: Optional[str] = '',
  openai_api_base: Optional[str] = '',
@@ -28,7 +28,6 @@ class AutoGenGroupChatManager(GroupChatManager):
  api_type: Optional[str] = None, # when llm_flag=True, need to set
  api_version: Optional[str] = None, # when llm_flag=True, need to set
  name: Optional[str] = 'chat_manager',
- llm: Optional[BaseLanguageModel] = None,
  system_message: Optional[str] = 'Group chat manager.',
  **kwargs,
  ):
@@ -37,15 +36,6 @@ class AutoGenGroupChatManager(GroupChatManager):

  groupchat = GroupChat(agents=agents, messages=[], max_round=max_round)

- if openai_proxy:
- openai.proxy = {'https': openai_proxy, 'http': openai_proxy}
- else:
- openai.proxy = None
- if openai_api_base:
- openai.api_base = openai_api_base
- else:
- openai.api_base = os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1')
-
  config_list = [
  {
  'model': model_name,
@@ -55,17 +45,23 @@ class AutoGenGroupChatManager(GroupChatManager):
  'api_version': api_version,
  },
  ]
- llm_config = {
- 'seed': 42, # change the seed for different trials
- 'temperature': temperature,
- 'config_list': config_list,
- 'request_timeout': 120,
- }
+ if openai_proxy:
+ config_list[0]['http_client'] = httpx.Client(proxies=openai_proxy)
+ config_list[0]['http_async_client'] = httpx.AsyncClient(proxies=openai_proxy)
+
+ if llm:
+ llm_config = llm
+ else:
+ llm_config = {
+ 'seed': 42, # change the seed for different trials
+ 'temperature': temperature,
+ 'config_list': config_list,
+ 'request_timeout': 120,
+ }

  super().__init__(
  groupchat=groupchat,
  llm_config=llm_config,
- llm=llm,
  name=name,
  system_message=system_message,
  )
@@ -1,9 +1,8 @@
  """Chain that runs an arbitrary python function."""
  import logging
- import os
  from typing import Callable, Dict, Optional

- import openai
+ import httpx
  from autogen import UserProxyAgent
  from langchain.base_language import BaseLanguageModel

@@ -47,14 +46,6 @@ class AutoGenUserProxyAgent(UserProxyAgent):
  code_execution_config = False

  if llm_flag:
- if openai_proxy:
- openai.proxy = {'https': openai_proxy, 'http': openai_proxy}
- else:
- openai.proxy = None
- if openai_api_base:
- openai.api_base = openai_api_base
- else:
- openai.api_base = os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1')
  config_list = [
  {
  'model': model_name,
@@ -64,12 +55,19 @@ class AutoGenUserProxyAgent(UserProxyAgent):
  'api_version': api_version,
  },
  ]
- llm_config = {
- 'seed': 42, # change the seed for different trials
- 'temperature': temperature,
- 'config_list': config_list,
- 'request_timeout': 120,
- }
+ if openai_proxy:
+ config_list[0]['http_client'] = httpx.Client(proxies=openai_proxy)
+ config_list[0]['http_async_client'] = httpx.AsyncClient(proxies=openai_proxy)
+
+ if llm:
+ llm_config = llm
+ else:
+ llm_config = {
+ 'seed': 42, # change the seed for different trials
+ 'temperature': temperature,
+ 'config_list': config_list,
+ 'request_timeout': 120,
+ }
  else:
  llm_config = False

@@ -80,7 +78,6 @@ class AutoGenUserProxyAgent(UserProxyAgent):
  function_map=function_map,
  code_execution_config=code_execution_config,
  llm_config=llm_config,
- llm=llm,
  system_message=system_message)


@@ -109,7 +106,6 @@ class AutoGenUser(UserProxyAgent):
  human_input_mode=human_input_mode,
  code_execution_config=code_execution_config,
  llm_config=llm_config,
- llm=None,
  system_message=system_message)


@@ -140,5 +136,4 @@ class AutoGenCoder(UserProxyAgent):
  function_map=function_map,
  code_execution_config=code_execution_config,
  llm_config=llm_config,
- llm=None,
  system_message=system_message)
@@ -49,10 +49,12 @@ class APIToolBase(BaseModel):
  request_timeout=timeout)
  return values

- def run(self, query: str) -> str:
+ def run(self, query: str, **kwargs) -> str:
  """Run query through api and parse result."""
  if query:
  self.params[self.input_key] = query
+ if kwargs:
+ self.params.update(kwargs)
  if self.params:
  param = '&'.join([f'{k}={v}' for k, v in self.params.items()])
  url = self.url + '?' + param if '?' not in self.url else self.url + '&' + param
@@ -64,10 +66,12 @@ class APIToolBase(BaseModel):
  logger.info('api_call_fail res={}', resp.text)
  return resp.text

- async def arun(self, query: str) -> str:
+ async def arun(self, query: str, **kwargs) -> str:
  """Run query through api and parse result."""
  if query:
  self.params[self.input_key] = query
+ if kwargs:
+ self.params.update(kwargs)
  if self.params:
  param = '&'.join([f'{k}={v}' for k, v in self.params.items()])
  url = self.url + '?' + param if '?' not in self.url else self.url + '&' + param
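With the signature change above, extra keyword arguments passed to run/arun are merged into self.params before the query string is assembled. A small self-contained sketch of that merge-and-encode step; the helper name and example URL are illustrative, not part of the package:

    def build_url(base_url: str, params: dict, query: str, input_key: str = 'query', **kwargs) -> str:
        """Illustrative only: fold the main query plus extra kwargs into the query string."""
        if query:
            params[input_key] = query
        if kwargs:
            params.update(kwargs)  # extra keyword args become extra query parameters
        encoded = '&'.join(f'{k}={v}' for k, v in params.items())
        # Append with '?' or '&' depending on whether the URL already carries a query string.
        return f'{base_url}?{encoded}' if '?' not in base_url else f'{base_url}&{encoded}'


    # build_url('http://example.com/api', {}, 'AAPL', date='2024-03-26')
    # -> 'http://example.com/api?query=AAPL&date=2024-03-26'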
@@ -8,6 +8,7 @@ from datetime import datetime
  from typing import List, Type

  from langchain_core.pydantic_v1 import BaseModel, Field
+ from loguru import logger

  from .base import APIToolBase

@@ -42,13 +43,13 @@ class Stock(BaseModel):
  todayStart=float(todayStart),
  yesterdayEnd=float(yesterdayEnd),
  current=float(current),
+ highest=float(highest),
+ lowest=float(lowest),
  changeAmount=round(float(current) - float(yesterdayEnd), 3),
  changeRate=round((float(current) - float(yesterdayEnd)) / float(yesterdayEnd) * 100,
  3),
  vol=float(vol),
  turnover=float(turnover),
- highest=float(highest),
- lowest=float(lowest),
  buyPercent=0.0,
  )

@@ -64,16 +65,16 @@ class StockArg(BaseModel):
  stock_exchange: str = Field(
  description='交易所简写。股票上市的交易所,或者发布行情指数的交易所。可选项有"sh"(上海证券交易所)、" sz"( 深圳证券交易所)、"bj"( 北京证券交易所)',
  )
- stock_symbol: str = Field(
- description="""6位数字的股票或者指数代码。
+ stock_symbol: str = Field(description="""6位数字的股票或者指数代码。
  参考信息:
  - 如果问题中未给出,可能需要上网查询。
  - 上交所股票通常以 6 开头,深交所股票通常以 0、3 开头,北交所股票通常以 8 开头。
- - 上交所行情指数通常以 000 开头,深交所指数通常以 399 开头。同一个指数可能会同时在两个交易所发布,例如沪深 300 有"sh000300"和"sz399300"两个代码。"""
- )
+ - 上交所行情指数通常以 000 开头,深交所指数通常以 399 开头。同一个指数可能会同时在两个交易所发布,例如沪深 300 有"sh000300"和"sz399300"两个代码。""")


- stockPattern = re.compile(r'var hq_str_s[hz]\d{6}="([^,"]+),([^,"]+),([^,"]+),([^,"]+),[^"]+";')
+ stockPattern = re.compile(
+ r'var hq_str_s[hz]\d{6}="([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),[^"]+";'
+ )
  kLinePattern = re.compile(r'var _s[hz]\d{6}_\d+_\d+=\((\[.*?\])\)')

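The widened stockPattern above captures ten comma-separated fields from Sina's hq_str_ quote string instead of four, which is what lets the later Stock(...) call read the highest/lowest and volume figures directly. A self-contained sketch of how such a pattern matches; the sample quote string and the field interpretation are illustrative only:

    import re

    # Same shape as the new stockPattern: ten captured fields, then the rest of the record.
    stock_pattern = re.compile(
        r'var hq_str_s[hz]\d{6}="([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),'
        r'([^,"]+),([^,"]+),([^,"]+),([^,"]+),([^,"]+),[^"]+";'
    )

    # Invented sample in the hq_str layout, purely for illustration.
    sample = 'var hq_str_sh600000="示例股票,7.10,7.08,7.15,7.20,7.05,7.14,7.15,1000,7100,rest";'
    m = stock_pattern.search(sample)
    if m:
        fields = m.groups()
        # The Stock(...) call in the later hunk consumes groups 1-6, 9 and 10, which appear
        # to correspond to name/open/close/current/highest/lowest and vol/turnover.
        highest, lowest = fields[4], fields[5]
        vol, turnover = fields[8], fields[9]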
@@ -87,7 +88,8 @@ class StockInfo(APIToolBase):
  for stockNumber in stocks:
  if len(stockNumber) == 8:
  # 8位长度的代码必须以sh或者sz开头,后面6位是数字
- if (stockNumber.startswith('sh') or stockNumber.startswith('sz')) and stockNumber[2:8].isdecimal():
+ if (stockNumber.startswith('sh')
+ or stockNumber.startswith('sz')) and stockNumber[2:8].isdecimal():
  stockList.append(stockNumber)
  elif len(stockNumber) == 6:
  # 6位长度的代码必须全是数字
@@ -117,8 +119,9 @@ class StockInfo(APIToolBase):
  stock = []
  if match:
  while match:
- stock.append(Stock(match.group(1), match.group(2), match.group(3), match.group(4)),
- match.group(5), match.group(6), match.group(9), match.group(10)))
+ stock.append(
+ Stock(match.group(1), match.group(2), match.group(3), match.group(4),
+ match.group(5), match.group(6), match.group(9), match.group(10)))
  match = stockPattern.search(content, match.end())
  else:
  stock = [content]
@@ -137,9 +140,11 @@ class StockInfo(APIToolBase):
  ts = int(datetime.timestamp(date_obj) * 1000)
  stock = f'{stock_number}_240_{ts}'
  count = datetime.today() - date_obj
- self.url = self.url.format(stockName=stock_number, stock=stock, count=count.days)
-
- k_data = super().run('')
+ url = self.url.format(stockName=stock_number, stock=stock, count=count.days)
+ resp = self.client.get(url)
+ if resp.status_code != 200:
+ logger.info('api_call_fail res={}', resp.text)
+ k_data = resp.text
  data_array = json.loads(kLinePattern.search(k_data).group(1))
  for item in data_array:
  if item.get('day') == date:
@@ -166,9 +171,8 @@ class StockInfo(APIToolBase):
  ts = int(datetime.timestamp(date_obj) * 1000)
  stock = f'{stock_number}_240_{ts}'
  count = datetime.today() - date_obj
- self.url = self.url.format(stockName=stock_number, stock=stock, count=count.days)
- k_data = await super().arun('')
-
+ url = self.url.format(stockName=stock_number, stock=stock, count=count.days)
+ k_data = await self.async_client.aget(url)
  data_array = json.loads(kLinePattern.search(k_data).group(1))
  for item in data_array:
  if item.get('day') == date:
@@ -199,16 +203,14 @@ class StockInfo(APIToolBase):
  header = {'Referer': 'http://finance.sina.com.cn'}

  class stockK(BaseModel):
- stock_symbol: str = Field(
- description="""6位数字的股票或者指数代码。
+ stock_symbol: str = Field(description="""6位数字的股票或者指数代码。
  参考信息:
  - 如果问题中未给出,可能需要上网查询。
  - 上交所股票通常以 6 开头,深交所股票通常以 0、3 开头,北交所股票通常以 8 开头。
- - 上交所行情指数通常以 000 开头,深交所指数通常以 399 开头。同一个指数可能会同时在两个交易所发布,例如沪深 300 有"sh000300"和"sz399300"两个代码。"""
- )
+ - 上交所行情指数通常以 000 开头,深交所指数通常以 399 开头。同一个指数可能会同时在两个交易所发布,例如沪深 300 有"sh000300"和"sz399300"两个代码。""")
  stock_exchange: str = Field(
- description='交易所简写。股票上市的交易所,或者发布行情指数的交易所。可选项有"sh"(上海证券交易所)、" sz"( 深圳证券交易所)、"bj"( 北京证券交易所)',
- )
+ description=
+ '交易所简写。股票上市的交易所,或者发布行情指数的交易所。可选项有"sh"(上海证券交易所)、" sz"( 深圳证券交易所)、"bj"( 北京证券交易所)', )
  date: str = Field(description='需要查询的时间,按照”2024-03-26“格式,传入日期')

  return cls(url=url, input_key=input_key, headers=header, args_schema=stockK)
@@ -114,29 +114,7 @@ class CompanyInfo(APIToolBase):

  @classmethod
  def all_companys_by_company(cls, api_key: str, pageSize: int = 20, pageNum: int = 1):
- """可以通过公司名称获取企业人员的所有相关公司,包括其担任法人、股东、董监高的公司信息"""
- url = 'http://open.api.tianyancha.com/services/v4/open/allCompanys'
- input_key = 'name'
- params = {}
- params['pageSize'] = pageSize
- params['pageNum'] = pageNum
-
- class InputArgs(BaseModel):
- """args_schema"""
- query: str = Field(description='company name to query')
-
- return cls(url=url,
- api_key=api_key,
- params=params,
- input_key=input_key,
- args_schema=InputArgs)
-
- @classmethod
- def all_companys_by_humanname(cls,
- api_key: str,
- pageSize: int = 20,
- pageNum: int = 1) -> CompanyInfo:
- """可以通过人名获取企业人员的所有相关公司,包括其担任法人、股东、董监高的公司信息"""
+ """可以通过公司名称和人名获取企业人员的所有相关公司,包括其担任法人、股东、董监高的公司信息"""
  url = 'http://open.api.tianyancha.com/services/v4/open/allCompanys'
  input_key = 'humanName'
  params = {}
@@ -145,7 +123,8 @@ class CompanyInfo(APIToolBase):

  class InputArgs(BaseModel):
  """args_schema"""
- query: str = Field(description='human name to query')
+ query: str = Field(description='human who you want to search')
+ name: str = Field(description='company name which human worked')

  return cls(url=url,
  api_key=api_key,
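After the merge above, the single classmethod's args schema carries both a person name and a company name, and (together with the **kwargs change in APIToolBase.run) both values reach the API as query parameters. A hypothetical sketch of an equivalent two-field schema; the class name and the wording of the descriptions are illustrative, not the package's exact code:

    from langchain_core.pydantic_v1 import BaseModel, Field


    class AllCompanysArgs(BaseModel):
        """Illustrative args schema: both fields end up as query parameters."""
        query: str = Field(description='name of the person to look up')
        name: str = Field(description='company the person is associated with')


    # A tool built on such a schema would be invoked roughly as
    #     tool.run('张三', name='某某科技有限公司')
    # with APIToolBase.run folding `name` into self.params via **kwargs.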
@@ -132,7 +132,7 @@ class ElasticKeywordsSearch(VectorStore, ABC):
  self.client.indices.delete(index=index_name)
  except elasticsearch.exceptions.NotFoundError:
  pass
-
+
  def add_texts(
  self,
  texts: Iterable[str],
@@ -195,6 +195,9 @@ class ElasticKeywordsSearch(VectorStore, ABC):
  query_strategy: str = 'match_phrase',
  must_or_should: str = 'should',
  **kwargs: Any) -> List[Document]:
+ if k == 0:
+ # pm need to control
+ return []
  docs_and_scores = self.similarity_search_with_score(query,
  k=k,
  query_strategy=query_strategy,
@@ -218,6 +221,9 @@ class ElasticKeywordsSearch(VectorStore, ABC):
  query_strategy: str = 'match_phrase',
  must_or_should: str = 'should',
  **kwargs: Any) -> List[Tuple[Document, float]]:
+ if k == 0:
+ # pm need to control
+ return []
  assert must_or_should in ['must', 'should'], 'only support must and should.'
  # llm or jiaba extract keywords
  if self.llm_chain:
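The k == 0 guards added here, and repeated in the Milvus hunks below, short-circuit retrieval when the caller asks for zero documents; the '# pm need to control' comment suggests k=0 is used upstream to disable retrieval without touching the search backend. A minimal stand-in illustration of the guard:

    from typing import List, Tuple


    def similarity_search_with_score(query: str, k: int = 4) -> List[Tuple[str, float]]:
        """Stand-in for the real vector-store search; only the k == 0 guard matters here."""
        if k == 0:
            # Treat "zero results requested" as retrieval disabled: return immediately
            # instead of issuing a query to Elasticsearch / Milvus.
            return []
        # ... a real implementation would query the store here ...
        return [(f'doc matching {query!r}', 1.0)][:k]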
@@ -288,10 +294,17 @@ class ElasticKeywordsSearch(VectorStore, ABC):
  index_name = index_name or uuid.uuid4().hex
  if llm:
  llm_chain = LLMChain(llm=llm, prompt=prompt)
- vectorsearch = cls(elasticsearch_url, index_name, llm_chain=llm_chain, drop_old=drop_old, **kwargs)
+ vectorsearch = cls(elasticsearch_url,
+ index_name,
+ llm_chain=llm_chain,
+ drop_old=drop_old,
+ **kwargs)
  else:
  vectorsearch = cls(elasticsearch_url, index_name, drop_old=drop_old, **kwargs)
- vectorsearch.add_texts(texts, metadatas=metadatas, ids=ids, refresh_indices=refresh_indices)
+ vectorsearch.add_texts(texts,
+ metadatas=metadatas,
+ ids=ids,
+ refresh_indices=refresh_indices)

  return vectorsearch

@@ -552,6 +552,9 @@ class Milvus(MilvusLangchain):
  Returns:
  List[Document]: Document results for search.
  """
+ if k == 0:
+ # pm need to control
+ return []
  if self.col is None:
  logger.debug('No existing collection to search.')
  return []
@@ -587,6 +590,9 @@ class Milvus(MilvusLangchain):
  Returns:
  List[Document]: Document results for search.
  """
+ if k == 0:
+ # pm need to control
+ return []
  if self.col is None:
  logger.debug('No existing collection to search.')
  return []
@@ -626,6 +632,9 @@ class Milvus(MilvusLangchain):
  Returns:
  List[float], List[Tuple[Document, any, any]]:
  """
+ if k == 0:
+ # pm need to control
+ return []
  if self.col is None:
  logger.debug('No existing collection to search.')
  return []
@@ -669,6 +678,9 @@ class Milvus(MilvusLangchain):
  Returns:
  List[Tuple[Document, float]]: Result doc and score.
  """
+ if k == 0:
+ # pm need to control
+ return []
  if self.col is None:
  logger.debug('No existing collection to search.')
  return []
@@ -741,6 +753,9 @@ class Milvus(MilvusLangchain):
  Returns:
  List[Document]: Document results for search.
  """
+ if k == 0:
+ # pm need to control
+ return []
  if self.col is None:
  logger.debug('No existing collection to search.')
  return []
@@ -790,6 +805,9 @@ class Milvus(MilvusLangchain):
  Returns:
  List[Document]: Document results for search.
  """
+ if k == 0:
+ # pm need to control
+ return []
  if self.col is None:
  logger.debug('No existing collection to search.')
  return []
@@ -908,7 +926,7 @@ class Milvus(MilvusLangchain):

  def _select_relevance_score_fn(self) -> Callable[[float], float]:
  return self._relevance_score_fn
-
+
  def query(self, expr: str, timeout: Optional[int] = None, **kwargs: Any) -> List[Document]:
  output_fields = self.fields[:]
  output_fields.remove(self._vector_field)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: bisheng-langchain
- Version: 0.3.0
+ Version: 0.3.0rc0
  Summary: bisheng langchain modules
  Home-page: https://github.com/dataelement/bisheng
  Author: DataElem
@@ -8,10 +8,10 @@ bisheng_langchain/agents/chatglm_functions_agent/prompt.py,sha256=OiBTRUOhvhSyO2
  bisheng_langchain/agents/llm_functions_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bisheng_langchain/agents/llm_functions_agent/base.py,sha256=DbykNAk3vU2sfTPTSM2KotHygXgzAJSUmo4tA0h9ezc,12296
  bisheng_langchain/autogen_role/__init__.py,sha256=MnTGbAOK770JM9l95Qcxu93s2gNAmhlil7K9HdFG81o,430
- bisheng_langchain/autogen_role/assistant.py,sha256=VGCoxJaRxRG6ZIJa2TsxcLZbMbF4KC8PRB76DOuznNU,4736
- bisheng_langchain/autogen_role/custom.py,sha256=8xxtAzNF_N1fysyChynVD19t659Qvtcyj_LNiOrE7ew,2499
- bisheng_langchain/autogen_role/groupchat_manager.py,sha256=O9XIove5yzyF_g3K5DnF-Fasdx0sUrRWMogYgEDYJAI,2314
- bisheng_langchain/autogen_role/user.py,sha256=lISbJN5yFsUXHnDCUwr5t6R8O8K3dOMspH4l4_kITnE,5885
+ bisheng_langchain/autogen_role/assistant.py,sha256=rqUaD6fbW6d1jtzfrUQv5pJMKJgVGLagllz8LvzPCxY,4657
+ bisheng_langchain/autogen_role/custom.py,sha256=vAyEGxnmV9anyLL12v4ZB_A2VOPwdl-kjGP037I8jPw,2464
+ bisheng_langchain/autogen_role/groupchat_manager.py,sha256=AybsH3duoAFpo3bojOYVeSOE4iYkkbgmYIga6m2Jj_Y,2234
+ bisheng_langchain/autogen_role/user.py,sha256=fbaORhC7oQjxGhc2RYIWpELdIogFBsgqgQUhZsK6Osk,5715
  bisheng_langchain/chains/__init__.py,sha256=oxN2tUMt_kNxKd_FzCQ7x8xIwojtdCNNKo-DI7q0unM,759
  bisheng_langchain/chains/loader_output.py,sha256=02ZercAFaudStTZ4t7mcVkGRj5pD78HZ6NO8HbmbDH8,1903
  bisheng_langchain/chains/transform.py,sha256=G2fMqoMB62e03ES--aoVjEo06FzYWb87jCt3EOsiwwg,2805
@@ -81,11 +81,11 @@ bisheng_langchain/gpts/prompts/opening_dialog_prompt.py,sha256=U6SDslWuXAB1ZamLZ
  bisheng_langchain/gpts/prompts/select_tools_prompt.py,sha256=AyvVnrLEsQy7RHuGTPkcrMUxgA98Q0TzF-xweoc7GyY,1400
  bisheng_langchain/gpts/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bisheng_langchain/gpts/tools/api_tools/__init__.py,sha256=mrmTV5bT5R1mEx9hbMAWKzNAAC4EL6biNn53dx5lYsc,1593
- bisheng_langchain/gpts/tools/api_tools/base.py,sha256=TF5MW0e62YvcfABp_-U32ESMKvN9CXPFKqiCeaZ3xFk,3458
+ bisheng_langchain/gpts/tools/api_tools/base.py,sha256=t0gFRiXD-2InCHNDU_T1H0eSMXsWP_2sHBeLJ0bZOkc,3594
  bisheng_langchain/gpts/tools/api_tools/flow.py,sha256=u1_ASWlCcZarKR-293kACB_qQ1RzJuzPC3YZSl2JR-E,1814
  bisheng_langchain/gpts/tools/api_tools/macro_data.py,sha256=rlFNhjJ3HEHfWeW9Wqb27eeF1Q1Qmd2SA8VfgUK4ACs,19270
- bisheng_langchain/gpts/tools/api_tools/sina.py,sha256=DCDuG-gxyFO2LCPdT-oy358iyfTMyMTP0-6awXYEfpg,9277
- bisheng_langchain/gpts/tools/api_tools/tianyancha.py,sha256=sQbjPt8K0dLupFprWwc_z938DBB8nB7ydyIV5frWSJ0,7461
+ bisheng_langchain/gpts/tools/api_tools/sina.py,sha256=tY4MXHNBEUiBN1wld2un_w7nHW_njXV7haLo3sgpDf0,9502
+ bisheng_langchain/gpts/tools/api_tools/tianyancha.py,sha256=abDAz-yAH1-2rKiSmZ6TgnrNUnpgAZpDY8oDiWfWapc,6684
  bisheng_langchain/gpts/tools/bing_search/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bisheng_langchain/gpts/tools/bing_search/tool.py,sha256=v_VlqcMplITA5go5qWA4qZ5p43E1-1s0bzmyY7H0hqY,1710
  bisheng_langchain/gpts/tools/calculator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -105,10 +105,10 @@ bisheng_langchain/retrievers/mix_es_vector.py,sha256=dSrrsuMPSgGiu181EOzACyIKiDX
  bisheng_langchain/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bisheng_langchain/utils/requests.py,sha256=vWGKyNTxApVeaVdKxqACfIT1Q8wMy-jC3kUv2Ce9Mzc,8688
  bisheng_langchain/vectorstores/__init__.py,sha256=zCZgDe7LyQ0iDkfcm5UJ5NxwKQSRHnqrsjx700Fy11M,213
- bisheng_langchain/vectorstores/elastic_keywords_search.py,sha256=ACUzgeTwzVOVrm0EqBXF_VhzwrWZJbKYQgqNSW5VhbQ,12929
- bisheng_langchain/vectorstores/milvus.py,sha256=hk1XqmWoz04lltubzRcZHEcXXFMkxMeK84hH0GZoo1c,35857
+ bisheng_langchain/vectorstores/elastic_keywords_search.py,sha256=JV_GM40cYx0PtPPvH2JYxtsMV0psSW2CDKagpR4M_0o,13286
+ bisheng_langchain/vectorstores/milvus.py,sha256=lrnezKnYXhyH5M1g3a-Mcwpj9mwzAj44TKmzyUXlQYY,36297
  bisheng_langchain/vectorstores/retriever.py,sha256=hj4nAAl352EV_ANnU2OHJn7omCH3nBK82ydo14KqMH4,4353
- bisheng_langchain-0.3.0.dist-info/METADATA,sha256=lMi-o-cJ2A6Knag8E11kUld2Tv_WLpD_f0pjXPqBQ7s,2411
- bisheng_langchain-0.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- bisheng_langchain-0.3.0.dist-info/top_level.txt,sha256=Z6pPNyCo4ihyr9iqGQbH8sJiC4dAUwA_mAyGRQB5_Fs,18
- bisheng_langchain-0.3.0.dist-info/RECORD,,
+ bisheng_langchain-0.3.0rc0.dist-info/METADATA,sha256=-wUMNJfiSIK6i3D9pP74PdmDKuLtDcrW3BcNpNzqv1A,2414
+ bisheng_langchain-0.3.0rc0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ bisheng_langchain-0.3.0rc0.dist-info/top_level.txt,sha256=Z6pPNyCo4ihyr9iqGQbH8sJiC4dAUwA_mAyGRQB5_Fs,18
+ bisheng_langchain-0.3.0rc0.dist-info/RECORD,,