bisheng-langchain 0.2.3.2__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- bisheng_langchain/agents/llm_functions_agent/base.py +1 -1
- bisheng_langchain/chains/__init__.py +2 -1
- bisheng_langchain/chains/transform.py +85 -0
- bisheng_langchain/chat_models/host_llm.py +19 -5
- bisheng_langchain/chat_models/qwen.py +29 -8
- bisheng_langchain/document_loaders/custom_kv.py +1 -1
- bisheng_langchain/embeddings/host_embedding.py +9 -11
- bisheng_langchain/gpts/__init__.py +0 -0
- bisheng_langchain/gpts/agent_types/__init__.py +10 -0
- bisheng_langchain/gpts/agent_types/llm_functions_agent.py +220 -0
- bisheng_langchain/gpts/assistant.py +137 -0
- bisheng_langchain/gpts/auto_optimization.py +130 -0
- bisheng_langchain/gpts/auto_tool_selected.py +54 -0
- bisheng_langchain/gpts/load_tools.py +161 -0
- bisheng_langchain/gpts/message_types.py +11 -0
- bisheng_langchain/gpts/prompts/__init__.py +15 -0
- bisheng_langchain/gpts/prompts/assistant_prompt_opt.py +95 -0
- bisheng_langchain/gpts/prompts/base_prompt.py +1 -0
- bisheng_langchain/gpts/prompts/breif_description_prompt.py +104 -0
- bisheng_langchain/gpts/prompts/opening_dialog_prompt.py +118 -0
- bisheng_langchain/gpts/prompts/select_tools_prompt.py +29 -0
- bisheng_langchain/gpts/tools/__init__.py +0 -0
- bisheng_langchain/gpts/tools/api_tools/__init__.py +50 -0
- bisheng_langchain/gpts/tools/api_tools/base.py +90 -0
- bisheng_langchain/gpts/tools/api_tools/flow.py +59 -0
- bisheng_langchain/gpts/tools/api_tools/macro_data.py +397 -0
- bisheng_langchain/gpts/tools/api_tools/sina.py +221 -0
- bisheng_langchain/gpts/tools/api_tools/tianyancha.py +160 -0
- bisheng_langchain/gpts/tools/bing_search/__init__.py +0 -0
- bisheng_langchain/gpts/tools/bing_search/tool.py +55 -0
- bisheng_langchain/gpts/tools/calculator/__init__.py +0 -0
- bisheng_langchain/gpts/tools/calculator/tool.py +25 -0
- bisheng_langchain/gpts/tools/code_interpreter/__init__.py +0 -0
- bisheng_langchain/gpts/tools/code_interpreter/tool.py +261 -0
- bisheng_langchain/gpts/tools/dalle_image_generator/__init__.py +0 -0
- bisheng_langchain/gpts/tools/dalle_image_generator/tool.py +181 -0
- bisheng_langchain/gpts/tools/get_current_time/__init__.py +0 -0
- bisheng_langchain/gpts/tools/get_current_time/tool.py +23 -0
- bisheng_langchain/gpts/utils.py +197 -0
- bisheng_langchain/utils/requests.py +5 -1
- bisheng_langchain/vectorstores/milvus.py +1 -1
- {bisheng_langchain-0.2.3.2.dist-info → bisheng_langchain-0.3.0.dist-info}/METADATA +5 -2
- {bisheng_langchain-0.2.3.2.dist-info → bisheng_langchain-0.3.0.dist-info}/RECORD +45 -12
- {bisheng_langchain-0.2.3.2.dist-info → bisheng_langchain-0.3.0.dist-info}/WHEEL +0 -0
- {bisheng_langchain-0.2.3.2.dist-info → bisheng_langchain-0.3.0.dist-info}/top_level.txt +0 -0
bisheng_langchain/gpts/tools/dalle_image_generator/tool.py (new file)
@@ -0,0 +1,181 @@
+import logging
+import os
+from typing import Any, Dict, Mapping, Optional, Tuple, Type, Union
+
+from langchain.pydantic_v1 import BaseModel, Field
+from langchain_community.utilities.dalle_image_generator import DallEAPIWrapper
+from langchain_community.utils.openai import is_openai_v1
+from langchain_core.callbacks import CallbackManagerForToolRun
+from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
+from langchain_core.tools import BaseTool
+from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
+
+logger = logging.getLogger(__name__)
+
+
+class DallEAPIWrapper(BaseModel):
+    """Wrapper for OpenAI's DALL-E Image Generator.
+
+    https://platform.openai.com/docs/guides/images/generations?context=node
+
+    Usage instructions:
+
+    1. `pip install openai`
+    2. save your OPENAI_API_KEY in an environment variable
+    """
+
+    client: Any  #: :meta private:
+    async_client: Any = Field(default=None, exclude=True)  #: :meta private:
+    model_name: str = Field(default="dall-e-2", alias="model")
+    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    openai_api_key: Optional[str] = Field(default=None, alias="api_key")
+    """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
+    openai_api_base: Optional[str] = Field(default=None, alias="base_url")
+    """Base URL path for API requests, leave blank if not using a proxy or service
+    emulator."""
+    openai_organization: Optional[str] = Field(default=None, alias="organization")
+    """Automatically inferred from env var `OPENAI_ORG_ID` if not provided."""
+    # to support explicit proxy for OpenAI
+    openai_proxy: Optional[str] = None
+    request_timeout: Union[float, Tuple[float, float], Any, None] = Field(default=None, alias="timeout")
+    n: int = 1
+    """Number of images to generate"""
+    size: str = "1024x1024"
+    """Size of image to generate"""
+    separator: str = "\n"
+    """Separator to use when multiple URLs are returned."""
+    quality: Optional[str] = "standard"
+    """Quality of the image that will be generated"""
+    max_retries: int = 2
+    """Maximum number of retries to make when generating."""
+    default_headers: Union[Mapping[str, str], None] = None
+    default_query: Union[Mapping[str, object], None] = None
+    # Configure a custom httpx client. See the
+    # [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
+    http_client: Union[Any, None] = None
+    """Optional httpx.Client."""
+    http_async_client: Union[Any, None] = None
+    """Optional httpx.AsyncClient. Only used for async invocations. Must specify
+    http_client as well if you'd like a custom client for sync invocations."""
+
+    class Config:
+        """Configuration for this pydantic object."""
+
+        extra = Extra.forbid
+
+    @root_validator(pre=True)
+    def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        """Build extra kwargs from additional params that were passed in."""
+        all_required_field_names = get_pydantic_field_names(cls)
+        extra = values.get("model_kwargs", {})
+        for field_name in list(values):
+            if field_name in extra:
+                raise ValueError(f"Found {field_name} supplied twice.")
+            if field_name not in all_required_field_names:
+                logger.warning(
+                    f"""WARNING! {field_name} is not default parameter.
+                    {field_name} was transferred to model_kwargs.
+                    Please confirm that {field_name} is what you intended."""
+                )
+                extra[field_name] = values.pop(field_name)
+
+        invalid_model_kwargs = all_required_field_names.intersection(extra.keys())
+        if invalid_model_kwargs:
+            raise ValueError(
+                f"Parameters {invalid_model_kwargs} should be specified explicitly. "
+                f"Instead they were passed in as part of `model_kwargs` parameter."
+            )
+
+        values["model_kwargs"] = extra
+        return values
+
+    @root_validator()
+    def validate_environment(cls, values: Dict) -> Dict:
+        """Validate that api key and python package exists in environment."""
+        values["openai_api_key"] = get_from_dict_or_env(values, "openai_api_key", "OPENAI_API_KEY")
+        # Check OPENAI_ORGANIZATION for backwards compatibility.
+        values["openai_organization"] = (
+            values["openai_organization"] or os.getenv("OPENAI_ORG_ID") or os.getenv("OPENAI_ORGANIZATION") or None
+        )
+        values["openai_api_base"] = values["openai_api_base"] or os.getenv("OPENAI_API_BASE")
+        values["openai_proxy"] = get_from_dict_or_env(
+            values,
+            "openai_proxy",
+            "OPENAI_PROXY",
+            default="",
+        )
+
+        try:
+            import openai
+
+        except ImportError:
+            raise ImportError("Could not import openai python package. " "Please install it with `pip install openai`.")
+
+        if is_openai_v1():
+            client_params = {
+                "api_key": values["openai_api_key"],
+                "organization": values["openai_organization"],
+                "base_url": values["openai_api_base"],
+                "timeout": values["request_timeout"],
+                "max_retries": values["max_retries"],
+                "default_headers": values["default_headers"],
+                "default_query": values["default_query"],
+                # "http_client": values["http_client"],
+            }
+
+            if not values.get("client"):
+                sync_specific = {"http_client": values["http_client"]}
+                values["client"] = openai.OpenAI(
+                    **client_params,
+                    **sync_specific,
+                ).images
+            if not values.get("async_client"):
+                async_specific = {"http_client": values["http_async_client"]}
+                values["async_client"] = openai.AsyncOpenAI(
+                    **client_params,
+                    **async_specific,
+                ).images
+        elif not values.get("client"):
+            values["client"] = openai.Image
+        else:
+            pass
+        return values
+
+    def run(self, query: str) -> str:
+        """Run query through OpenAI and parse result."""
+        if is_openai_v1():
+            response = self.client.generate(
+                prompt=query,
+                n=self.n,
+                size=self.size,
+                model=self.model_name,
+                quality=self.quality,
+            )
+            image_urls = self.separator.join([item.url for item in response.data])
+        else:
+            response = self.client.create(prompt=query, n=self.n, size=self.size, model=self.model_name)
+            image_urls = self.separator.join([item["url"] for item in response["data"]])
+
+        return image_urls if image_urls else "No image was generated"
+
+
+class DallEInput(BaseModel):
+    query: str = Field(description="Description about image.")
+
+
+class DallEImageGenerator(BaseTool):
+
+    name: str = "dalle_image_generator"
+    description: str = (
+        "A wrapper around OpenAI DALL-E API. Useful for when you need to generate images from a text description. Input should be an image description."
+    )
+    args_schema: Type[BaseModel] = DallEInput
+    api_wrapper: DallEAPIWrapper
+
+    def _run(
+        self,
+        query: str,
+        run_manager: Optional[CallbackManagerForToolRun] = None,
+    ) -> str:
+        """Use the tool."""
+        return self.api_wrapper.run(query)
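For orientation, a minimal usage sketch of the new tool (not part of the diff): it assumes the openai package is installed, OPENAI_API_KEY is set in the environment, and the import path mirrors the file location above.

from bisheng_langchain.gpts.tools.dalle_image_generator.tool import DallEAPIWrapper, DallEImageGenerator

# The root validator reads OPENAI_API_KEY from the environment and builds the OpenAI images client.
wrapper = DallEAPIWrapper()
tool = DallEImageGenerator(api_wrapper=wrapper)

# BaseTool.run() routes the query through _run() and returns the generated image URL(s).
print(tool.run("a watercolor painting of a lighthouse at dawn"))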
bisheng_langchain/gpts/tools/get_current_time/tool.py (new file)
@@ -0,0 +1,23 @@
+from datetime import datetime
+
+import pytz
+from langchain.pydantic_v1 import BaseModel, Field
+from langchain.tools import tool
+
+
+class GetCurTimeInput(BaseModel):
+    timezone: str = Field(
+        default='Asia/Shanghai',
+        description="The timezone to get the current time in. Such as 'Asia/Shanghai','Pacific/Palau' or 'US/Mountain'.",
+    )
+
+
+@tool(args_schema=GetCurTimeInput)
+def get_current_time(timezone='Asia/Shanghai'):
+    """
+    Get the current UTC time and the time in major timezones; useful for calculations involving times and dates. Whenever a question involves time, call this tool to look up time-related information.
+    """
+    tz = pytz.timezone(timezone)
+    current_time = datetime.now(tz)
+    formatted_time = current_time.strftime("%A, %B %d, %Y %I:%M %p")
+    return formatted_time
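A quick sketch of calling the tool directly (assumes pytz is installed and that the import path mirrors the file location above):

from bisheng_langchain.gpts.tools.get_current_time.tool import get_current_time

# The @tool decorator wraps the function as a LangChain tool; pass its arguments as a dict.
print(get_current_time.invoke({"timezone": "US/Mountain"}))
# -> a string like "Monday, March 25, 2024 09:15 AM" (format determined by the strftime call above)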
bisheng_langchain/gpts/utils.py (new file)
@@ -0,0 +1,197 @@
+# This module is used to import any langchain class by name.
+
+import importlib
+from typing import Any, Type
+
+from langchain.agents import Agent
+from langchain.base_language import BaseLanguageModel
+from langchain.chains.base import Chain
+from langchain.chat_models.base import BaseChatModel
+from langchain.prompts import PromptTemplate
+from langchain.tools import BaseTool
+
+
+def import_module(module_path: str) -> Any:
+    """Import module from module path"""
+    if 'from' not in module_path:
+        # Import the module using the module path
+        return importlib.import_module(module_path)
+    # Split the module path into its components
+    _, module_path, _, object_name = module_path.split()
+
+    # Import the module using the module path
+    module = importlib.import_module(module_path)
+
+    return getattr(module, object_name)
+
+
+def import_class(class_path: str) -> Any:
+    """Import class from class path"""
+    module_path, class_name = class_path.rsplit('.', 1)
+    module = import_module(module_path)
+    return getattr(module, class_name)
+
+
+def import_by_type(_type: str, name: str) -> Any:
+    from bisheng_langchain import chat_models
+    """Import class by type and name"""
+    if _type is None:
+        raise ValueError(f'Type cannot be None. Check if {name} is in the config file.')
+    func_dict = {
+        'agents': import_agent,
+        'prompts': import_prompt,
+        'llms': {
+            'llm': import_llm,
+            'chat': import_chat_llm,
+            'contribute': import_chain_contribute_llm,
+            'chatopenai': import_chat_openai,
+        },
+        'tools': import_tool,
+        'chains': import_chain,
+        'toolkits': import_toolkit,
+        'memory': import_memory,
+        'embeddings': import_embedding,
+        'vectorstores': import_vectorstore,
+        'documentloaders': import_documentloader,
+        'textsplitters': import_textsplitter,
+        'utilities': import_utility,
+        'output_parsers': import_output_parser,
+        'retrievers': import_retriever,
+        'autogenRoles': import_autogenRoles,
+        'inputOutput': import_inputoutput,
+    }
+    if _type == 'llms':
+        if name.lower() == 'chatopenai':
+            key = 'chatopenai'
+        else:
+            key = 'contribute' if name in chat_models.__all__ else 'chat' if 'chat' in name.lower(
+            ) else 'llm'
+        loaded_func = func_dict[_type][key]  # type: ignore
+    else:
+        loaded_func = func_dict[_type]
+
+    return loaded_func(name)
+
+
+def import_inputoutput(input_output: str) -> Any:
+    """Import output parser from output parser name"""
+    return import_module(f'from bisheng_langchain.input_output import {input_output}')
+
+
+def import_output_parser(output_parser: str) -> Any:
+    """Import output parser from output parser name"""
+    return import_module(f'from langchain.output_parsers import {output_parser}')
+
+
+def import_chat_llm(llm: str) -> BaseChatModel:
+    """Import chat llm from llm name"""
+    return import_class(f'langchain.chat_models.{llm}')
+
+
+def import_chain_contribute_llm(llm: str) -> BaseChatModel:
+    """Import chat llm from llm name"""
+    return import_class(f'bisheng_langchain.chat_models.{llm}')
+
+
+def import_retriever(retriever: str) -> Any:
+    """Import retriever from retriever name"""
+    return import_module(f'from langchain.retrievers import {retriever}')
+
+
+def import_autogenRoles(autogen: str) -> Any:
+    return import_module(f'from bisheng_langchain.autogen_role import {autogen}')
+
+
+def import_memory(memory: str) -> Any:
+    """Import memory from memory name"""
+    return import_module(f'from langchain.memory import {memory}')
+
+
+def import_prompt(prompt: str) -> Type[PromptTemplate]:
+    """Import prompt from prompt name"""
+    if prompt == 'ZeroShotPrompt':
+        return import_class('langchain.prompts.PromptTemplate')
+    return import_class(f'langchain.prompts.{prompt}')
+
+
+def import_toolkit(toolkit: str) -> Any:
+    """Import toolkit from toolkit name"""
+    from bisheng.interface.toolkits.base import toolkits_creator
+    return toolkits_creator[toolkit]
+
+
+def import_agent(agent: str) -> Agent:
+    """Import agent from agent name"""
+    # check for custom agent
+    from bisheng_langchain import agents
+
+    if agent in agents.__all__:
+        return import_class(f'bisheng_langchain.agents.{agent}')
+    return import_class(f'langchain.agents.{agent}')
+
+
+def import_llm(llm: str) -> BaseLanguageModel:
+    """Import llm from llm name"""
+    return import_class(f'langchain.llms.{llm}')
+
+
+def import_chat_openai(llm: str) -> BaseLanguageModel:
+    """Import llm from llm name"""
+    return import_class(f'langchain_openai.{llm}')
+
+
+def import_tool(tool: str) -> BaseTool:
+    """Import tool from tool name"""
+    return import_class(f'langchain.tools.{tool}')
+
+
+def import_chain(chain: str) -> Type[Chain]:
+    """Import chain from chain name"""
+    from bisheng_langchain import chains
+
+    if chain in chains.__all__:
+        return import_class(f'bisheng_langchain.chains.{chain}')
+    return import_class(f'langchain.chains.{chain}')
+
+
+def import_embedding(embedding: str) -> Any:
+    """Import embedding from embedding name"""
+    from bisheng_langchain import embeddings
+
+    if embedding in embeddings.__all__:
+        return import_class(f'bisheng_langchain.embeddings.{embedding}')
+    return import_class(f'langchain.embeddings.{embedding}')
+
+
+def import_vectorstore(vectorstore: str) -> Any:
+    """Import vectorstore from vectorstore name"""
+    from bisheng_langchain import vectorstores
+
+    if vectorstore in vectorstores.__all__:
+        return import_class(f'bisheng_langchain.vectorstores.{vectorstore}')
+    return import_class(f'langchain.vectorstores.{vectorstore}')
+
+
+def import_documentloader(documentloader: str) -> Any:
+    """Import documentloader from documentloader name"""
+    from bisheng_langchain import document_loaders
+
+    if documentloader in document_loaders.__all__:
+        return import_class(f'bisheng_langchain.document_loaders.{documentloader}')
+    return import_class(f'langchain.document_loaders.{documentloader}')
+
+
+def import_textsplitter(textsplitter: str) -> Any:
+    """Import textsplitter from textsplitter name"""
+    from bisheng_langchain import text_splitter
+
+    if textsplitter in dir(text_splitter):
+        return import_class(f'bisheng_langchain.text_splitter.{textsplitter}')
+    return import_class(f'langchain.text_splitter.{textsplitter}')
+
+
+def import_utility(utility: str) -> Any:
+    """Import utility from utility name"""
+    if utility == 'SQLDatabase':
+        return import_class(f'langchain.sql_database.{utility}')
+    return import_class(f'langchain.utilities.{utility}')
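These helpers resolve type/name pairs from a flow configuration into concrete classes, preferring bisheng_langchain implementations over stock langchain ones when the name is exported by the local package. A hedged sketch of how they might be called (the names passed in are illustrative examples, not a guaranteed list):

from bisheng_langchain.gpts.utils import import_by_type, import_class

# 'chatopenai' is special-cased and resolved from langchain_openai.
ChatOpenAI = import_by_type('llms', 'ChatOpenAI')

# Resolved from bisheng_langchain.vectorstores if exported there, otherwise from langchain.vectorstores.
Milvus = import_by_type('vectorstores', 'Milvus')

# Dotted paths can also be resolved directly.
PromptTemplate = import_class('langchain.prompts.PromptTemplate')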
bisheng_langchain/utils/requests.py
@@ -138,6 +138,7 @@ class TextRequestsWrapper(BaseModel):
     headers: Optional[Dict[str, str]] = None
     aiosession: Optional[aiohttp.ClientSession] = None
     auth: Optional[Any] = None
+    request_timeout: Union[float, Tuple[float, float]] = 120

     class Config:
         """Configuration for this pydantic object."""
@@ -147,7 +148,10 @@ class TextRequestsWrapper(BaseModel):

     @property
     def requests(self) -> Requests:
-        return Requests(headers=self.headers,
+        return Requests(headers=self.headers,
+                        aiosession=self.aiosession,
+                        auth=self.auth,
+                        request_timeout=self.request_timeout)

     def get(self, url: str, **kwargs: Any) -> str:
         """GET the URL and return the text."""
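The practical effect of this change is that a timeout configured on the wrapper now propagates to the underlying Requests object (defaulting to 120 seconds per the new field). A minimal sketch, assuming TextRequestsWrapper is used directly:

from bisheng_langchain.utils.requests import TextRequestsWrapper

# request_timeout accepts a float or a (connect, read) tuple, mirroring the field definition above.
wrapper = TextRequestsWrapper(request_timeout=30)
page_text = wrapper.get("https://example.com")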
{bisheng_langchain-0.2.3.2.dist-info → bisheng_langchain-0.3.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: bisheng-langchain
-Version: 0.2.3.2
+Version: 0.3.0
 Summary: bisheng langchain modules
 Home-page: https://github.com/dataelement/bisheng
 Author: DataElem
@@ -16,7 +16,7 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Requires-Python: >=3.6
 Description-Content-Type: text/markdown
-Requires-Dist: langchain
+Requires-Dist: langchain ==0.1.12
 Requires-Dist: zhipuai
 Requires-Dist: websocket-client
 Requires-Dist: elasticsearch
@@ -28,6 +28,9 @@ Requires-Dist: pydantic ==1.10.13
 Requires-Dist: pymupdf ==1.23.8
 Requires-Dist: shapely ==2.0.2
 Requires-Dist: filetype ==1.2.0
+Requires-Dist: langgraph ==0.0.30
+Requires-Dist: openai ==1.14.3
+Requires-Dist: langchain-openai ==0.1.0

 ## What is bisheng-langchain?

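The new release pins its core LLM dependencies. After installing bisheng-langchain 0.3.0, the installed versions can be checked against the pins above with importlib.metadata, for example:

import importlib.metadata as metadata

# Expected per METADATA: langchain 0.1.12, langgraph 0.0.30, openai 1.14.3, langchain-openai 0.1.0.
for dist in ("langchain", "langgraph", "openai", "langchain-openai"):
    print(dist, metadata.version(dist))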
{bisheng_langchain-0.2.3.2.dist-info → bisheng_langchain-0.3.0.dist-info}/RECORD
@@ -6,14 +6,15 @@ bisheng_langchain/agents/chatglm_functions_agent/base.py,sha256=tyytq0XIFXpfxDP0
 bisheng_langchain/agents/chatglm_functions_agent/output_parser.py,sha256=M7vDzQFqFUMmL250FHeNKXMwatkCdD0x1D0hyqGYRAA,3497
 bisheng_langchain/agents/chatglm_functions_agent/prompt.py,sha256=OiBTRUOhvhSyO2jO2ByUUiaCrkK_tIUH9pMWWKs-aF4,992
 bisheng_langchain/agents/llm_functions_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bisheng_langchain/agents/llm_functions_agent/base.py,sha256=
+bisheng_langchain/agents/llm_functions_agent/base.py,sha256=DbykNAk3vU2sfTPTSM2KotHygXgzAJSUmo4tA0h9ezc,12296
 bisheng_langchain/autogen_role/__init__.py,sha256=MnTGbAOK770JM9l95Qcxu93s2gNAmhlil7K9HdFG81o,430
 bisheng_langchain/autogen_role/assistant.py,sha256=VGCoxJaRxRG6ZIJa2TsxcLZbMbF4KC8PRB76DOuznNU,4736
 bisheng_langchain/autogen_role/custom.py,sha256=8xxtAzNF_N1fysyChynVD19t659Qvtcyj_LNiOrE7ew,2499
 bisheng_langchain/autogen_role/groupchat_manager.py,sha256=O9XIove5yzyF_g3K5DnF-Fasdx0sUrRWMogYgEDYJAI,2314
 bisheng_langchain/autogen_role/user.py,sha256=lISbJN5yFsUXHnDCUwr5t6R8O8K3dOMspH4l4_kITnE,5885
-bisheng_langchain/chains/__init__.py,sha256=
+bisheng_langchain/chains/__init__.py,sha256=oxN2tUMt_kNxKd_FzCQ7x8xIwojtdCNNKo-DI7q0unM,759
 bisheng_langchain/chains/loader_output.py,sha256=02ZercAFaudStTZ4t7mcVkGRj5pD78HZ6NO8HbmbDH8,1903
+bisheng_langchain/chains/transform.py,sha256=G2fMqoMB62e03ES--aoVjEo06FzYWb87jCt3EOsiwwg,2805
 bisheng_langchain/chains/autogen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bisheng_langchain/chains/autogen/auto_gen.py,sha256=QIkfCO9-VN2wRkl3_TWVj-JkdL2dqMQNy93j3uB401s,3270
 bisheng_langchain/chains/combine_documents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -27,10 +28,10 @@ bisheng_langchain/chains/router/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 bisheng_langchain/chains/router/multi_rule.py,sha256=BiFryj3-7rOxfttD-MyOkKWLCSGB9LVYd2rjOsIfQC8,375
 bisheng_langchain/chains/router/rule_router.py,sha256=R2YRUnwn7s_7DbsSn27uPn4cIV0D-5iXEORXir0tNGM,1835
 bisheng_langchain/chat_models/__init__.py,sha256=4-HTLE_SXO4hmNJu6yQxiQKBt2IFca_ezllVBLmvbEE,635
-bisheng_langchain/chat_models/host_llm.py,sha256=
+bisheng_langchain/chat_models/host_llm.py,sha256=sBu_Vg-r7z6IJUV8Etwll4JTG5OvET-IXH7PZw8Ijrc,23152
 bisheng_langchain/chat_models/minimax.py,sha256=JLs_f6vWD9beZYUtjD4FG28G8tZHrGUAWOwdLIuJomw,13901
 bisheng_langchain/chat_models/proxy_llm.py,sha256=wzVBZik9WC3-f7kyQ1eu3Ooibqpcocln08knf5lV1Nw,17082
-bisheng_langchain/chat_models/qwen.py,sha256=
+bisheng_langchain/chat_models/qwen.py,sha256=W73KxDRQBUZEzttEM4K7ZzPqbN-82O6YQmpX-HB_wZU,19971
 bisheng_langchain/chat_models/sensetime.py,sha256=fuQ5yYGO5F7o7iQ7us17MlL4TAWRRFCCpNN9bAF-ydc,17056
 bisheng_langchain/chat_models/wenxin.py,sha256=OBXmFWkUWZMu1lUz6hPAEawsbAcdgMWcm9WkJJLZyng,13671
 bisheng_langchain/chat_models/xunfeiai.py,sha256=Yz09-I8u6XhGVnT5mdel15Z3CCQZqApJkgnaxyiZNFk,14037
@@ -44,7 +45,7 @@ bisheng_langchain/chat_models/interface/wenxin.py,sha256=z_K1Nj78dDYYgiVIzc5sGkO
 bisheng_langchain/chat_models/interface/xunfei.py,sha256=DPHAZM_uHg0A8GnebgkRbLENhBW7bBtRHzKC0gFKZgc,7514
 bisheng_langchain/chat_models/interface/zhipuai.py,sha256=67Ej6DRk-IlXUfEZPg-pjcYPyqZb32ClrBP-9k-3EEs,2636
 bisheng_langchain/document_loaders/__init__.py,sha256=LuQ-zMYxde2FeiEcvVtjQqnHozki5pF_pDDa88_fBdg,366
-bisheng_langchain/document_loaders/custom_kv.py,sha256=
+bisheng_langchain/document_loaders/custom_kv.py,sha256=xWUPhcr1hjbdya4zgEHG4Fl0sI4RNQ6D2vqFo0c24G8,6656
 bisheng_langchain/document_loaders/elem_html.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bisheng_langchain/document_loaders/elem_image.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bisheng_langchain/document_loaders/elem_pdf.py,sha256=K-TXILGNFLFjavhun_MFbUF4t2_WGA3Z-kbnr75lmW8,22243
@@ -56,13 +57,45 @@ bisheng_langchain/document_loaders/parsers/image.py,sha256=7Vx4dD_WiSTojS4TMIJFx
 bisheng_langchain/document_loaders/parsers/ocr_client.py,sha256=rRh1coJYn24n7FaINBZH5yO6Edm9TRywY6UOXpcerVo,1612
 bisheng_langchain/document_loaders/parsers/test_image.py,sha256=EJHozq5oFfLBlLL5Lr6XFkrkvSttPpohprs9OjDzAKM,8685
 bisheng_langchain/embeddings/__init__.py,sha256=_zLLb9cH4Ct4UpKQhtXr7V2IQ7LUnlCKkKTroTE_Enk,534
-bisheng_langchain/embeddings/host_embedding.py,sha256=
+bisheng_langchain/embeddings/host_embedding.py,sha256=eWhV6JZCclSr6jZXWbjhHURiWaUB8sC8CbgGCEa-AKk,6295
 bisheng_langchain/embeddings/huggingfacegte.py,sha256=RPfSXu7oMv6vgIjLqrPZ1Qz3K0yEuYn7VO0u7m7PzK8,3192
 bisheng_langchain/embeddings/huggingfacemultilingual.py,sha256=g7-yKJ-qIPUZQaRnGz312S-f3aJCGcdHemAR3znE-uo,3415
 bisheng_langchain/embeddings/wenxin.py,sha256=6zx53tSUguvny4gGe5CTmfwV-QtGqKmcT-Jlgf2xVUs,4737
 bisheng_langchain/embeddings/interface/__init__.py,sha256=GNY3tibpRxpAdAfSvQmXBKo0xKSLke_9y4clofi_WOE,98
 bisheng_langchain/embeddings/interface/types.py,sha256=VdurbtsnjCPdlOjPFcK2Mg6r9bJYYHb3tepvkk-y3nM,461
 bisheng_langchain/embeddings/interface/wenxin.py,sha256=5d9gI4enmfkD80s0FHKiDt33O0mwM8Xc5WTubnMUy8c,3104
+bisheng_langchain/gpts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/assistant.py,sha256=KCYPU1Bs4GtWcLk9Ya2NuQyXE0Twn7-92eSBTIzpq7I,5083
+bisheng_langchain/gpts/auto_optimization.py,sha256=Vf3zzYEpVf916dYt4RV9E1uw4vTXjE7ZXogUIdxjHYU,3786
+bisheng_langchain/gpts/auto_tool_selected.py,sha256=25lFLadqQ36t63EKMEF3zJOG_jkoRB9IfP5eRkY1JZo,1777
+bisheng_langchain/gpts/load_tools.py,sha256=C7tlRLy4wAArr9qtkRl9dW6QXdspLLbcv0UvulW9A9U,6345
+bisheng_langchain/gpts/message_types.py,sha256=7EJOx62j9E1U67jxWgxE_I7a8IjAvvKANknXkD2gFm0,213
+bisheng_langchain/gpts/utils.py,sha256=t3YDxaJ0OYd6EKsek7PJFRYnsezwzEFK5oVU-PRbu5g,6671
+bisheng_langchain/gpts/agent_types/__init__.py,sha256=bg0zlTYGfNXoSBqcICHlzNpVQbejMYeyji_dzvP5qQ0,261
+bisheng_langchain/gpts/agent_types/llm_functions_agent.py,sha256=ynFHXuaqABeiKvhcetOFQPyQMlNtEAdtTccoIwiJbGQ,8419
+bisheng_langchain/gpts/prompts/__init__.py,sha256=IfuoxVpsSLKJtDx0aJbRgnSZYZr_kDPsL92CvefzF-s,568
+bisheng_langchain/gpts/prompts/assistant_prompt_opt.py,sha256=TZsRK4XPMrUhGg0PoMyiE3wE-aG34UmlVflkCl_c0QI,4151
+bisheng_langchain/gpts/prompts/base_prompt.py,sha256=v2eO0c6RF8e6MtGdleHs5B4YTkikg6IZUuBvL2zvyOI,55
+bisheng_langchain/gpts/prompts/breif_description_prompt.py,sha256=w4A5et0jB-GkxEMQBp4i6GKX3RkVeu7NzWEjOZZAicM,5336
+bisheng_langchain/gpts/prompts/opening_dialog_prompt.py,sha256=U6SDslWuXAB1ZamLZVujpEjAY8L244IZfD2qFVRTzPM,5962
+bisheng_langchain/gpts/prompts/select_tools_prompt.py,sha256=AyvVnrLEsQy7RHuGTPkcrMUxgA98Q0TzF-xweoc7GyY,1400
+bisheng_langchain/gpts/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/tools/api_tools/__init__.py,sha256=mrmTV5bT5R1mEx9hbMAWKzNAAC4EL6biNn53dx5lYsc,1593
+bisheng_langchain/gpts/tools/api_tools/base.py,sha256=TF5MW0e62YvcfABp_-U32ESMKvN9CXPFKqiCeaZ3xFk,3458
+bisheng_langchain/gpts/tools/api_tools/flow.py,sha256=u1_ASWlCcZarKR-293kACB_qQ1RzJuzPC3YZSl2JR-E,1814
+bisheng_langchain/gpts/tools/api_tools/macro_data.py,sha256=rlFNhjJ3HEHfWeW9Wqb27eeF1Q1Qmd2SA8VfgUK4ACs,19270
+bisheng_langchain/gpts/tools/api_tools/sina.py,sha256=DCDuG-gxyFO2LCPdT-oy358iyfTMyMTP0-6awXYEfpg,9277
+bisheng_langchain/gpts/tools/api_tools/tianyancha.py,sha256=sQbjPt8K0dLupFprWwc_z938DBB8nB7ydyIV5frWSJ0,7461
+bisheng_langchain/gpts/tools/bing_search/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/tools/bing_search/tool.py,sha256=v_VlqcMplITA5go5qWA4qZ5p43E1-1s0bzmyY7H0hqY,1710
+bisheng_langchain/gpts/tools/calculator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/tools/calculator/tool.py,sha256=iwGPE7jvxZg_jUL2Aq9HHwnRJrF9-ongwrsBX6uk1U0,705
+bisheng_langchain/gpts/tools/code_interpreter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/tools/code_interpreter/tool.py,sha256=PGipxd-qtW31GonRGfGow7nylI-osSnmBsvEJDlMUCE,8717
+bisheng_langchain/gpts/tools/dalle_image_generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/tools/dalle_image_generator/tool.py,sha256=mhxdNNhBESjbOy30Rnp6hQhnrV4evQpv-B1fFXcU-68,7528
+bisheng_langchain/gpts/tools/get_current_time/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bisheng_langchain/gpts/tools/get_current_time/tool.py,sha256=3uvk7Yu07qhZy1sBrFMhGEwyxEGMB8vubizs9x-6DG8,801
 bisheng_langchain/input_output/__init__.py,sha256=sW_GB7MlrHYsqY1Meb_LeimQqNsMz1gH-00Tqb2BUyM,153
 bisheng_langchain/input_output/input.py,sha256=I5YDmgbvvj1o2lO9wi8LE37wM0wP5jkhUREU32YrZMQ,1094
 bisheng_langchain/input_output/output.py,sha256=6U-az6-Cwz665C2YmcH3SYctWVjPFjmW8s70CA_qphk,11585
@@ -70,12 +103,12 @@ bisheng_langchain/retrievers/__init__.py,sha256=XqBeNyPyNCJf-SzNBiFlkxtjrtHUFTTi
 bisheng_langchain/retrievers/ensemble.py,sha256=nLsTKpJmaigrECCWzrvDUwhE-qs9Mg7gPRXfPo5qFMI,5942
 bisheng_langchain/retrievers/mix_es_vector.py,sha256=dSrrsuMPSgGiu181EOzACyIKiDXR0qNBQz_914USD3E,4465
 bisheng_langchain/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bisheng_langchain/utils/requests.py,sha256=
+bisheng_langchain/utils/requests.py,sha256=vWGKyNTxApVeaVdKxqACfIT1Q8wMy-jC3kUv2Ce9Mzc,8688
 bisheng_langchain/vectorstores/__init__.py,sha256=zCZgDe7LyQ0iDkfcm5UJ5NxwKQSRHnqrsjx700Fy11M,213
 bisheng_langchain/vectorstores/elastic_keywords_search.py,sha256=ACUzgeTwzVOVrm0EqBXF_VhzwrWZJbKYQgqNSW5VhbQ,12929
-bisheng_langchain/vectorstores/milvus.py,sha256
+bisheng_langchain/vectorstores/milvus.py,sha256=hk1XqmWoz04lltubzRcZHEcXXFMkxMeK84hH0GZoo1c,35857
 bisheng_langchain/vectorstores/retriever.py,sha256=hj4nAAl352EV_ANnU2OHJn7omCH3nBK82ydo14KqMH4,4353
-bisheng_langchain-0.
-bisheng_langchain-0.
-bisheng_langchain-0.
-bisheng_langchain-0.
+bisheng_langchain-0.3.0.dist-info/METADATA,sha256=lMi-o-cJ2A6Knag8E11kUld2Tv_WLpD_f0pjXPqBQ7s,2411
+bisheng_langchain-0.3.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+bisheng_langchain-0.3.0.dist-info/top_level.txt,sha256=Z6pPNyCo4ihyr9iqGQbH8sJiC4dAUwA_mAyGRQB5_Fs,18
+bisheng_langchain-0.3.0.dist-info/RECORD,,