dhisana 0.0.1.dev243__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dhisana/__init__.py +1 -0
- dhisana/cli/__init__.py +1 -0
- dhisana/cli/cli.py +20 -0
- dhisana/cli/datasets.py +27 -0
- dhisana/cli/models.py +26 -0
- dhisana/cli/predictions.py +20 -0
- dhisana/schemas/__init__.py +1 -0
- dhisana/schemas/common.py +399 -0
- dhisana/schemas/sales.py +965 -0
- dhisana/ui/__init__.py +1 -0
- dhisana/ui/components.py +472 -0
- dhisana/utils/__init__.py +1 -0
- dhisana/utils/add_mapping.py +352 -0
- dhisana/utils/agent_tools.py +51 -0
- dhisana/utils/apollo_tools.py +1597 -0
- dhisana/utils/assistant_tool_tag.py +4 -0
- dhisana/utils/built_with_api_tools.py +282 -0
- dhisana/utils/cache_output_tools.py +98 -0
- dhisana/utils/cache_output_tools_local.py +78 -0
- dhisana/utils/check_email_validity_tools.py +717 -0
- dhisana/utils/check_for_intent_signal.py +107 -0
- dhisana/utils/check_linkedin_url_validity.py +209 -0
- dhisana/utils/clay_tools.py +43 -0
- dhisana/utils/clean_properties.py +135 -0
- dhisana/utils/company_utils.py +60 -0
- dhisana/utils/compose_salesnav_query.py +259 -0
- dhisana/utils/compose_search_query.py +759 -0
- dhisana/utils/compose_three_step_workflow.py +234 -0
- dhisana/utils/composite_tools.py +137 -0
- dhisana/utils/dataframe_tools.py +237 -0
- dhisana/utils/domain_parser.py +45 -0
- dhisana/utils/email_body_utils.py +72 -0
- dhisana/utils/email_parse_helpers.py +132 -0
- dhisana/utils/email_provider.py +375 -0
- dhisana/utils/enrich_lead_information.py +933 -0
- dhisana/utils/extract_email_content_for_llm.py +101 -0
- dhisana/utils/fetch_openai_config.py +129 -0
- dhisana/utils/field_validators.py +426 -0
- dhisana/utils/g2_tools.py +104 -0
- dhisana/utils/generate_content.py +41 -0
- dhisana/utils/generate_custom_message.py +271 -0
- dhisana/utils/generate_email.py +278 -0
- dhisana/utils/generate_email_response.py +465 -0
- dhisana/utils/generate_flow.py +102 -0
- dhisana/utils/generate_leads_salesnav.py +303 -0
- dhisana/utils/generate_linkedin_connect_message.py +224 -0
- dhisana/utils/generate_linkedin_response_message.py +317 -0
- dhisana/utils/generate_structured_output_internal.py +462 -0
- dhisana/utils/google_custom_search.py +267 -0
- dhisana/utils/google_oauth_tools.py +727 -0
- dhisana/utils/google_workspace_tools.py +1294 -0
- dhisana/utils/hubspot_clearbit.py +96 -0
- dhisana/utils/hubspot_crm_tools.py +2440 -0
- dhisana/utils/instantly_tools.py +149 -0
- dhisana/utils/linkedin_crawler.py +168 -0
- dhisana/utils/lusha_tools.py +333 -0
- dhisana/utils/mailgun_tools.py +156 -0
- dhisana/utils/mailreach_tools.py +123 -0
- dhisana/utils/microsoft365_tools.py +455 -0
- dhisana/utils/openai_assistant_and_file_utils.py +267 -0
- dhisana/utils/openai_helpers.py +977 -0
- dhisana/utils/openapi_spec_to_tools.py +45 -0
- dhisana/utils/openapi_tool/__init__.py +1 -0
- dhisana/utils/openapi_tool/api_models.py +633 -0
- dhisana/utils/openapi_tool/convert_openai_spec_to_tool.py +271 -0
- dhisana/utils/openapi_tool/openapi_tool.py +319 -0
- dhisana/utils/parse_linkedin_messages_txt.py +100 -0
- dhisana/utils/profile.py +37 -0
- dhisana/utils/proxy_curl_tools.py +1226 -0
- dhisana/utils/proxycurl_search_leads.py +426 -0
- dhisana/utils/python_function_to_tools.py +83 -0
- dhisana/utils/research_lead.py +176 -0
- dhisana/utils/sales_navigator_crawler.py +1103 -0
- dhisana/utils/salesforce_crm_tools.py +477 -0
- dhisana/utils/search_router.py +131 -0
- dhisana/utils/search_router_jobs.py +51 -0
- dhisana/utils/sendgrid_tools.py +162 -0
- dhisana/utils/serarch_router_local_business.py +75 -0
- dhisana/utils/serpapi_additional_tools.py +290 -0
- dhisana/utils/serpapi_google_jobs.py +117 -0
- dhisana/utils/serpapi_google_search.py +188 -0
- dhisana/utils/serpapi_local_business_search.py +129 -0
- dhisana/utils/serpapi_search_tools.py +852 -0
- dhisana/utils/serperdev_google_jobs.py +125 -0
- dhisana/utils/serperdev_local_business.py +154 -0
- dhisana/utils/serperdev_search.py +233 -0
- dhisana/utils/smtp_email_tools.py +582 -0
- dhisana/utils/test_connect.py +2087 -0
- dhisana/utils/trasform_json.py +173 -0
- dhisana/utils/web_download_parse_tools.py +189 -0
- dhisana/utils/workflow_code_model.py +5 -0
- dhisana/utils/zoominfo_tools.py +357 -0
- dhisana/workflow/__init__.py +1 -0
- dhisana/workflow/agent.py +18 -0
- dhisana/workflow/flow.py +44 -0
- dhisana/workflow/task.py +43 -0
- dhisana/workflow/test.py +90 -0
- dhisana-0.0.1.dev243.dist-info/METADATA +43 -0
- dhisana-0.0.1.dev243.dist-info/RECORD +102 -0
- dhisana-0.0.1.dev243.dist-info/WHEEL +5 -0
- dhisana-0.0.1.dev243.dist-info/entry_points.txt +2 -0
- dhisana-0.0.1.dev243.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Vector-store and file helpers that work with **either** OpenAI or Azure OpenAI,
|
|
3
|
+
using the shared factory functions defined in `dhisana.utils.fetch_openai_config`.
|
|
4
|
+
|
|
5
|
+
Only the client initialisation lines changed; all business logic is untouched.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
import logging
import re
import traceback
from typing import Any, Dict, List, Optional, Tuple

import openai  # still needed for openai.NotFoundError
from fastapi import HTTPException

from dhisana.utils.fetch_openai_config import (
    create_openai_client,  # synchronous client
)
|
|
20
|
+
|
|
21
|
+
# ---------------------------------------------------------------------------
|
|
22
|
+
# Vector-store helpers
|
|
23
|
+
# ---------------------------------------------------------------------------
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
async def create_vector_store(
    vector_store_name: str,
    tool_config: Optional[List[Dict]] = None,
) -> Dict[str, Any]:
    """Create a vector store and return a small metadata summary.

    The requested name is lower-cased, runs of characters outside
    ``[a-z0-9_]`` are collapsed to a single underscore, and the result is
    truncated to 64 characters before the store is created.

    Raises:
        HTTPException: With status 400 when creation fails for any reason.
    """
    safe_name = re.sub(r"[^a-z0-9_]+", "_", vector_store_name.lower())[:64]
    client = create_openai_client(tool_config)

    try:
        store = client.vector_stores.create(name=safe_name)
        # Surface only the fields callers need rather than the raw SDK object.
        return {
            "id": store.id,
            "name": store.name,
            "created_at": store.created_at,
            "file_count": store.file_counts.completed,
        }
    except Exception as e:
        logging.error(f"Error creating vector store: {e}\n{traceback.format_exc()}")
        raise HTTPException(status_code=400, detail=str(e))
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
async def delete_vector_store(
    vector_store_id: str,
    tool_config: Optional[List[Dict]] = None,
) -> None:
    """Remove the vector store identified by *vector_store_id*.

    Raises:
        HTTPException: With status 400 when deletion fails for any reason.
    """
    client = create_openai_client(tool_config)
    try:
        client.vector_stores.delete(
            vector_store_id=vector_store_id,
        )
    except Exception as e:
        logging.error(f"Error deleting vector store {vector_store_id}: {e}")
        raise HTTPException(status_code=400, detail=str(e))
|
|
58
|
+
|
|
59
|
+
# ---------------------------------------------------------------------------
|
|
60
|
+
# File-upload helpers
|
|
61
|
+
# ---------------------------------------------------------------------------
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
async def upload_file_openai_and_vector_store(
    file_path_or_bytes: Any,
    file_name: str,
    mime_type: str,
    vector_store_id: str,
    tool_config: Optional[List[Dict]] = None,
):
    """Upload a file and attach it to a vector store (purpose = assistants / vision).

    Args:
        file_path_or_bytes: Filesystem path (``str``) or raw ``bytes`` content.
        file_name: Name recorded for the upload when raw bytes are given.
        mime_type: MIME type; JPEG/PNG uploads use the ``vision`` purpose,
            everything else ``assistants``.
        vector_store_id: Store that ``assistants``-purpose uploads are
            attached to (vision uploads are never attached).
        tool_config: Optional provider configuration for the client factory.

    Returns:
        The uploaded file object from the OpenAI SDK.

    Raises:
        HTTPException: With status 400 on any upload or attachment failure
            (including an unsupported ``file_path_or_bytes`` type).
    """
    client = create_openai_client(tool_config)
    purpose = "vision" if mime_type in {"image/jpeg", "image/png"} else "assistants"

    try:
        if isinstance(file_path_or_bytes, str):
            # Context manager closes the handle even when the upload raises;
            # the original passed a bare open() and leaked the file object.
            with open(file_path_or_bytes, "rb") as fh:
                file_upload = client.files.create(file=fh, purpose=purpose)
        elif isinstance(file_path_or_bytes, bytes):
            # The SDK accepts a (name, data, mime_type) tuple for in-memory content.
            file_upload = client.files.create(
                file=(file_name, file_path_or_bytes, mime_type),
                purpose=purpose,
            )
        else:
            raise ValueError("Unknown file content type. Must be path or bytes.")

        # Vision uploads cannot live in a vector store; attach assistants files only.
        if purpose == "assistants" and vector_store_id:
            client.vector_stores.files.create(
                vector_store_id=vector_store_id, file_id=file_upload.id
            )
        return file_upload
    except Exception as e:
        logging.error(f"Error uploading file {file_name}: {e}\n{traceback.format_exc()}")
        raise HTTPException(status_code=400, detail=str(e))
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
async def upload_file_openai(
    file_path_or_bytes: Any,
    file_name: str,
    mime_type: str,
    tool_config: Optional[List[Dict]] = None,
):
    """Upload a standalone file (not attached to a vector store).

    Args:
        file_path_or_bytes: Filesystem path (``str``) or raw ``bytes`` content.
        file_name: Name recorded for the upload when raw bytes are given.
        mime_type: MIME type; JPEG/PNG uploads use the ``vision`` purpose,
            everything else ``assistants``.
        tool_config: Optional provider configuration for the client factory.

    Returns:
        The uploaded file object from the OpenAI SDK.

    Raises:
        HTTPException: With status 400 on any upload failure.
    """
    client = create_openai_client(tool_config)
    purpose = "vision" if mime_type in {"image/jpeg", "image/png"} else "assistants"

    try:
        if isinstance(file_path_or_bytes, str):
            # Context manager closes the handle even when the upload raises;
            # the original passed a bare open() and leaked the file object.
            with open(file_path_or_bytes, "rb") as fh:
                file_upload = client.files.create(file=fh, purpose=purpose)
        else:
            # Any non-path input is treated as in-memory content; the SDK
            # accepts a (name, data, mime_type) tuple.
            file_upload = client.files.create(
                file=(file_name, file_path_or_bytes, mime_type),
                purpose=purpose,
            )
        return file_upload
    except Exception as e:
        logging.error(f"Error uploading file {file_name}: {e}\n{traceback.format_exc()}")
        raise HTTPException(status_code=400, detail=str(e))
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
async def attach_file_to_vector_store(
    file_id: str,
    vector_store_id: str,
    tool_config: Optional[List[Dict]] = None,
):
    """Link an already-uploaded file to the given vector store.

    Returns the SDK's vector-store/file association object.

    Raises:
        HTTPException: With status 400 when the attachment fails.
    """
    client = create_openai_client(tool_config)
    try:
        return client.vector_stores.files.create(
            vector_store_id=vector_store_id,
            file_id=file_id,
        )
    except Exception as e:
        logging.error(
            f"Error attaching file {file_id} to vector store {vector_store_id}: {e}"
        )
        raise HTTPException(status_code=400, detail=str(e))
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
async def delete_files(
    file_ids: List[str],
    vector_store_id: Optional[str] = None,
    tool_config: Optional[List[Dict]] = None,
):
    """Best-effort bulk delete: detach from the store (if given), then delete storage.

    Each failure is handled per file so one bad ID never aborts the rest of
    the batch: a missing file logs a warning, any other error is logged with
    a traceback and skipped. Nothing is returned and nothing is raised.
    """
    client = create_openai_client(tool_config)

    for fid in file_ids:
        try:
            # Detach from the vector store first; a NotFound here also skips
            # the storage delete below for this file.
            if vector_store_id:
                client.vector_stores.files.delete(
                    vector_store_id=vector_store_id, file_id=fid
                )
            # Remove the underlying file object from OpenAI storage.
            client.files.delete(file_id=fid)
        except openai.NotFoundError:
            logging.warning(f"File not found: {fid}")
        except Exception as e:
            logging.error(f"Error deleting file {fid}: {e}\n{traceback.format_exc()}")
|
|
163
|
+
|
|
164
|
+
# ---------------------------------------------------------------------------
|
|
165
|
+
# RAG / Responses helpers
|
|
166
|
+
# ---------------------------------------------------------------------------
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
async def run_file_search(
    query: str,
    vector_store_id: str,
    model: str = "gpt-5.1-chat",
    max_num_results: int = 5,
    store: bool = True,
    tool_config: Optional[List[Dict]] = None,
) -> Dict[str, Any]:
    """Single-shot file_search + answer with the new Responses API.

    Issues one ``responses.create`` call with a ``file_search`` tool bound to
    *vector_store_id* and returns a dict with keys ``answer`` (str),
    ``retrieved_files`` (de-duplicated cited filenames) and ``annotations``.

    Raises:
        HTTPException: With status 400 if the API call fails.
    """
    client = create_openai_client(tool_config)

    try:
        rsp = client.responses.create(
            input=query,
            model=model,
            store=store,
            tools=[
                {
                    "type": "file_search",
                    "vector_store_ids": [vector_store_id],
                    "max_num_results": max_num_results,
                }
            ],
        )

        # NOTE(review): this assumes the tool-call item is output[0] and the
        # assistant message is output[1] — confirm that ordering holds for
        # the models in use before relying on it.
        if len(rsp.output) > 1 and rsp.output[1].content:
            fs_chunk = rsp.output[1].content[0]
            annotations = fs_chunk.annotations or []
            # Set comprehension de-duplicates cited filenames.
            retrieved_files = list({ann.filename for ann in annotations})
            return {
                "answer": fs_chunk.text,
                "retrieved_files": retrieved_files,
                "annotations": annotations,
            }

        # Fallback: no second output item, so return the aggregate text with
        # no citations.
        return {
            "answer": rsp.output_text,
            "retrieved_files": [],
            "annotations": [],
        }
    except Exception as e:
        logging.error(f"Error in run_file_search: {e}\n{traceback.format_exc()}")
        raise HTTPException(status_code=400, detail=str(e))
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
async def run_response_text(
    prompt: str,
    model: str = "gpt-5.1-chat",
    max_tokens: int = 2048,
    store: bool = True,
    tool_config: Optional[List[Dict]] = None,
) -> Tuple[str, str]:
    """Plain text completion via the Responses API.

    Args:
        prompt: User prompt, sent as a single-message conversation.
        max_tokens: Output-token cap, forwarded as ``max_output_tokens``.
        store: Whether the API should persist the response.
        tool_config: Optional provider configuration for the client factory.

    Returns:
        ``(text, "success")`` on success, ``(error_message, "error")`` on
        failure — this function never raises.
    """
    client = create_openai_client(tool_config)

    try:
        rsp = client.responses.create(
            input=[{"role": "user", "content": prompt}],
            model=model,
            # The Responses API parameter is max_output_tokens; `max_tokens`
            # belongs to the legacy chat/completions APIs and would be
            # rejected here.
            max_output_tokens=max_tokens,
            store=store,
        )
        return rsp.output_text, "success"
    except Exception as e:
        logging.error(f"Error in run_response_text: {e}\n{traceback.format_exc()}")
        return f"An error occurred: {e}", "error"
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
async def run_response_structured(
    prompt: str,
    response_format: dict,
    model: str = "gpt-5.1-chat",
    max_tokens: int = 1024,
    store: bool = True,
    tool_config: Optional[List[Dict]] = None,
) -> Tuple[Any, str]:
    """Structured JSON output via Responses API.

    Args:
        prompt: User prompt, sent as a single-message conversation.
        response_format: Format spec passed as ``text={"format": ...}``.
        max_tokens: Output-token cap, forwarded as ``max_output_tokens``.
        store: Whether the API should persist the response.
        tool_config: Optional provider configuration for the client factory.

    Returns:
        ``(parsed_json, "success")`` when the model output parses as JSON;
        otherwise ``(raw_text_or_message, "error")`` — never raises.
    """
    client = create_openai_client(tool_config)

    try:
        rsp = client.responses.create(
            input=[{"role": "user", "content": prompt}],
            model=model,
            # The Responses API parameter is max_output_tokens; `max_tokens`
            # belongs to the legacy chat/completions APIs and would be
            # rejected here.
            max_output_tokens=max_tokens,
            store=store,
            text={"format": response_format},
        )
        if rsp.output:
            raw = rsp.output[0].content[0].text
            try:
                return json.loads(raw), "success"
            except json.JSONDecodeError:
                # Model produced non-JSON text; hand it back for the caller
                # to inspect.
                return raw, "error"
        return "No output returned", "error"
    except Exception as e:
        logging.error(
            f"Error in run_response_structured: {e}\n{traceback.format_exc()}"
        )
        return f"An error occurred: {e}", "error"
|