dhisana-0.0.1.dev243-py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in the public registry.
- dhisana/__init__.py +1 -0
- dhisana/cli/__init__.py +1 -0
- dhisana/cli/cli.py +20 -0
- dhisana/cli/datasets.py +27 -0
- dhisana/cli/models.py +26 -0
- dhisana/cli/predictions.py +20 -0
- dhisana/schemas/__init__.py +1 -0
- dhisana/schemas/common.py +399 -0
- dhisana/schemas/sales.py +965 -0
- dhisana/ui/__init__.py +1 -0
- dhisana/ui/components.py +472 -0
- dhisana/utils/__init__.py +1 -0
- dhisana/utils/add_mapping.py +352 -0
- dhisana/utils/agent_tools.py +51 -0
- dhisana/utils/apollo_tools.py +1597 -0
- dhisana/utils/assistant_tool_tag.py +4 -0
- dhisana/utils/built_with_api_tools.py +282 -0
- dhisana/utils/cache_output_tools.py +98 -0
- dhisana/utils/cache_output_tools_local.py +78 -0
- dhisana/utils/check_email_validity_tools.py +717 -0
- dhisana/utils/check_for_intent_signal.py +107 -0
- dhisana/utils/check_linkedin_url_validity.py +209 -0
- dhisana/utils/clay_tools.py +43 -0
- dhisana/utils/clean_properties.py +135 -0
- dhisana/utils/company_utils.py +60 -0
- dhisana/utils/compose_salesnav_query.py +259 -0
- dhisana/utils/compose_search_query.py +759 -0
- dhisana/utils/compose_three_step_workflow.py +234 -0
- dhisana/utils/composite_tools.py +137 -0
- dhisana/utils/dataframe_tools.py +237 -0
- dhisana/utils/domain_parser.py +45 -0
- dhisana/utils/email_body_utils.py +72 -0
- dhisana/utils/email_parse_helpers.py +132 -0
- dhisana/utils/email_provider.py +375 -0
- dhisana/utils/enrich_lead_information.py +933 -0
- dhisana/utils/extract_email_content_for_llm.py +101 -0
- dhisana/utils/fetch_openai_config.py +129 -0
- dhisana/utils/field_validators.py +426 -0
- dhisana/utils/g2_tools.py +104 -0
- dhisana/utils/generate_content.py +41 -0
- dhisana/utils/generate_custom_message.py +271 -0
- dhisana/utils/generate_email.py +278 -0
- dhisana/utils/generate_email_response.py +465 -0
- dhisana/utils/generate_flow.py +102 -0
- dhisana/utils/generate_leads_salesnav.py +303 -0
- dhisana/utils/generate_linkedin_connect_message.py +224 -0
- dhisana/utils/generate_linkedin_response_message.py +317 -0
- dhisana/utils/generate_structured_output_internal.py +462 -0
- dhisana/utils/google_custom_search.py +267 -0
- dhisana/utils/google_oauth_tools.py +727 -0
- dhisana/utils/google_workspace_tools.py +1294 -0
- dhisana/utils/hubspot_clearbit.py +96 -0
- dhisana/utils/hubspot_crm_tools.py +2440 -0
- dhisana/utils/instantly_tools.py +149 -0
- dhisana/utils/linkedin_crawler.py +168 -0
- dhisana/utils/lusha_tools.py +333 -0
- dhisana/utils/mailgun_tools.py +156 -0
- dhisana/utils/mailreach_tools.py +123 -0
- dhisana/utils/microsoft365_tools.py +455 -0
- dhisana/utils/openai_assistant_and_file_utils.py +267 -0
- dhisana/utils/openai_helpers.py +977 -0
- dhisana/utils/openapi_spec_to_tools.py +45 -0
- dhisana/utils/openapi_tool/__init__.py +1 -0
- dhisana/utils/openapi_tool/api_models.py +633 -0
- dhisana/utils/openapi_tool/convert_openai_spec_to_tool.py +271 -0
- dhisana/utils/openapi_tool/openapi_tool.py +319 -0
- dhisana/utils/parse_linkedin_messages_txt.py +100 -0
- dhisana/utils/profile.py +37 -0
- dhisana/utils/proxy_curl_tools.py +1226 -0
- dhisana/utils/proxycurl_search_leads.py +426 -0
- dhisana/utils/python_function_to_tools.py +83 -0
- dhisana/utils/research_lead.py +176 -0
- dhisana/utils/sales_navigator_crawler.py +1103 -0
- dhisana/utils/salesforce_crm_tools.py +477 -0
- dhisana/utils/search_router.py +131 -0
- dhisana/utils/search_router_jobs.py +51 -0
- dhisana/utils/sendgrid_tools.py +162 -0
- dhisana/utils/serarch_router_local_business.py +75 -0
- dhisana/utils/serpapi_additional_tools.py +290 -0
- dhisana/utils/serpapi_google_jobs.py +117 -0
- dhisana/utils/serpapi_google_search.py +188 -0
- dhisana/utils/serpapi_local_business_search.py +129 -0
- dhisana/utils/serpapi_search_tools.py +852 -0
- dhisana/utils/serperdev_google_jobs.py +125 -0
- dhisana/utils/serperdev_local_business.py +154 -0
- dhisana/utils/serperdev_search.py +233 -0
- dhisana/utils/smtp_email_tools.py +582 -0
- dhisana/utils/test_connect.py +2087 -0
- dhisana/utils/trasform_json.py +173 -0
- dhisana/utils/web_download_parse_tools.py +189 -0
- dhisana/utils/workflow_code_model.py +5 -0
- dhisana/utils/zoominfo_tools.py +357 -0
- dhisana/workflow/__init__.py +1 -0
- dhisana/workflow/agent.py +18 -0
- dhisana/workflow/flow.py +44 -0
- dhisana/workflow/task.py +43 -0
- dhisana/workflow/test.py +90 -0
- dhisana-0.0.1.dev243.dist-info/METADATA +43 -0
- dhisana-0.0.1.dev243.dist-info/RECORD +102 -0
- dhisana-0.0.1.dev243.dist-info/WHEEL +5 -0
- dhisana-0.0.1.dev243.dist-info/entry_points.txt +2 -0
- dhisana-0.0.1.dev243.dist-info/top_level.txt +1 -0
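
For reference, the installed distribution can be inspected with the standard library. A minimal sketch, assuming the wheel has been installed from its registry (for example via `pip install dhisana==0.0.1.dev243`):

# Minimal sketch: list the openapi_tool files recorded for the installed wheel.
# Assumes the dhisana distribution above is installed in the current environment.
from importlib.metadata import distribution

dist = distribution("dhisana")
print(dist.version)  # expected: 0.0.1.dev243
for f in dist.files or []:
    if str(f).startswith("dhisana/utils/openapi_tool/"):
        print(f)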
dhisana/utils/openapi_tool/convert_openai_spec_to_tool.py
@@ -0,0 +1,271 @@

from __future__ import annotations

import json
from collections import defaultdict
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple

from fastapi import logger
from openapi_pydantic import Parameter
from pydantic import BaseModel, Field
import aiohttp
from typing import Any, Optional

from .api_models import INVALID_LOCATION_TEMPL, APIProperty, APIRequestBody
from .openapi_tool import HTTPVerb, OpenAPISpec

def _format_url(url: str, path_params: dict) -> str:
    expected_path_param = re.findall(r"{(.*?)}", url)
    new_params = {}
    for param in expected_path_param:
        clean_param = param.lstrip(".;").rstrip("*")
        val = path_params[clean_param]
        if isinstance(val, list):
            if param[0] == ".":
                sep = "." if param[-1] == "*" else ","
                new_val = "." + sep.join(val)
            elif param[0] == ";":
                sep = f"{clean_param}=" if param[-1] == "*" else ","
                new_val = f"{clean_param}=" + sep.join(val)
            else:
                new_val = ",".join(val)
        elif isinstance(val, dict):
            kv_sep = "=" if param[-1] == "*" else ","
            kv_strs = [kv_sep.join((k, v)) for k, v in val.items()]
            if param[0] == ".":
                sep = "."
                new_val = "."
            elif param[0] == ";":
                sep = ";"
                new_val = ";"
            else:
                sep = ","
                new_val = ""
            new_val += sep.join(kv_strs)
        else:
            if param[0] == ".":
                new_val = f".{val}"
            elif param[0] == ";":
                new_val = f";{clean_param}={val}"
            else:
                new_val = val
        new_params[param] = new_val
    return url.format(**new_params)

class APIOperation(BaseModel):
    """A model for a single API operation."""

    operation_id: str = Field(alias="operation_id")
    """The unique identifier of the operation."""

    description: Optional[str] = Field(alias="description")
    """The description of the operation."""

    base_url: str = Field(alias="base_url")
    """The base URL of the operation."""

    path: str = Field(alias="path")
    """The path of the operation."""

    method: HTTPVerb = Field(alias="method")
    """The HTTP method of the operation."""

    properties: Sequence[APIProperty] = Field(alias="properties")

    # TODO: Add parse in used components to be able to specify what type of
    # referenced object it is.
    # """The properties of the operation."""
    # components: Dict[str, BaseModel] = Field(alias="components")

    request_body: Optional[APIRequestBody] = Field(alias="request_body")
    """The request body of the operation."""

    @staticmethod
    def _get_properties_from_parameters(
        parameters: List[Parameter], spec: OpenAPISpec
    ) -> List[APIProperty]:
        """Get the properties of the operation."""
        properties = []
        for param in parameters:
            if APIProperty.is_supported_location(param.param_in):
                properties.append(APIProperty.from_parameter(param, spec))
            elif param.required:
                raise ValueError(
                    INVALID_LOCATION_TEMPL.format(
                        location=param.param_in, name=param.name
                    )
                )
            else:
                logger.warning(
                    INVALID_LOCATION_TEMPL.format(
                        location=param.param_in, name=param.name
                    )
                    + " Ignoring optional parameter"
                )
                pass
        return properties

    @classmethod
    def from_openapi_url(
        cls,
        spec_url: str,
        path: str,
        method: str,
    ) -> "APIOperation":
        """Create an APIOperation from an OpenAPI URL."""
        spec = OpenAPISpec.from_url(spec_url)
        return cls.from_openapi_spec(spec, path, method)

    @classmethod
    def from_openapi_spec(
        cls,
        spec: OpenAPISpec,
        path: str,
        method: str,
    ) -> "APIOperation":
        """Create an APIOperation from an OpenAPI spec."""
        operation = spec.get_operation(path, method)
        parameters = spec.get_parameters_for_operation(operation)
        properties = cls._get_properties_from_parameters(parameters, spec)
        operation_id = OpenAPISpec.get_cleaned_operation_id(spec, operation, path, method)
        request_body = spec.get_request_body_for_operation(operation)
        api_request_body = (
            APIRequestBody.from_request_body(request_body, spec)
            if request_body is not None
            else None
        )
        description = operation.description or operation.summary
        if not description and spec.paths is not None:
            description = spec.paths[path].description or spec.paths[path].summary
        return cls(
            operation_id=operation_id,
            description=description or "",
            base_url=spec.base_url,
            path=path,
            method=method,  # type: ignore[arg-type]
            properties=properties,
            request_body=api_request_body,
        )

def _openapi_params_to_json_schema(params: List[Parameter], spec: OpenAPISpec) -> dict:
    properties = {}
    required = []
    for p in params:
        if p.param_schema:
            schema = spec.get_schema(p.param_schema)
        else:
            media_type_schema = list(p.content.values())[0].media_type_schema  # type: ignore  # noqa: E501
            schema = spec.get_schema(media_type_schema)
        if p.description and not schema.description:
            schema.description = p.description
        properties[p.name] = json.loads(schema.json(exclude_none=True))
        if p.required:
            required.append(p.name)
    return {"type": "object", "properties": properties, "required": required}

def openapi_spec_to_openai_fn(
    spec: OpenAPISpec,
) -> Tuple[List[Dict[str, Any]], Callable]:
    """Convert a valid OpenAPI spec to the JSON Schema format expected for OpenAI
    functions.

    Args:
        spec: OpenAPI spec to convert.

    Returns:
        Tuple of the OpenAI functions JSON schema and a default function for executing
        a request based on the OpenAI function schema.
    """
    if not spec.paths:
        return [], lambda: None
    functions = []
    _name_to_call_map = {}
    for path in spec.paths:
        path_params = {
            (p.name, p.param_in): p for p in spec.get_parameters_for_path(path)
        }
        for method in spec.get_methods_for_path(path):
            request_args = {}
            op = spec.get_operation(path, method)
            op_params = path_params.copy()
            for param in spec.get_parameters_for_operation(op):
                op_params[(param.name, param.param_in)] = param
            params_by_type = defaultdict(list)
            for name_loc, p in op_params.items():
                params_by_type[name_loc[1]].append(p)
            param_loc_to_arg_name = {
                "query": "params",
                "header": "headers",
                "cookie": "cookies",
                "path": "path_params",
            }
            for param_loc, arg_name in param_loc_to_arg_name.items():
                if params_by_type[param_loc]:
                    request_args[arg_name] = _openapi_params_to_json_schema(
                        params_by_type[param_loc], spec
                    )
            request_body = spec.get_request_body_for_operation(op)
            # TODO: Support more MIME types.
            if request_body and request_body.content:
                media_types = {}
                for media_type, media_type_object in request_body.content.items():
                    if media_type_object.media_type_schema:
                        schema = spec.get_schema(media_type_object.media_type_schema)
                        media_types[media_type] = json.loads(
                            schema.json(exclude_none=True)
                        )
                if len(media_types) == 1:
                    media_type, schema_dict = list(media_types.items())[0]
                    key = "json" if media_type == "application/json" else "data"
                    request_args[key] = schema_dict
                elif len(media_types) > 1:
                    request_args["data"] = {"anyOf": list(media_types.values())}

            api_op = APIOperation.from_openapi_spec(spec, path, method)
            fn = {
                "type": "function",
                "function": {
                    "name": api_op.operation_id,
                    "description": api_op.description,
                    "parameters": {
                        "type": "object",
                        "properties": request_args,
                    },
                }
            }
            functions.append(fn)
            _name_to_call_map[fn["function"]["name"]] = {
                "method": method,
                "url": api_op.base_url + api_op.path,
            }

    async def default_call_api(
        name: str,
        fn_args: dict,
        headers: Optional[dict] = None,
        params: Optional[dict] = None,
        **kwargs: Any,
    ) -> Any:
        method = _name_to_call_map[name]["method"]
        url = _name_to_call_map[name]["url"]
        path_params = fn_args.pop("path_params", {})
        url = _format_url(url, path_params)
        if "data" in fn_args and isinstance(fn_args["data"], dict):
            fn_args["data"] = json.dumps(fn_args["data"])
        _kwargs = {**fn_args, **kwargs}
        if headers is not None:
            if "headers" in _kwargs:
                _kwargs["headers"].update(headers)
            else:
                _kwargs["headers"] = headers
        if params is not None:
            if "params" in _kwargs:
                _kwargs["params"].update(params)
            else:
                _kwargs["params"] = params

        async with aiohttp.ClientSession() as session:
            async with session.request(method, url, **_kwargs) as response:
                return response.status, response.reason, await response.text()

    return functions, default_call_api

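A minimal usage sketch of the converter above. The spec URL and the query parameter are hypothetical; the helper returned alongside the tool schemas is the default_call_api closure defined in this file, and the sketch assumes the first operation takes only query parameters (no path parameters):

# Hypothetical usage sketch for openapi_spec_to_openai_fn / default_call_api.
import asyncio

from dhisana.utils.openapi_tool.convert_openai_spec_to_tool import openapi_spec_to_openai_fn
from dhisana.utils.openapi_tool.openapi_tool import OpenAPISpec

async def main() -> None:
    # Hypothetical spec URL for illustration only.
    spec = OpenAPISpec.from_url("https://example.com/openapi.json")
    tools, call_api = openapi_spec_to_openai_fn(spec)
    print([t["function"]["name"] for t in tools])
    # Execute one operation directly with the returned helper; the "q" query
    # parameter is assumed for illustration.
    status, reason, body = await call_api(
        tools[0]["function"]["name"],
        {"params": {"q": "test"}},
    )
    print(status, reason)

asyncio.run(main())
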
dhisana/utils/openapi_tool/openapi_tool.py
@@ -0,0 +1,319 @@

"""Utility functions for parsing an OpenAPI spec."""
from __future__ import annotations

import copy
import json
import logging
import re
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Optional, Union

from pydantic import ValidationError
import requests
import yaml

logger = logging.getLogger(__name__)


class HTTPVerb(str, Enum):
    """Enumerator of the HTTP verbs."""

    GET = "get"
    PUT = "put"
    POST = "post"
    DELETE = "delete"
    OPTIONS = "options"
    HEAD = "head"
    PATCH = "patch"
    TRACE = "trace"

    @classmethod
    def from_str(cls, verb: str) -> HTTPVerb:
        """Parse an HTTP verb."""
        try:
            return cls(verb)
        except ValueError:
            raise ValueError(f"Invalid HTTP verb. Valid values are {cls.__members__}")


if TYPE_CHECKING:
    from openapi_pydantic import (
        Components,
        Operation,
        Parameter,
        PathItem,
        Paths,
        Reference,
        RequestBody,
        Schema,
    )

try:
    from openapi_pydantic import OpenAPI
except ImportError:
    OpenAPI = object  # type: ignore


class OpenAPISpec(OpenAPI):
    """OpenAPI Model that removes mis-formatted parts of the spec."""

    openapi: str = "3.1.0"  # overriding overly restrictive type from parent class

    @property
    def _paths_strict(self) -> Paths:
        if not self.paths:
            raise ValueError("No paths found in spec")
        return self.paths

    def _get_path_strict(self, path: str) -> PathItem:
        path_item = self._paths_strict.get(path)
        if not path_item:
            raise ValueError(f"No path found for {path}")
        return path_item

    @property
    def _components_strict(self) -> Components:
        """Get components or err."""
        if self.components is None:
            raise ValueError("No components found in spec. ")
        return self.components

    @property
    def _parameters_strict(self) -> Dict[str, Union[Parameter, Reference]]:
        """Get parameters or err."""
        parameters = self._components_strict.parameters
        if parameters is None:
            raise ValueError("No parameters found in spec. ")
        return parameters

    @property
    def _schemas_strict(self) -> Dict[str, Schema]:
        """Get the dictionary of schemas or err."""
        schemas = self._components_strict.schemas
        if schemas is None:
            raise ValueError("No schemas found in spec. ")
        return schemas

    @property
    def _request_bodies_strict(self) -> Dict[str, Union[RequestBody, Reference]]:
        """Get the request body or err."""
        request_bodies = self._components_strict.requestBodies
        if request_bodies is None:
            raise ValueError("No request body found in spec. ")
        return request_bodies

    def _get_referenced_parameter(self, ref: Reference) -> Union[Parameter, Reference]:
        """Get a parameter (or nested reference) or err."""
        ref_name = ref.ref.split("/")[-1]
        parameters = self._parameters_strict
        if ref_name not in parameters:
            raise ValueError(f"No parameter found for {ref_name}")
        return parameters[ref_name]

    def _get_root_referenced_parameter(self, ref: Reference) -> Parameter:
        """Get the root reference or err."""
        from openapi_pydantic import Reference

        parameter = self._get_referenced_parameter(ref)
        while isinstance(parameter, Reference):
            parameter = self._get_referenced_parameter(parameter)
        return parameter

    def get_referenced_schema(self, ref: Reference) -> Schema:
        """Get a schema (or nested reference) or err."""
        ref_name = ref.ref.split("/")[-1]
        schemas = self._schemas_strict
        if ref_name not in schemas:
            raise ValueError(f"No schema found for {ref_name}")
        return schemas[ref_name]

    def get_schema(self, schema: Union[Reference, Schema]) -> Schema:
        from openapi_pydantic import Reference

        if isinstance(schema, Reference):
            return self.get_referenced_schema(schema)
        return schema

    def _get_root_referenced_schema(self, ref: Reference) -> Schema:
        """Get the root reference or err."""
        from openapi_pydantic import Reference

        schema = self.get_referenced_schema(ref)
        while isinstance(schema, Reference):
            schema = self.get_referenced_schema(schema)
        return schema

    def _get_referenced_request_body(
        self, ref: Reference
    ) -> Optional[Union[Reference, RequestBody]]:
        """Get a request body (or nested reference) or err."""
        ref_name = ref.ref.split("/")[-1]
        request_bodies = self._request_bodies_strict
        if ref_name not in request_bodies:
            raise ValueError(f"No request body found for {ref_name}")
        return request_bodies[ref_name]

    def _get_root_referenced_request_body(
        self, ref: Reference
    ) -> Optional[RequestBody]:
        """Get the root request Body or err."""
        from openapi_pydantic import Reference

        request_body = self._get_referenced_request_body(ref)
        while isinstance(request_body, Reference):
            request_body = self._get_referenced_request_body(request_body)
        return request_body

    @staticmethod
    def _alert_unsupported_spec(obj: dict) -> None:
        """Alert if the spec is not supported."""
        warning_message = (
            " This may result in degraded performance."
            + " Convert your OpenAPI spec to 3.1.* spec"
            + " for better support."
        )
        swagger_version = obj.get("swagger")
        openapi_version = obj.get("openapi")
        if isinstance(openapi_version, str):
            if openapi_version != "3.1.0":
                logger.warning(
                    f"Attempting to load an OpenAPI {openapi_version}"
                    f" spec. {warning_message}"
                )
            else:
                pass
        elif isinstance(swagger_version, str):
            logger.warning(
                f"Attempting to load a Swagger {swagger_version}"
                f" spec. {warning_message}"
            )
        else:
            raise ValueError(
                "Attempting to load an unsupported spec:"
                f"\n\n{obj}\n{warning_message}"
            )

    @classmethod
    def parse_obj(cls, obj: dict) -> OpenAPISpec:
        try:
            cls._alert_unsupported_spec(obj)
            return super().parse_obj(obj)
        except ValidationError as e:
            # We are handling possibly misconfigured specs and
            # want to do a best-effort job to get a reasonable interface out of it.
            new_obj = copy.deepcopy(obj)
            for error in e.errors():
                keys = error["loc"]
                item = new_obj
                for key in keys[:-1]:
                    item = item[key]
                item.pop(keys[-1], None)
            return cls.parse_obj(new_obj)

    @classmethod
    def from_spec_dict(cls, spec_dict: dict) -> OpenAPISpec:
        """Get an OpenAPI spec from a dict."""
        return cls.parse_obj(spec_dict)

    @classmethod
    def from_text(cls, text: str) -> OpenAPISpec:
        """Get an OpenAPI spec from a text."""
        try:
            spec_dict = json.loads(text)
        except json.JSONDecodeError:
            spec_dict = yaml.safe_load(text)
        return cls.from_spec_dict(spec_dict)

    @classmethod
    def from_file(cls, path: Union[str, Path]) -> OpenAPISpec:
        """Get an OpenAPI spec from a file path."""
        path_ = path if isinstance(path, Path) else Path(path)
        if not path_.exists():
            raise FileNotFoundError(f"{path} does not exist")
        with path_.open("r") as f:
            return cls.from_text(f.read())

    @classmethod
    def from_url(cls, url: str) -> OpenAPISpec:
        """Get an OpenAPI spec from a URL."""
        response = requests.get(url)
        return cls.from_text(response.text)

    @property
    def base_url(self) -> str:
        """Get the base url."""
        return self.servers[0].url

    def get_methods_for_path(self, path: str) -> List[str]:
        """Return a list of valid methods for the specified path."""
        from openapi_pydantic import Operation

        path_item = self._get_path_strict(path)
        results = []
        for method in HTTPVerb:
            operation = getattr(path_item, method.value, None)
            if isinstance(operation, Operation):
                results.append(method.value)
        return results

    def get_parameters_for_path(self, path: str) -> List[Parameter]:
        from openapi_pydantic import Reference

        path_item = self._get_path_strict(path)
        parameters = []
        if not path_item.parameters:
            return []
        for parameter in path_item.parameters:
            if isinstance(parameter, Reference):
                parameter = self._get_root_referenced_parameter(parameter)
            parameters.append(parameter)
        return parameters

    def get_operation(self, path: str, method: str) -> Operation:
        """Get the operation object for a given path and HTTP method."""
        from openapi_pydantic import Operation

        path_item = self._get_path_strict(path)
        operation_obj = getattr(path_item, method, None)
        if not isinstance(operation_obj, Operation):
            raise ValueError(f"No {method} method found for {path}")
        return operation_obj

    def get_parameters_for_operation(self, operation: Operation) -> List[Parameter]:
        """Get the components for a given operation."""
        from openapi_pydantic import Reference

        parameters = []
        if operation.parameters:
            for parameter in operation.parameters:
                if isinstance(parameter, Reference):
                    parameter = self._get_root_referenced_parameter(parameter)
                parameters.append(parameter)
        return parameters

    def get_request_body_for_operation(
        self, operation: Operation
    ) -> Optional[RequestBody]:
        """Get the request body for a given operation."""
        from openapi_pydantic import Reference

        request_body = operation.requestBody
        if isinstance(request_body, Reference):
            request_body = self._get_root_referenced_request_body(request_body)
        return request_body

    @staticmethod
    def get_cleaned_operation_id(spec: OpenAPISpec, operation: Operation, path: str, method: str) -> str:
        """Get a cleaned operation id from an operation id."""
        # Sanitize the base URL
        base_url = spec.base_url
        sanitized_base_url = re.sub(r"https?://(www\.)?", "", base_url)
        sanitized_base_url = re.sub(r"[^a-zA-Z0-9]", "_", sanitized_base_url).strip("_").lower()

        # Replace all punctuation of any kind with underscore
        path = re.sub(r"[^a-zA-Z0-9]", "_", path.lstrip("/"))
        operation_id = f"{sanitized_base_url}_{path}_{method}"
        operation_id = operation_id.replace("-", "_").replace(".", "_").replace("/", "_").lower()
        return operation_id

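A minimal sketch exercising OpenAPISpec from this module on a tiny inline OpenAPI 3.1.0 document; the spec content below is illustrative, not taken from the package:

# Hypothetical inline spec to show the OpenAPISpec helpers above.
from dhisana.utils.openapi_tool.openapi_tool import OpenAPISpec

spec_dict = {
    "openapi": "3.1.0",
    "info": {"title": "Demo", "version": "1.0.0"},
    "servers": [{"url": "https://api.example.com"}],
    "paths": {
        "/widgets/{id}": {
            "get": {
                "operationId": "getWidget",
                "parameters": [
                    {"name": "id", "in": "path", "required": True,
                     "schema": {"type": "string"}}
                ],
                "responses": {"200": {"description": "OK"}},
            }
        }
    },
}

spec = OpenAPISpec.from_spec_dict(spec_dict)
print(spec.base_url)                               # https://api.example.com
print(spec.get_methods_for_path("/widgets/{id}"))  # ['get']
op = spec.get_operation("/widgets/{id}", "get")
print([p.name for p in spec.get_parameters_for_operation(op)])  # ['id']
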
dhisana/utils/parse_linkedin_messages_txt.py
@@ -0,0 +1,100 @@

import uuid
from typing import List
from datetime import datetime, date
from dhisana.schemas.sales import MessageItem

DAY_NAMES = {
    "monday", "tuesday", "wednesday", "thursday",
    "friday", "saturday", "sunday"
}

def is_day_line(line: str) -> bool:
    """Check if line is a simple day name (case-insensitive)."""
    return line.strip().lower() in DAY_NAMES

def parse_time_line(time_str: str) -> str:
    """
    Parse a time string like "6:38 PM" or "14:10" using today's date,
    returning an ISO8601 string. Returns an empty string if it fails.
    """
    today_str = date.today().strftime("%Y-%m-%d")
    for fmt in ["%I:%M %p", "%H:%M"]:
        try:
            dt = datetime.strptime(f"{today_str} {time_str}", f"%Y-%m-%d {fmt}")
            return dt.isoformat()
        except ValueError:
            pass
    return ""  # If we can’t parse it, return empty


def parse_conversation(conversation_text: str) -> List[MessageItem]:
    """
    Given raw text containing lines like:
        'Load more'
        'Thursday'
        'You'
        '6:38 PM'
        'Hello, ...'
    Parse them into MessageItems with empty subject/email,
    and return a list sorted from latest (top) to oldest (bottom).
    """
    # Split lines, remove empties and extra spaces
    lines = [line.strip() for line in conversation_text.split('\n') if line.strip()]

    messages: List[MessageItem] = []
    i = 0
    while i < len(lines):
        line = lines[i]

        # Skip lines that say 'Load more' or day lines (Thursday, Monday, etc.)
        if line.lower().startswith("load more") or is_day_line(line):
            i += 1
            continue

        # This line should be the sender (e.g. "You" or "Madhukar Devaraju")
        sender = line
        i += 1
        if i >= len(lines):
            break

        # Next line should be the time
        time_line = lines[i]
        time_iso = parse_time_line(time_line)
        if not time_iso:
            # If we cannot parse the time here, skip it and move on
            i += 1
            continue
        i += 1

        # Collect body until the next recognized "sender" or "day" or "Load more" or valid time
        body_lines = []
        while i < len(lines):
            nxt = lines[i]
            if nxt.lower().startswith("load more") or is_day_line(nxt):
                # Reached a new block
                break
            if parse_time_line(nxt):
                # If nxt is a time line, it means a new message is coming
                break
            # Otherwise, treat it as part of the message body
            body_lines.append(nxt)
            i += 1

        # We have enough info to form one message
        body_text = "\n".join(body_lines).strip()
        message_item = MessageItem(
            message_id=str(uuid.uuid4()),
            thread_id=str(uuid.uuid4()),
            sender_name=sender,
            sender_email="",    # LinkedIn message => keep empty
            receiver_name="",   # keep empty by default
            receiver_email="",  # keep empty by default
            iso_datetime=time_iso,
            subject="",         # LinkedIn => keep empty
            body=body_text
        )
        messages.append(message_item)

    # Reverse the list so the latest is on top
    messages.reverse()
    return messages
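
A minimal sketch of parse_conversation on a made-up scraped LinkedIn thread; the names and message text are illustrative:

# Hypothetical conversation text in the line format the parser expects:
# day separator, sender, time, then one or more body lines.
from dhisana.utils.parse_linkedin_messages_txt import parse_conversation

raw_thread = """
Load more
Thursday
You
6:38 PM
Hi Jane, great to connect!
Thursday
Jane Doe
7:02 PM
Thanks for reaching out.
Let's talk next week.
"""

for msg in parse_conversation(raw_thread):
    # Messages come back latest-first; sender/body/iso_datetime are populated,
    # while the email and subject fields stay empty for LinkedIn messages.
    print(msg.iso_datetime, msg.sender_name, repr(msg.body))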