unstructured-ingest 0.5.10__py3-none-any.whl → 0.5.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of unstructured-ingest might be problematic. Click here for more details.
- test/integration/connectors/test_astradb.py +21 -0
- test/integration/connectors/test_chroma.py +18 -0
- test/integration/connectors/test_zendesk.py +142 -0
- test/integration/connectors/utils/validation/destination.py +2 -1
- unstructured_ingest/__version__.py +1 -1
- unstructured_ingest/v2/interfaces/downloader.py +2 -3
- unstructured_ingest/v2/processes/connectors/astradb.py +36 -28
- unstructured_ingest/v2/processes/connectors/neo4j.py +41 -18
- unstructured_ingest/v2/processes/connectors/zendesk/__init__.py +31 -0
- unstructured_ingest/v2/processes/connectors/zendesk/client.py +243 -0
- unstructured_ingest/v2/processes/connectors/zendesk/zendesk.py +425 -0
- {unstructured_ingest-0.5.10.dist-info → unstructured_ingest-0.5.12.dist-info}/METADATA +24 -20
- {unstructured_ingest-0.5.10.dist-info → unstructured_ingest-0.5.12.dist-info}/RECORD +17 -13
- {unstructured_ingest-0.5.10.dist-info → unstructured_ingest-0.5.12.dist-info}/LICENSE.md +0 -0
- {unstructured_ingest-0.5.10.dist-info → unstructured_ingest-0.5.12.dist-info}/WHEEL +0 -0
- {unstructured_ingest-0.5.10.dist-info → unstructured_ingest-0.5.12.dist-info}/entry_points.txt +0 -0
- {unstructured_ingest-0.5.10.dist-info → unstructured_ingest-0.5.12.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
import base64
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from typing import Dict, List
|
|
4
|
+
|
|
5
|
+
from unstructured_ingest.utils.dep_check import requires_dependencies
|
|
6
|
+
from unstructured_ingest.v2.errors import ProviderError, RateLimitError, UserAuthError, UserError
|
|
7
|
+
from unstructured_ingest.v2.logger import logger
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass
class Comment:
    """A single comment attached to a Zendesk ticket."""

    # Zendesk's numeric comment id.
    id: int
    # Id of the user that authored the comment.
    author_id: str
    # Body text of the comment as returned by the API.
    body: str
    # Id of the ticket this comment belongs to.
    parent_ticket_id: str
    # Raw API payload for the comment, kept for downstream consumers
    # (e.g. the downloader reads `created_at` from here).
    metadata: dict
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class ZendeskTicket:
    """Lightweight record for a Zendesk ticket, ordered by ticket id."""

    id: int
    subject: str
    description: str
    generated_ts: int
    metadata: dict

    def __lt__(self, other):
        # Coerce both ids to int so ordering is numeric even if an id
        # arrives as a string.
        own_key = int(self.id)
        other_key = int(other.id)
        return own_key < other_key
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass
class ZendeskArticle:
    """Lightweight record for a Zendesk help-center article, ordered by id."""

    id: int
    author_id: str
    title: str
    content: str

    def __lt__(self, other):
        # Numeric comparison of ids, tolerating string-typed ids.
        other_key = int(other.id)
        return int(self.id) < other_key
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class ZendeskClient:
    """Thin HTTP client for the Zendesk REST API (tickets, comments, articles).

    Credentials are validated eagerly in the constructor; every HTTP failure
    is normalized through `wrap_error` into the ingest error hierarchy.
    """

    @requires_dependencies(["httpx"], extras="zendesk")
    def __init__(self, token: str, subdomain: str, email: str):
        import httpx

        # should be okay to be blocking.
        url_to_check = f"https://{subdomain}.zendesk.com/api/v2/groups.json"
        auth = f"{email}/token", token

        try:
            response = httpx.get(url_to_check, auth=auth)
            # httpx does NOT raise on 4xx/5xx by default; without this call,
            # invalid credentials would previously go undetected here.
            response.raise_for_status()
        except Exception as e:
            raise self.wrap_error(e=e)

        self._token = token
        self._subdomain = subdomain
        self._email = email
        self._auth = auth

    @requires_dependencies(["httpx"], extras="zendesk")
    def wrap_error(self, e: Exception) -> Exception:
        """Map an httpx error onto the ingest error hierarchy.

        Returns the mapped exception (callers `raise` the result); exceptions
        that are not `httpx.HTTPStatusError` are logged and returned unchanged.
        """
        import httpx

        if not isinstance(e, httpx.HTTPStatusError):
            logger.error(f"unhandled exception from Zendesk client: {e}", exc_info=True)
            return e
        url = e.request.url
        response_code = e.response.status_code
        if response_code == 401:
            logger.error(
                f"Failed to connect via auth,"
                f"{url} using zendesk response, status code {response_code}"
            )
            return UserAuthError(e)
        if response_code == 429:
            logger.error(
                f"Failed to connect via rate limits"
                f"{url} using zendesk response, status code {response_code}"
            )
            return RateLimitError(e)
        if 400 <= response_code < 500:
            logger.error(
                f"Failed to connect to {url} using zendesk response, status code {response_code}"
            )
            return UserError(e)
        # Was `> 500`, which let HTTP 500 itself fall through unmapped;
        # 500 is a provider-side failure too.
        if response_code >= 500:
            logger.error(
                f"Failed to connect to {url} using zendesk response, status code {response_code}"
            )
            return ProviderError(e)
        logger.error(f"unhandled http status error from Zendesk client: {e}", exc_info=True)
        return e

    @requires_dependencies(["httpx"], extras="zendesk")
    async def get_articles_async(self) -> List[ZendeskArticle]:
        """
        Retrieves article content from Zendesk asynchronously.
        """
        import httpx

        article_url = f"https://{self._subdomain}.zendesk.com/api/v2/help_center/articles.json"

        try:
            async with httpx.AsyncClient() as client:
                response = await client.get(article_url, auth=self._auth)
                response.raise_for_status()
        except Exception as e:
            raise self.wrap_error(e=e)

        articles_in_response: List[dict] = response.json()["articles"]

        articles = [
            ZendeskArticle(
                id=int(entry["id"]),
                author_id=str(entry["author_id"]),
                title=str(entry["title"]),
                content=entry["body"],
            )
            for entry in articles_in_response
        ]
        return articles

    @requires_dependencies(["httpx"], extras="zendesk")
    async def get_comments_async(self, ticket_id: int) -> List["Comment"]:
        """Fetch all comments on a ticket asynchronously."""
        import httpx

        comments_url = f"https://{self._subdomain}.zendesk.com/api/v2/tickets/{ticket_id}/comments"

        try:
            async with httpx.AsyncClient() as client:
                response = await client.get(comments_url, auth=self._auth)
                response.raise_for_status()
        except Exception as e:
            raise self.wrap_error(e=e)

        return [
            Comment(
                id=int(entry["id"]),
                author_id=entry["author_id"],
                body=entry["body"],
                metadata=entry,
                parent_ticket_id=ticket_id,
            )
            for entry in response.json()["comments"]
        ]

    @requires_dependencies(["httpx"], extras="zendesk")
    def get_users(self) -> List[dict]:
        """Fetch users (blocking); used by the indexer precheck."""
        import httpx

        users_url = f"https://{self._subdomain}.zendesk.com/api/v2/users"
        try:
            response = httpx.get(users_url, auth=self._auth)
            response.raise_for_status()
        except Exception as e:
            raise self.wrap_error(e=e)

        users: List[dict] = response.json()["users"]
        return users

    @requires_dependencies(["httpx"], extras="zendesk")
    async def get_tickets_async(self) -> List["ZendeskTicket"]:
        """Fetch tickets asynchronously."""
        import httpx

        tickets: List["ZendeskTicket"] = []
        tickets_url = f"https://{self._subdomain}.zendesk.com/api/v2/tickets"

        try:
            async with httpx.AsyncClient() as client:
                response = await client.get(tickets_url, auth=self._auth)
                response.raise_for_status()
        except Exception as e:
            raise self.wrap_error(e=e)

        tickets_in_response: List[dict] = response.json()["tickets"]

        for entry in tickets_in_response:
            ticket = ZendeskTicket(
                id=int(entry["id"]),
                subject=entry["subject"],
                description=entry["description"],
                generated_ts=entry["generated_timestamp"],
                metadata=entry,
            )
            tickets.append(ticket)

        return tickets

    @requires_dependencies(["httpx"], extras="zendesk")
    async def get_article_attachments_async(self, article_id: str):
        """
        Fetch an article's attachments and inline each one as a base64 data URI.

        Each returned dict carries the attachment metadata plus an
        `encoded_content` field of the form `data:<content-type>;base64,...`.
        """
        import httpx

        article_attachment_url = (
            f"https://{self._subdomain}.zendesk.com/api/v2/help_center/"
            f"articles/{article_id}/attachments"
        )

        attachments = []

        # One client for the listing and every download, instead of opening
        # a fresh connection pool per attachment inside the loop.
        async with httpx.AsyncClient() as client:
            try:
                response = await client.get(article_attachment_url, auth=self._auth)
                response.raise_for_status()
            except Exception as e:
                raise self.wrap_error(e=e)

            attachments_in_response: List[Dict] = response.json().get("article_attachments", [])

            for attachment in attachments_in_response:
                attachment_data = {
                    "id": attachment["id"],
                    "file_name": attachment["file_name"],
                    "content_type": attachment["content_type"],
                    "size": attachment["size"],
                    "url": attachment["url"],
                    "content_url": attachment["content_url"],
                }

                try:
                    download_response = await client.get(attachment["content_url"], auth=self._auth)
                    download_response.raise_for_status()
                except Exception as e:
                    raise self.wrap_error(e=e)

                encoded_content = base64.b64encode(download_response.content).decode("utf-8")
                attachment_data["encoded_content"] = (
                    f"data:{attachment_data['content_type']};base64,{encoded_content}"
                )

                attachments.append(attachment_data)

        return attachments
|
|
@@ -0,0 +1,425 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import datetime
|
|
4
|
+
import hashlib
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from time import time
|
|
8
|
+
from typing import Any, AsyncGenerator, List, Literal
|
|
9
|
+
|
|
10
|
+
from pydantic import BaseModel, Field, Secret
|
|
11
|
+
|
|
12
|
+
from unstructured_ingest.utils.data_prep import batch_generator
|
|
13
|
+
from unstructured_ingest.utils.dep_check import requires_dependencies
|
|
14
|
+
from unstructured_ingest.utils.html import HtmlMixin
|
|
15
|
+
from unstructured_ingest.v2.errors import UserAuthError
|
|
16
|
+
from unstructured_ingest.v2.interfaces import (
|
|
17
|
+
AccessConfig,
|
|
18
|
+
BatchFileData,
|
|
19
|
+
BatchItem,
|
|
20
|
+
ConnectionConfig,
|
|
21
|
+
Downloader,
|
|
22
|
+
DownloaderConfig,
|
|
23
|
+
DownloadResponse,
|
|
24
|
+
FileData,
|
|
25
|
+
FileDataSourceMetadata,
|
|
26
|
+
Indexer,
|
|
27
|
+
IndexerConfig,
|
|
28
|
+
SourceIdentifiers,
|
|
29
|
+
)
|
|
30
|
+
from unstructured_ingest.v2.logger import logger
|
|
31
|
+
from unstructured_ingest.v2.processes.connector_registry import SourceRegistryEntry
|
|
32
|
+
|
|
33
|
+
from .client import ZendeskArticle, ZendeskClient, ZendeskTicket
|
|
34
|
+
|
|
35
|
+
CONNECTOR_TYPE = "zendesk"
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class ZendeskAdditionalMetadata(BaseModel):
    """Batch bookkeeping carried on each ZendeskBatchFileData."""

    # Either "tickets" or "articles"; tells the downloader which handler to use.
    item_type: str
    leading_id: str  # first id in the batch (same as the FileData identifier).
    tail_id: str  # last id in the batch.
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class ZendeskFileDataSourceMetadata(FileDataSourceMetadata):
    """
    Source metadata for Zendesk batches.

    Tickets and articles are indexed as single batches, so the batch's
    leading ticket/article id is what gets recorded in the source metadata
    (see the record_locator built by the indexer).
    """
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class ZendeskBatchFileData(BatchFileData):
    """Batch file data enriched with Zendesk batch bookkeeping."""

    # Carries the batch's item type plus its first and last ids.
    additional_metadata: ZendeskAdditionalMetadata
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class ZendeskAccessConfig(AccessConfig):
    """Secret portion of the Zendesk connection config: the API token."""

    api_token: str = Field(
        description="API token for zendesk generated under Apps and Integrations"
    )
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class ZendeskBatchItemTicket(BatchItem):
    """Single ticket carried inside a batch."""

    subject: str
    description: str
    item_type: str = "tickets"  # placeholder for downloader
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class ZendeskBatchItemArticle(BatchItem):
    """Single help-center article carried inside a batch."""

    # `title` was previously declared twice; the redundant duplicate
    # annotation has been removed (field set and order are unchanged).
    title: str
    author_id: str
    content: str
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class ZendeskConnectionConfig(ConnectionConfig):
    """Connection settings for a Zendesk site (subdomain, email, API token)."""

    subdomain: str = Field(description="Subdomain for zendesk site, <sub-domain>.company.com")
    email: str = Field(description="Email for zendesk site registered at the subdomain")
    access_config: Secret[ZendeskAccessConfig]

    async def get_client_async(self) -> ZendeskClient:
        """Provides an async manager for ZendeskClient.

        Client construction itself is synchronous, so this simply delegates
        to get_client (previously the two methods were verbatim duplicates).
        """
        return self.get_client()

    def get_client(self) -> ZendeskClient:
        """Build a ZendeskClient from the configured credentials."""
        access_config = self.access_config.get_secret_value()

        client = ZendeskClient(
            email=self.email, subdomain=self.subdomain, token=access_config.api_token
        )
        return client
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
class ZendeskIndexerConfig(IndexerConfig):
    """Indexer options: what to pull from Zendesk and how large each batch is."""

    batch_size: int = Field(
        default=2,
        description="Number of tickets or articles.",
    )
    # Description previously claimed only `tickets`/`articles` were valid,
    # contradicting the Literal which also allows `all`.
    item_type: Literal["tickets", "articles", "all"] = Field(
        default="tickets",
        description="Type of item from zendesk to parse: `tickets`, `articles`, or `all`.",
    )
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
@dataclass
class ZendeskIndexer(Indexer):
    """Indexes Zendesk tickets and/or help-center articles into batched FileData."""

    connection_config: ZendeskConnectionConfig
    index_config: ZendeskIndexerConfig
    connector_type: str = CONNECTOR_TYPE

    def precheck(self) -> None:
        """Validates connection to Zendesk API."""
        try:
            client = self.connection_config.get_client()
            if not client.get_users():
                subdomain_endpoint = f"{self.connection_config.subdomain}.zendesk.com"
                raise UserAuthError(f"Users do not exist in subdomain {subdomain_endpoint}")
        except UserAuthError as e:
            logger.error(f"Source connection error: {e}", exc_info=True)
            raise
        except Exception as e:
            logger.error(f"Failed to validate connection to Zendesk: {e}", exc_info=True)
            raise UserAuthError(f"Failed to validate connection: {e}")

    def is_async(self) -> bool:
        return True

    async def _list_articles_async(self) -> List[ZendeskArticle]:
        # One-shot client; listing articles is a single GET.
        client = await self.connection_config.get_client_async()
        return await client.get_articles_async()

    async def _list_tickets_async(self) -> List[ZendeskTicket]:
        client = await self.connection_config.get_client_async()
        return await client.get_tickets_async()

    def _generate_fullpath(self, identifier: str) -> Path:
        # Hash the id so filenames are stable, short, and filesystem-safe.
        return Path(hashlib.sha256(identifier.encode("utf-8")).hexdigest()[:16] + ".txt")

    async def handle_articles_async(
        self, articles: List[ZendeskArticle], batch_size: int
    ) -> AsyncGenerator[ZendeskBatchFileData, None]:
        """Parses articles from a list and yields FileData objects asynchronously in batches."""
        for article_batch in batch_generator(articles, batch_size=batch_size):

            # Sort so leading/tail ids are deterministic regardless of API order.
            article_batch = sorted(article_batch)

            additional_metadata = ZendeskAdditionalMetadata(
                item_type="articles",
                leading_id=str(article_batch[0].id),
                tail_id=str(article_batch[-1].id),
            )

            metadata = ZendeskFileDataSourceMetadata(
                date_processed=str(time()),
                record_locator={
                    "id": str(article_batch[0].id),
                    "item_type": "articles",
                },
            )

            batch_items: List[ZendeskBatchItemArticle] = [
                ZendeskBatchItemArticle(
                    identifier=str(article.id),
                    author_id=str(article.author_id),
                    title=str(article.title),
                    content=str(article.content),
                )
                for article in article_batch
            ]

            # Articles are written out as HTML, not plain text.
            full_path = self._generate_fullpath(str(article_batch[0].id))
            full_path = Path(str(full_path).replace(".txt", ".html"))

            source_identifiers = SourceIdentifiers(filename=full_path.name, fullpath=str(full_path))

            batched_file_data = ZendeskBatchFileData(
                identifier=str(article_batch[0].id),
                connector_type=self.connector_type,
                metadata=metadata,
                batch_items=batch_items,
                additional_metadata=additional_metadata,
                source_identifiers=source_identifiers,
            )

            yield batched_file_data

    async def handle_tickets_async(
        self, tickets: List[ZendeskTicket], batch_size: int
    ) -> AsyncGenerator[ZendeskBatchFileData, None]:
        """Parses tickets from a list and yields FileData objects asynchronously in batches."""
        for ticket_batch in batch_generator(tickets, batch_size=batch_size):

            sorted_batch = sorted(ticket_batch)

            additional_metadata = ZendeskAdditionalMetadata(
                item_type="tickets",
                leading_id=str(sorted_batch[0].id),
                tail_id=str(sorted_batch[-1].id),
            )

            metadata = ZendeskFileDataSourceMetadata(
                date_processed=str(time()),
                record_locator={
                    "id": str(sorted_batch[0].id),
                    "item_type": "tickets",
                },
            )

            batch_items: List[ZendeskBatchItemTicket] = [
                ZendeskBatchItemTicket(
                    identifier=str(ticket.id),
                    subject=str(ticket.subject),
                    description=str(ticket.description),
                )
                for ticket in sorted_batch
            ]

            full_path = self._generate_fullpath(str(sorted_batch[0].id))
            source_identifiers = SourceIdentifiers(filename=full_path.name, fullpath=str(full_path))

            batched_file_data = ZendeskBatchFileData(
                connector_type=self.connector_type,
                metadata=metadata,
                batch_items=batch_items,
                additional_metadata=additional_metadata,
                source_identifiers=source_identifiers,
            )

            yield batched_file_data

    async def run_async(self, **kwargs: Any) -> AsyncGenerator[FileData, None]:
        """Determines item type and processes accordingly asynchronously.

        `item_type == "all"` previously fell through both branches and
        yielded nothing even though the config Literal allows it; it now
        emits article batches followed by ticket batches.
        """
        item_type = self.index_config.item_type
        batch_size = self.index_config.batch_size

        if item_type in ("articles", "all"):
            articles = await self._list_articles_async()
            async for file_data in self.handle_articles_async(articles, batch_size):
                yield file_data

        if item_type in ("tickets", "all"):
            tickets = await self._list_tickets_async()
            async for file_data in self.handle_tickets_async(tickets, batch_size):
                yield file_data
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
class ZendeskDownloaderConfig(DownloaderConfig, HtmlMixin):
    # No Zendesk-specific options; inherits the download settings from
    # DownloaderConfig and the HTML extraction flags (extract_images,
    # extract_files) from HtmlMixin.
    pass
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
@dataclass
class ZendeskDownloader(Downloader):
    """Downloads indexed Zendesk batches (tickets or articles) to local files."""

    download_config: ZendeskDownloaderConfig
    connection_config: ZendeskConnectionConfig
    connector_type: str = CONNECTOR_TYPE

    def is_async(self) -> bool:
        return True

    def download_embedded_files(
        self, session, html: str, current_file_data: FileData
    ) -> list[DownloadResponse]:
        """Extract files referenced by the article HTML, when extraction is enabled."""
        if not self.download_config.extract_files:
            return []
        url = current_file_data.metadata.url
        if url is None:
            logger.warning(
                f"""Missing URL for file: {current_file_data.source_identifiers.filename}.
                Skipping file extraction."""
            )
            return []
        filepath = current_file_data.source_identifiers.relative_path
        download_path = Path(self.download_dir) / filepath
        # Extracted files land in a sibling directory named after the file stem.
        download_dir = download_path.with_suffix("")
        return self.download_config.extract_embedded_files(
            url=url,
            download_dir=download_dir,
            original_filedata=current_file_data,
            html=html,
            session=session,
        )

    @requires_dependencies(["bs4", "aiofiles"], extras="zendesk")
    async def handle_articles_async(
        self, client: ZendeskClient, batch_file_data: ZendeskBatchFileData
    ):
        """
        Processes the article information, downloads the attachments for each article,
        and updates the content accordingly.
        """
        import aiofiles
        import bs4

        # Determine the download path
        download_path = self.get_download_path(batch_file_data)

        if download_path is None:
            raise ValueError("Download path could not be determined")

        download_path.parent.mkdir(parents=True, exist_ok=True)

        # Append mode: every article in the batch goes into the same file.
        async with aiofiles.open(download_path, "a", encoding="utf8") as f:
            for article in batch_file_data.batch_items:
                html_data_str = article.content
                soup = bs4.BeautifulSoup(html_data_str, "html.parser")

                if self.download_config.extract_images:
                    # Get article attachments asynchronously
                    image_data_decoded: List = await client.get_article_attachments_async(
                        article_id=article.identifier
                    )
                    img_tags = soup.find_all("img")

                    # zip stops at the shorter sequence, so we never index
                    # past the available attachment data.
                    for img_tag, img_data in zip(img_tags, image_data_decoded):
                        img_tag["src"] = img_data.get("encoded_content", "")

                await f.write(soup.prettify())

        return super().generate_download_response(
            file_data=batch_file_data, download_path=download_path
        )

    @requires_dependencies(["aiofiles"], extras="zendesk")
    async def handle_tickets_async(
        self, client: ZendeskClient, batch_file_data: ZendeskBatchFileData
    ) -> DownloadResponse:
        """
        Processes a batch of tickets asynchronously, writing their details and comments to a file.
        """
        import aiofiles

        # Determine the download path
        download_path = self.get_download_path(batch_file_data)
        if download_path is None:
            raise ValueError("Download path could not be determined")

        download_path.parent.mkdir(parents=True, exist_ok=True)

        # Process each ticket in the batch
        async with aiofiles.open(download_path, "a", encoding="utf8") as f:
            for batch_item in batch_file_data.batch_items:
                ticket_identifier = batch_item.identifier
                first_date = None
                comments: List[dict] = []

                # Fetch comments asynchronously
                comments_list = await client.get_comments_async(ticket_id=int(ticket_identifier))

                for comment in comments_list:
                    date_created = (
                        comment.metadata["created_at"].isoformat()
                        if isinstance(comment.metadata["created_at"], datetime.datetime)
                        else str(comment.metadata["created_at"])
                    )

                    # First comment's creation date stands in for the
                    # ticket's creation date.
                    if first_date is None:
                        first_date = date_created

                    comments.append(
                        {
                            "comment_id": comment.id,
                            "author_id": comment.author_id,
                            "body": comment.body,
                            "date_created": date_created,
                        }
                    )

                # Write ticket details to file. The subject/description live
                # on the batch item; the record_locator only holds the batch
                # id and item type, so reading them from there (as before)
                # always produced empty strings.
                content = (
                    "\nticket\n"
                    f"{batch_item.identifier}\n"
                    f"{batch_item.subject}\n"
                    f"{batch_item.description}\n"
                    f"{first_date}\n"
                )

                # Append comments
                for comment in comments:
                    content += (
                        "comment\n"
                        f"{comment.get('comment_id', '')}\n"
                        f"{comment.get('author_id', '')}\n"
                        f"{comment.get('body', '')}\n"
                        f"{comment.get('date_created', '')}\n"
                    )

                await f.write(content)

        return super().generate_download_response(
            file_data=batch_file_data, download_path=download_path
        )

    async def run_async(self, file_data: ZendeskBatchFileData, **kwargs: Any) -> DownloadResponse:
        """Dispatch the batch to the article or ticket handler based on its record locator."""
        zendesk_filedata: FileData = FileData.cast(file_data=file_data)

        client = await self.connection_config.get_client_async()
        item_type = zendesk_filedata.metadata.record_locator["item_type"]

        if item_type == "articles":
            return await self.handle_articles_async(client, file_data)
        elif item_type == "tickets":
            return await self.handle_tickets_async(client, file_data)
        else:
            raise RuntimeError(f"Item type {item_type} cannot be handled by the downloader")
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
# Register the Zendesk source connector with the v2 connector registry.
zendesk_source_entry = SourceRegistryEntry(
    indexer=ZendeskIndexer,
    indexer_config=ZendeskIndexerConfig,
    downloader=ZendeskDownloader,
    downloader_config=ZendeskDownloaderConfig,
    connection_config=ZendeskConnectionConfig,
)
|