reportify-sdk 0.2.7__py3-none-any.whl → 0.2.9__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- reportify_sdk/__init__.py +1 -1
- reportify_sdk/agent.py +213 -0
- reportify_sdk/channels.py +126 -0
- reportify_sdk/chat.py +107 -0
- reportify_sdk/client.py +257 -57
- reportify_sdk/docs.py +351 -8
- reportify_sdk/kb.py +15 -0
- reportify_sdk/stock.py +231 -126
- reportify_sdk/timeline.py +34 -0
- reportify_sdk/user.py +44 -0
- {reportify_sdk-0.2.7.dist-info → reportify_sdk-0.2.9.dist-info}/METADATA +1 -1
- reportify_sdk-0.2.9.dist-info/RECORD +18 -0
- reportify_sdk-0.2.7.dist-info/RECORD +0 -14
- {reportify_sdk-0.2.7.dist-info → reportify_sdk-0.2.9.dist-info}/WHEEL +0 -0
- {reportify_sdk-0.2.7.dist-info → reportify_sdk-0.2.9.dist-info}/licenses/LICENSE +0 -0
- {reportify_sdk-0.2.7.dist-info → reportify_sdk-0.2.9.dist-info}/top_level.txt +0 -0
reportify_sdk/docs.py  CHANGED
@@ -86,21 +86,33 @@ class DocsModule:
         *,
         symbols: list[str] | None = None,
         categories: list[str] | None = None,
+        markets: list[str] | None = None,
+        institutions: list[str] | None = None,
+        tags: dict[str, list] | None = None,
+        folder_ids: list[str] | None = None,
         start_date: str | None = None,
         end_date: str | None = None,
-
-
+        min_score: float | None = None,
+        extended_filters: list[dict] | None = None,
+        page_num: int = 1,
+        page_size: int = 10,
     ) -> dict[str, Any]:
         """
         List documents with filters

         Args:
             symbols: Filter by stock symbols
-            categories: Filter by document categories
+            categories: Filter by document categories (financials, transcripts, reports, news, files, filings, socials)
+            markets: Filter by markets (cn, hk, us)
+            institutions: Filter by institutions
+            tags: Filter by tags (dict with key-value pairs)
+            folder_ids: Filter by folder IDs
             start_date: Start date filter (YYYY-MM-DD)
             end_date: End date filter (YYYY-MM-DD)
-
-
+            min_score: Minimum relevance score
+            extended_filters: Extended filter conditions
+            page_num: Page number (default: 1)
+            page_size: Number of items per page (default: 10)

         Returns:
             Dictionary with documents list and pagination info
@@ -111,17 +123,29 @@ class DocsModule:
             ... print(doc["title"])
         """
         data: dict[str, Any] = {
-            "page_num":
+            "page_num": page_num,
             "page_size": page_size,
         }
         if symbols:
             data["symbols"] = symbols
         if categories:
             data["categories"] = categories
+        if markets:
+            data["markets"] = markets
+        if institutions:
+            data["institutions"] = institutions
+        if tags:
+            data["tags"] = tags
+        if folder_ids:
+            data["folder_ids"] = folder_ids
         if start_date:
             data["start_date"] = start_date
         if end_date:
             data["end_date"] = end_date
+        if min_score is not None:
+            data["min_score"] = min_score
+        if extended_filters:
+            data["extended_filters"] = extended_filters

         return self._post("/v1/docs", json=data)

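Taken together, these two hunks extend DocsModule.list() with market, institution, tag, folder, relevance-score, and pagination filters, each forwarded to POST /v1/docs only when set. A minimal usage sketch follows; the client class name and constructor are assumptions (the diff covers only this module, not client.py), as is the "docs" key on the response.

```python
# Sketch only: ReportifyClient and its constructor are assumed names; check
# reportify_sdk/client.py in the package for the actual entry point.
from reportify_sdk import ReportifyClient  # assumed export

client = ReportifyClient(api_key="YOUR_API_KEY")  # assumed constructor

# New 0.2.9 filters: markets, institutions, tags, folder_ids, min_score,
# extended_filters, plus explicit page_num/page_size keyword arguments.
result = client.docs.list(
    symbols=["AAPL"],
    categories=["reports", "news"],
    markets=["us"],
    tags={"sector": ["technology"]},
    min_score=0.5,
    page_num=1,
    page_size=20,
)
for doc in result.get("docs", []):  # "docs" key assumed from the docstring wording
    print(doc["title"])
```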
@@ -131,9 +155,19 @@ class DocsModule:
         *,
         symbols: list[str] | None = None,
         categories: list[str] | None = None,
+        markets: list[str] | None = None,
+        institutions: list[str] | None = None,
+        tags: dict[str, list] | None = None,
+        folder_ids: list[str] | None = None,
+        doc_ids: list[str] | None = None,
         start_date: str | None = None,
         end_date: str | None = None,
+        min_score: float | None = None,
+        extended_filters: list[dict] | None = None,
         num: int = 10,
+        include_doc_extra_details: bool = False,
+        refine_question: bool = False,
+        date_range: str | None = None,
     ) -> list[dict[str, Any]]:
         """
         Search document chunks semantically
@@ -143,10 +177,20 @@ class DocsModule:
         Args:
             query: Search query string
             symbols: Filter by stock symbols
-            categories: Filter by document categories
+            categories: Filter by document categories (financials, transcripts, reports, news, filings, socials)
+            markets: Filter by markets (cn, hk, us)
+            institutions: Filter by institutions
+            tags: Filter by tags (dict with key-value pairs)
+            folder_ids: Filter by folder IDs
+            doc_ids: Filter by document IDs
             start_date: Start date filter (YYYY-MM-DD)
             end_date: End date filter (YYYY-MM-DD)
-
+            min_score: Minimum relevance score
+            extended_filters: Extended filter conditions
+            num: Number of results to return (default: 10)
+            include_doc_extra_details: Include extra document details
+            refine_question: Refine the search question
+            date_range: Date range filter (h, d, w, m, y)

         Returns:
             List of matching chunks with document info
@@ -164,10 +208,309 @@ class DocsModule:
             data["symbols"] = symbols
         if categories:
             data["categories"] = categories
+        if markets:
+            data["markets"] = markets
+        if institutions:
+            data["institutions"] = institutions
+        if tags:
+            data["tags"] = tags
+        if folder_ids:
+            data["folder_ids"] = folder_ids
+        if doc_ids:
+            data["doc_ids"] = doc_ids
         if start_date:
             data["start_date"] = start_date
         if end_date:
             data["end_date"] = end_date
+        if min_score is not None:
+            data["min_score"] = min_score
+        if extended_filters:
+            data["extended_filters"] = extended_filters
+        if include_doc_extra_details:
+            data["include_doc_extra_details"] = include_doc_extra_details
+        if refine_question:
+            data["refine_question"] = refine_question
+        if date_range:
+            data["date_range"] = date_range

         response = self._post("/v1/search/chunks", json=data)
         return response.get("chunks", [])
+
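search_chunks() follows the same pattern: each new keyword argument is added to the POST /v1/search/chunks payload only when set, and the method still unwraps the "chunks" list from the response. A hedged sketch; client construction is as in the list() example above, and the chunk field names are borrowed from the kb.search() docstring shown further down rather than documented here.

```python
# Sketch only: `client` is assumed to be built as in the docs.list() sketch above.
chunks = client.docs.search_chunks(
    query="Q3 revenue guidance",
    symbols=["AAPL"],
    categories=["transcripts", "reports"],
    markets=["us"],
    date_range="m",               # one of h, d, w, m, y per the docstring
    min_score=0.4,
    num=5,
    include_doc_extra_details=True,
)
for chunk in chunks:
    # "content" and "doc" keys assumed from the kb.search() docstring example
    print(chunk["content"][:100])
    print("From:", chunk["doc"]["title"])
```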
+    def query_by_symbols(
+        self,
+        symbols: list[str],
+        *,
+        categories: list[str] | None = None,
+        markets: list[str] | None = None,
+        start_date: str | None = None,
+        end_date: str | None = None,
+        page_num: int = 1,
+        page_size: int = 10,
+    ) -> dict[str, Any]:
+        """
+        Query documents by stock symbols
+
+        Args:
+            symbols: Stock symbols to filter by (required)
+            categories: Filter by document categories
+            markets: Filter by markets (cn, hk, us)
+            start_date: Start date filter (YYYY-MM-DD)
+            end_date: End date filter (YYYY-MM-DD)
+            page_num: Page number (default: 1)
+            page_size: Number of items per page (default: 10)
+
+        Returns:
+            Dictionary with documents list and pagination info
+        """
+        data: dict[str, Any] = {
+            "symbols": symbols,
+            "page_num": page_num,
+            "page_size": page_size,
+        }
+        if categories:
+            data["categories"] = categories
+        if markets:
+            data["markets"] = markets
+        if start_date:
+            data["start_date"] = start_date
+        if end_date:
+            data["end_date"] = end_date
+
+        return self._post("/v1/docs/symbols", json=data)
+
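query_by_symbols() is a thin wrapper over POST /v1/docs/symbols: symbols is the only required argument and the remaining filters are included only when truthy. For example (same assumed client as above):

```python
# Sketch only: assumes `client` from the docs.list() sketch above.
page = client.docs.query_by_symbols(
    ["AAPL", "0700.HK"],
    categories=["filings"],
    markets=["us", "hk"],
    start_date="2024-01-01",
    page_size=50,
)
```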
+    def query_by_tags(
+        self,
+        tags: dict[str, list],
+        *,
+        categories: list[str] | None = None,
+        markets: list[str] | None = None,
+        start_date: str | None = None,
+        end_date: str | None = None,
+        page_num: int = 1,
+        page_size: int = 10,
+    ) -> dict[str, Any]:
+        """
+        Query documents by tags
+
+        Args:
+            tags: Tags to filter by (required, dict with key-value pairs)
+            categories: Filter by document categories
+            markets: Filter by markets (cn, hk, us)
+            start_date: Start date filter (YYYY-MM-DD)
+            end_date: End date filter (YYYY-MM-DD)
+            page_num: Page number (default: 1)
+            page_size: Number of items per page (default: 10)
+
+        Returns:
+            Dictionary with documents list and pagination info
+        """
+        data: dict[str, Any] = {
+            "tags": tags,
+            "page_num": page_num,
+            "page_size": page_size,
+        }
+        if categories:
+            data["categories"] = categories
+        if markets:
+            data["markets"] = markets
+        if start_date:
+            data["start_date"] = start_date
+        if end_date:
+            data["end_date"] = end_date
+
+        return self._post("/v1/docs/tags", json=data)
+
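query_by_tags() is the same convenience shape against POST /v1/docs/tags, with a required tags dict mapping each tag key to a list of values. The tag keys and values below are purely illustrative:

```python
# Sketch only: assumes `client` from the docs.list() sketch above; tag keys are made up.
page = client.docs.query_by_tags(
    {"industry": ["semiconductors"], "source": ["internal"]},
    markets=["cn"],
    page_num=1,
    page_size=10,
)
```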
+    def search(
+        self,
+        *,
+        query: str | None = None,
+        symbols: list[str] | None = None,
+        categories: list[str] | None = None,
+        markets: list[str] | None = None,
+        institutions: list[str] | None = None,
+        tags: dict[str, list] | None = None,
+        folder_ids: list[str] | None = None,
+        start_date: str | None = None,
+        end_date: str | None = None,
+        min_score: float | None = None,
+        extended_filters: list[dict] | None = None,
+        page_num: int = 1,
+        page_size: int = 10,
+        mode: str = "smart",
+        sort: str = "smart",
+        should_highlight: bool = False,
+    ) -> dict[str, Any]:
+        """
+        Search documents (v1 API)
+
+        Args:
+            query: Search query string
+            symbols: Filter by stock symbols
+            categories: Filter by document categories
+            markets: Filter by markets (cn, hk, us)
+            institutions: Filter by institutions
+            tags: Filter by tags
+            folder_ids: Filter by folder IDs
+            start_date: Start date filter (YYYY-MM-DD)
+            end_date: End date filter (YYYY-MM-DD)
+            min_score: Minimum relevance score
+            extended_filters: Extended filter conditions
+            page_num: Page number (default: 1)
+            page_size: Number of items per page (default: 10)
+            mode: Search mode ("smart", "semantic", "keywords")
+            sort: Sort order ("smart", "latest")
+            should_highlight: Whether to highlight matches
+
+        Returns:
+            Dictionary with documents list and pagination info
+        """
+        data: dict[str, Any] = {
+            "page_num": page_num,
+            "page_size": page_size,
+            "mode": mode,
+            "sort": sort,
+            "should_highlight": should_highlight,
+        }
+        if query:
+            data["query"] = query
+        if symbols:
+            data["symbols"] = symbols
+        if categories:
+            data["categories"] = categories
+        if markets:
+            data["markets"] = markets
+        if institutions:
+            data["institutions"] = institutions
+        if tags:
+            data["tags"] = tags
+        if folder_ids:
+            data["folder_ids"] = folder_ids
+        if start_date:
+            data["start_date"] = start_date
+        if end_date:
+            data["end_date"] = end_date
+        if min_score is not None:
+            data["min_score"] = min_score
+        if extended_filters:
+            data["extended_filters"] = extended_filters
+
+        return self._post("/v1/search", json=data)
+
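search() exposes the POST /v1/search endpoint, combining the familiar filters with a mode/sort pair that both default to "smart" and an optional highlight flag. A sketch:

```python
# Sketch only: assumes `client` from the docs.list() sketch above.
results = client.docs.search(
    query="AI capex outlook",
    categories=["reports", "news"],
    markets=["us"],
    mode="semantic",        # "smart", "semantic", or "keywords"
    sort="latest",          # "smart" or "latest"
    should_highlight=True,
    page_size=20,
)
```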
+    # -------------------------------------------------------------------------
+    # Folder Management
+    # -------------------------------------------------------------------------
+
+    def create_folder(self, name: str) -> dict[str, Any]:
+        """
+        Create a new folder
+
+        Args:
+            name: Folder name
+
+        Returns:
+            Dictionary with folder_id
+        """
+        return self._post("/v1/docs/folder/create", json={"name": name})
+
+    def delete_folder(self, folder_id: str) -> dict[str, Any]:
+        """
+        Delete a folder and all files in it
+
+        Args:
+            folder_id: Folder ID to delete
+
+        Returns:
+            Dictionary with deleted doc_ids and folder_id
+        """
+        return self._client._request(
+            "DELETE", "/v1/docs/folder/delete", json={"folder_id": folder_id}
+        )
+
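create_folder() returns a folder ID that can then be passed to upload() or used with the folder_ids filters above, while delete_folder() removes the folder and every file in it via a DELETE request with a JSON body (hence the drop down to self._client._request() instead of _post()). A lifecycle sketch; the folder_id response key is taken from the docstring:

```python
# Sketch only: assumes `client` from the docs.list() sketch above.
folder = client.docs.create_folder("Earnings 2024")
folder_id = folder["folder_id"]  # key name per the docstring ("Dictionary with folder_id")

# ... upload documents into it, filter listings with folder_ids=[folder_id], ...

client.docs.delete_folder(folder_id)  # also deletes every file inside the folder
```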
+    # -------------------------------------------------------------------------
+    # Document Upload
+    # -------------------------------------------------------------------------
+
+    def upload(
+        self,
+        docs: list[dict[str, Any]],
+        *,
+        folder_id: str | None = None,
+        pdf_parsing_mode: int = 1,
+    ) -> dict[str, Any]:
+        """
+        Upload documents by URL
+
+        Args:
+            docs: List of document objects with url, name, metadatas, published_at, tags
+            folder_id: Folder ID to upload to (optional, uses default folder if not provided)
+            pdf_parsing_mode: PDF parsing mode (1: by page, 3: by logic)
+
+        Returns:
+            Dictionary with uploaded document IDs
+
+        Example:
+            >>> result = client.docs.upload([
+            ...     {"url": "https://example.com/doc.pdf", "name": "My Doc"}
+            ... ])
+        """
+        data: dict[str, Any] = {
+            "docs": docs,
+            "pdf_parsing_mode": pdf_parsing_mode,
+        }
+        if folder_id:
+            data["folder_id"] = folder_id
+
+        return self._post("/v1/docs/upload", json=data)
+
+    def upload_async(
+        self,
+        docs: list[dict[str, Any]],
+        *,
+        folder_id: str | None = None,
+        pdf_parsing_mode: int = 1,
+    ) -> dict[str, Any]:
+        """
+        Upload documents asynchronously by URL
+
+        Args:
+            docs: List of document objects with url, name, metadatas, published_at, tags
+            folder_id: Folder ID to upload to (optional)
+            pdf_parsing_mode: PDF parsing mode (1: by page, 3: by logic)
+
+        Returns:
+            Dictionary with uploaded document IDs
+        """
+        data: dict[str, Any] = {
+            "docs": docs,
+            "pdf_parsing_mode": pdf_parsing_mode,
+        }
+        if folder_id:
+            data["folder_id"] = folder_id
+
+        return self._post("/v1/docs/upload/async", json=data)
+
+    def get_upload_status(self, doc_id: str) -> dict[str, Any]:
+        """
+        Get document upload status
+
+        Args:
+            doc_id: Document ID
+
+        Returns:
+            Dictionary with id and status (pending, processing, completed)
+        """
+        return self._get(f"/v1/docs/{doc_id}/upload/status")
+
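upload() and upload_async() take the same payload and differ only in the endpoint (/v1/docs/upload vs /v1/docs/upload/async); get_upload_status() can then be polled until a document reaches "completed". A sketch of the async flow: the "doc_ids" key on the upload response is an assumption based on the "uploaded document IDs" wording, the polling loop is illustrative, and cleanup goes through the delete() method added just below.

```python
# Sketch only: assumes `client` from the docs.list() sketch above.
import time

result = client.docs.upload_async(
    [{"url": "https://example.com/doc.pdf", "name": "My Doc", "tags": {"source": ["web"]}}],
    folder_id="FOLDER_ID",   # optional; defaults to the account's default folder
    pdf_parsing_mode=1,      # 1: by page, 3: by logic
)

# "doc_ids" is an assumed response key; the docstring only says "uploaded document IDs".
for doc_id in result.get("doc_ids", []):
    status = client.docs.get_upload_status(doc_id)
    while status.get("status") != "completed":   # statuses: pending, processing, completed
        time.sleep(5)
        status = client.docs.get_upload_status(doc_id)
    print(doc_id, "->", status["status"])

# Cleanup via the delete() method added just below:
# client.docs.delete(result.get("doc_ids", []))
```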
+    def delete(self, doc_ids: list[str]) -> dict[str, Any]:
+        """
+        Delete documents
+
+        Args:
+            doc_ids: List of document IDs to delete
+
+        Returns:
+            Dictionary with deleted doc_ids
+        """
+        return self._client._request(
+            "DELETE", "/v1/docs/delete", json={"doc_ids": doc_ids}
+        )
reportify_sdk/kb.py  CHANGED
@@ -3,8 +3,13 @@ Knowledge Base Module

 Provides access to user's personal knowledge base for searching
 uploaded documents and folders.
+
+NOTE: This module uses internal APIs that are not documented in the public OpenAPI spec.
+These endpoints may change without notice. For production use, consider using
+the documented docs.search_chunks() method instead.
 """

+import warnings
 from typing import TYPE_CHECKING, Any

 if TYPE_CHECKING:
@@ -39,6 +44,10 @@ class KBModule:
         """
         Search user's knowledge base

+        .. deprecated::
+            This method uses an internal API not documented in the public OpenAPI spec.
+            Consider using client.docs.search_chunks() instead.
+
         Performs semantic search across documents the user has uploaded
         to their personal knowledge base.

@@ -59,6 +68,12 @@ class KBModule:
             ... print(chunk["content"][:100])
             ... print(f"From: {chunk['doc']['title']}")
         """
+        warnings.warn(
+            "kb.search() uses an internal API not documented in the public OpenAPI spec. "
+            "Consider using docs.search_chunks() instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
         data: dict[str, Any] = {
             "query": query,
             "num": num,