notionary 0.2.16__py3-none-any.whl → 0.2.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- notionary/__init__.py +9 -5
- notionary/base_notion_client.py +18 -7
- notionary/blocks/__init__.py +2 -0
- notionary/blocks/document_element.py +194 -0
- notionary/database/__init__.py +4 -0
- notionary/database/database.py +481 -0
- notionary/database/{filter_builder.py → database_filter_builder.py} +27 -29
- notionary/database/{notion_database_provider.py → database_provider.py} +4 -4
- notionary/database/notion_database.py +45 -18
- notionary/file_upload/__init__.py +7 -0
- notionary/file_upload/client.py +254 -0
- notionary/file_upload/models.py +60 -0
- notionary/file_upload/notion_file_upload.py +387 -0
- notionary/page/notion_page.py +4 -3
- notionary/telemetry/views.py +15 -6
- notionary/user/__init__.py +11 -0
- notionary/user/base_notion_user.py +52 -0
- notionary/user/client.py +129 -0
- notionary/user/models.py +83 -0
- notionary/user/notion_bot_user.py +227 -0
- notionary/user/notion_user.py +256 -0
- notionary/user/notion_user_manager.py +173 -0
- notionary/user/notion_user_provider.py +1 -0
- notionary/util/__init__.py +3 -5
- notionary/util/{factory_decorator.py → factory_only.py} +9 -5
- notionary/util/fuzzy.py +74 -0
- notionary/util/logging_mixin.py +12 -12
- notionary/workspace.py +38 -2
- {notionary-0.2.16.dist-info → notionary-0.2.17.dist-info}/METADATA +2 -1
- {notionary-0.2.16.dist-info → notionary-0.2.17.dist-info}/RECORD +34 -20
- notionary/util/fuzzy_matcher.py +0 -82
- /notionary/database/{database_exceptions.py → exceptions.py} +0 -0
- /notionary/util/{singleton_decorator.py → singleton.py} +0 -0
- {notionary-0.2.16.dist-info → notionary-0.2.17.dist-info}/LICENSE +0 -0
- {notionary-0.2.16.dist-info → notionary-0.2.17.dist-info}/WHEEL +0 -0
@@ -0,0 +1,387 @@
|
|
1
|
+
import asyncio
|
2
|
+
import mimetypes
|
3
|
+
from typing import Optional
|
4
|
+
from pathlib import Path
|
5
|
+
from datetime import datetime, timedelta
|
6
|
+
from io import BytesIO
|
7
|
+
|
8
|
+
|
9
|
+
from notionary.file_upload.models import FileUploadResponse
|
10
|
+
from notionary.util import LoggingMixin
|
11
|
+
|
12
|
+
|
13
|
+
class NotionFileUpload(LoggingMixin):
    """
    High-level service for managing Notion file uploads.

    Handles both small file (single-part) and large file (multi-part)
    uploads, choosing the mode automatically from the file size, and
    provides helpers for polling upload status and listing recent uploads.
    """

    # Notion's file size limits
    SINGLE_PART_MAX_SIZE = 20 * 1024 * 1024  # 20MB
    MULTI_PART_CHUNK_SIZE = 10 * 1024 * 1024  # 10MB per part
    MAX_FILENAME_BYTES = 900

    def __init__(self, token: Optional[str] = None):
        """Initialize the file upload service."""
        # Imported lazily to avoid a circular import with the package root.
        from notionary import NotionFileUploadClient

        self.client = NotionFileUploadClient(token=token)

    def _validate_filename(self, filename: str) -> bool:
        """
        Check that *filename* fits Notion's byte-length limit.

        Returns:
            True if the UTF-8 encoded name is within MAX_FILENAME_BYTES;
            False (after logging an error) otherwise.
        """
        encoded_length = len(filename.encode("utf-8"))
        if encoded_length > self.MAX_FILENAME_BYTES:
            self.logger.error(
                "Filename too long: %d bytes (max %d)",
                encoded_length,
                self.MAX_FILENAME_BYTES,
            )
            return False
        return True

    async def upload_file(
        self, file_path: Path, filename: Optional[str] = None
    ) -> Optional[FileUploadResponse]:
        """
        Upload a file to Notion, automatically choosing single-part or multi-part based on size.

        Args:
            file_path: Path to the file to upload
            filename: Optional custom filename (defaults to file_path.name)

        Returns:
            FileUploadResponse if successful, None otherwise
        """
        if not file_path.exists():
            self.logger.error("File does not exist: %s", file_path)
            return None

        file_size = file_path.stat().st_size
        filename = filename or file_path.name

        if not self._validate_filename(filename):
            return None

        # Choose upload method based on file size
        if file_size <= self.SINGLE_PART_MAX_SIZE:
            return await self._upload_small_file(file_path, filename, file_size)
        return await self._upload_large_file(file_path, filename, file_size)

    async def upload_from_bytes(
        self, file_content: bytes, filename: str, content_type: Optional[str] = None
    ) -> Optional[FileUploadResponse]:
        """
        Upload file content from bytes.

        Args:
            file_content: File content as bytes
            filename: Name for the file
            content_type: Optional MIME type (guessed from the filename if omitted)

        Returns:
            FileUploadResponse if successful, None otherwise
        """
        file_size = len(file_content)

        if not self._validate_filename(filename):
            return None

        # Guess content type if not provided
        if not content_type:
            content_type, _ = mimetypes.guess_type(filename)

        # Choose upload method based on size
        if file_size <= self.SINGLE_PART_MAX_SIZE:
            return await self._upload_small_file_from_bytes(
                file_content, filename, content_type, file_size
            )
        return await self._upload_large_file_from_bytes(
            file_content, filename, content_type, file_size
        )

    async def get_upload_status(self, file_upload_id: str) -> Optional[str]:
        """
        Get the current status of a file upload.

        Args:
            file_upload_id: ID of the file upload

        Returns:
            Status string ("pending", "uploaded", etc.) or None if failed
        """
        upload_info = await self.client.retrieve_file_upload(file_upload_id)
        return upload_info.status if upload_info else None

    async def wait_for_upload_completion(
        self, file_upload_id: str, timeout_seconds: int = 300, poll_interval: int = 2
    ) -> Optional[FileUploadResponse]:
        """
        Wait for a file upload to complete.

        Args:
            file_upload_id: ID of the file upload
            timeout_seconds: Maximum time to wait
            poll_interval: Seconds between status checks

        Returns:
            FileUploadResponse when complete, None if timeout or failed
        """
        start_time = datetime.now()
        timeout_delta = timedelta(seconds=timeout_seconds)

        while datetime.now() - start_time < timeout_delta:
            upload_info = await self.client.retrieve_file_upload(file_upload_id)

            if not upload_info:
                self.logger.error(
                    "Failed to retrieve upload info for %s", file_upload_id
                )
                return None

            if upload_info.status == "uploaded":
                self.logger.info("Upload completed: %s", file_upload_id)
                return upload_info
            elif upload_info.status == "failed":
                self.logger.error("Upload failed: %s", file_upload_id)
                return None

            await asyncio.sleep(poll_interval)

        self.logger.warning("Upload timeout: %s", file_upload_id)
        return None

    async def list_recent_uploads(self, limit: int = 50) -> list[FileUploadResponse]:
        """
        List recent file uploads.

        Args:
            limit: Maximum number of uploads to return

        Returns:
            List of FileUploadResponse objects
        """
        uploads = []
        start_cursor = None
        remaining = limit

        while remaining > 0:
            page_size = min(remaining, 100)  # API max per request

            response = await self.client.list_file_uploads(
                page_size=page_size, start_cursor=start_cursor
            )

            if not response or not response.results:
                break

            uploads.extend(response.results)
            remaining -= len(response.results)

            if not response.has_more or not response.next_cursor:
                break

            start_cursor = response.next_cursor

        return uploads[:limit]

    async def _upload_small_file(
        self, file_path: Path, filename: str, file_size: int
    ) -> Optional[FileUploadResponse]:
        """Upload a small file using single-part upload."""
        content_type, _ = mimetypes.guess_type(str(file_path))

        # Create file upload
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="single_part",
        )

        if not file_upload:
            self.logger.error("Failed to create file upload for %s", filename)
            return None

        # Send file content
        success = await self.client.send_file_from_path(
            file_upload_id=file_upload.id, file_path=file_path
        )

        if not success:
            self.logger.error("Failed to send file content for %s", filename)
            return None

        self.logger.info(
            "Successfully uploaded file: %s (ID: %s)", filename, file_upload.id
        )
        return file_upload

    async def _upload_large_file(
        self, file_path: Path, filename: str, file_size: int
    ) -> Optional[FileUploadResponse]:
        """Upload a large file using multi-part upload."""
        content_type, _ = mimetypes.guess_type(str(file_path))

        # Create file upload with multi-part mode
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="multi_part",
        )

        if not file_upload:
            self.logger.error(
                "Failed to create multi-part file upload for %s", filename
            )
            return None

        # Upload file in parts
        success = await self._upload_file_parts(file_upload.id, file_path, file_size)

        if not success:
            self.logger.error("Failed to upload file parts for %s", filename)
            return None

        # Complete the upload
        completed_upload = await self.client.complete_file_upload(file_upload.id)

        if not completed_upload:
            self.logger.error("Failed to complete file upload for %s", filename)
            return None

        self.logger.info(
            "Successfully uploaded large file: %s (ID: %s)", filename, file_upload.id
        )
        return completed_upload

    async def _upload_small_file_from_bytes(
        self,
        file_content: bytes,
        filename: str,
        content_type: Optional[str],
        file_size: int,
    ) -> Optional[FileUploadResponse]:
        """Upload small file from bytes."""
        # Create file upload
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="single_part",
        )

        if not file_upload:
            return None

        # Send file content (BytesIO is imported at module level)
        success = await self.client.send_file_upload(
            file_upload_id=file_upload.id,
            file_content=BytesIO(file_content),
            filename=filename,
        )

        return file_upload if success else None

    async def _upload_large_file_from_bytes(
        self,
        file_content: bytes,
        filename: str,
        content_type: Optional[str],
        file_size: int,
    ) -> Optional[FileUploadResponse]:
        """Upload large file from bytes using multi-part."""
        # Create file upload
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="multi_part",
        )

        if not file_upload:
            return None

        # Upload in chunks
        success = await self._upload_bytes_parts(file_upload.id, file_content)

        if not success:
            return None

        # Complete the upload
        return await self.client.complete_file_upload(file_upload.id)

    async def _upload_file_parts(
        self, file_upload_id: str, file_path: Path, file_size: int
    ) -> bool:
        """Upload file in parts for multi-part upload."""
        part_number = 1
        # Ceiling division: last part may be smaller than the chunk size.
        total_parts = (
            file_size + self.MULTI_PART_CHUNK_SIZE - 1
        ) // self.MULTI_PART_CHUNK_SIZE

        try:
            # Third-party async file I/O; imported lazily so the class can be
            # used without aiofiles unless a large on-disk upload is requested.
            import aiofiles

            async with aiofiles.open(file_path, "rb") as file:
                while True:
                    chunk = await file.read(self.MULTI_PART_CHUNK_SIZE)
                    if not chunk:
                        break

                    success = await self.client.send_file_upload(
                        file_upload_id=file_upload_id,
                        file_content=BytesIO(chunk),
                        filename=file_path.name,
                        part_number=part_number,
                    )

                    if not success:
                        self.logger.error(
                            "Failed to upload part %d/%d", part_number, total_parts
                        )
                        return False

                    self.logger.debug("Uploaded part %d/%d", part_number, total_parts)
                    part_number += 1

            self.logger.info("Successfully uploaded all %d parts", total_parts)
            return True

        except Exception as e:
            self.logger.error("Error uploading file parts: %s", e)
            return False

    async def _upload_bytes_parts(
        self, file_upload_id: str, file_content: bytes
    ) -> bool:
        """Upload bytes in parts for multi-part upload."""
        part_number = 1
        # Ceiling division: last part may be smaller than the chunk size.
        total_parts = (
            len(file_content) + self.MULTI_PART_CHUNK_SIZE - 1
        ) // self.MULTI_PART_CHUNK_SIZE

        for i in range(0, len(file_content), self.MULTI_PART_CHUNK_SIZE):
            chunk = file_content[i : i + self.MULTI_PART_CHUNK_SIZE]

            success = await self.client.send_file_upload(
                file_upload_id=file_upload_id,
                file_content=BytesIO(chunk),
                part_number=part_number,
            )

            if not success:
                self.logger.error(
                    "Failed to upload part %d/%d", part_number, total_parts
                )
                return False

            self.logger.debug("Uploaded part %d/%d", part_number, total_parts)
            part_number += 1

        self.logger.info("Successfully uploaded all %d parts", total_parts)
        return True
notionary/page/notion_page.py
CHANGED
@@ -14,7 +14,8 @@ from notionary.page.content.page_content_writer import PageContentWriter
|
|
14
14
|
from notionary.page.property_formatter import NotionPropertyFormatter
|
15
15
|
from notionary.page.utils import extract_property_value
|
16
16
|
|
17
|
-
from notionary.util import LoggingMixin, format_uuid,
|
17
|
+
from notionary.util import LoggingMixin, format_uuid, factory_only
|
18
|
+
from notionary.util.fuzzy import find_best_match
|
18
19
|
|
19
20
|
|
20
21
|
if TYPE_CHECKING:
|
@@ -101,7 +102,7 @@ class NotionPage(LoggingMixin):
|
|
101
102
|
cls.logger.warning("No pages found for name: %s", page_name)
|
102
103
|
raise ValueError(f"No pages found for name: {page_name}")
|
103
104
|
|
104
|
-
best_match =
|
105
|
+
best_match = find_best_match(
|
105
106
|
query=page_name,
|
106
107
|
items=search_results,
|
107
108
|
text_extractor=lambda page: page.title,
|
@@ -493,7 +494,7 @@ class NotionPage(LoggingMixin):
|
|
493
494
|
"""
|
494
495
|
Create NotionPage instance from API response.
|
495
496
|
"""
|
496
|
-
from notionary.database.
|
497
|
+
from notionary.database.database import NotionDatabase
|
497
498
|
|
498
499
|
title = cls._extract_title(page_response)
|
499
500
|
emoji = cls._extract_emoji(page_response)
|
notionary/telemetry/views.py
CHANGED
@@ -25,6 +25,7 @@ class DatabaseFactoryUsedEvent(BaseTelemetryEvent):
|
|
25
25
|
def name(self) -> str:
|
26
26
|
return "database_factory_used"
|
27
27
|
|
28
|
+
|
28
29
|
@dataclass
|
29
30
|
class QueryOperationEvent(BaseTelemetryEvent):
|
30
31
|
"""Event fired when a query operation is performed"""
|
@@ -34,7 +35,8 @@ class QueryOperationEvent(BaseTelemetryEvent):
|
|
34
35
|
@property
|
35
36
|
def name(self) -> str:
|
36
37
|
return "query_operation"
|
37
|
-
|
38
|
+
|
39
|
+
|
38
40
|
@dataclass
|
39
41
|
class NotionMarkdownSyntaxPromptEvent(BaseTelemetryEvent):
|
40
42
|
"""Event fired when Notion Markdown syntax is used"""
|
@@ -43,12 +45,16 @@ class NotionMarkdownSyntaxPromptEvent(BaseTelemetryEvent):
|
|
43
45
|
def name(self) -> str:
|
44
46
|
return "notion_markdown_syntax_used"
|
45
47
|
|
48
|
+
|
46
49
|
# Tracks markdown conversion
|
47
50
|
@dataclass
|
48
51
|
class MarkdownToNotionConversionEvent(BaseTelemetryEvent):
|
49
52
|
"""Event fired when markdown is converted to Notion blocks"""
|
50
|
-
|
51
|
-
|
53
|
+
|
54
|
+
handler_element_name: Optional[str] = (
|
55
|
+
None # e.g. "HeadingElement", "ParagraphElement"
|
56
|
+
)
|
57
|
+
|
52
58
|
@property
|
53
59
|
def name(self) -> str:
|
54
60
|
return "markdown_to_notion_conversion"
|
@@ -57,8 +63,11 @@ class MarkdownToNotionConversionEvent(BaseTelemetryEvent):
|
|
57
63
|
@dataclass
|
58
64
|
class NotionToMarkdownConversionEvent(BaseTelemetryEvent):
|
59
65
|
"""Event fired when Notion blocks are converted to markdown"""
|
60
|
-
|
61
|
-
|
66
|
+
|
67
|
+
handler_element_name: Optional[str] = (
|
68
|
+
None # e.g. "HeadingElement", "ParagraphElement"
|
69
|
+
)
|
70
|
+
|
62
71
|
@property
|
63
72
|
def name(self) -> str:
|
64
|
-
return "notion_to_markdown_conversion"
|
73
|
+
return "notion_to_markdown_conversion"
|
@@ -0,0 +1,11 @@
|
|
1
|
+
"""Public API of the ``notionary.user`` package.

Re-exports the user-facing classes so callers can import them directly
from ``notionary.user`` instead of the individual submodules.
"""

from .notion_user import NotionUser
from .notion_user_manager import NotionUserManager
from .client import NotionUserClient
from .notion_bot_user import NotionBotUser

__all__ = [
    "NotionUser",
    "NotionUserManager",
    "NotionUserClient",
    "NotionBotUser",
]
|
@@ -0,0 +1,52 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
from abc import ABC
|
3
|
+
from typing import Optional
|
4
|
+
from notionary.user.client import NotionUserClient
|
5
|
+
|
6
|
+
from notionary.util import LoggingMixin
|
7
|
+
|
8
|
+
|
9
|
+
class BaseNotionUser(LoggingMixin, ABC):
    """
    Base class for all Notion user types with common functionality.

    Stores the core identity fields (id, name, avatar URL) and owns a
    ``NotionUserClient`` for API access. Concrete user types build on top.
    """

    def __init__(
        self,
        user_id: str,
        name: Optional[str] = None,
        avatar_url: Optional[str] = None,
        token: Optional[str] = None,
    ):
        """Initialize base user properties."""
        self._user_id = user_id
        self._name = name
        self._avatar_url = avatar_url
        self.client = NotionUserClient(token=token)

    @property
    def id(self) -> str:
        """The user ID."""
        return self._user_id

    @property
    def name(self) -> Optional[str]:
        """The user name, if known."""
        return self._name

    @property
    def avatar_url(self) -> Optional[str]:
        """The avatar URL, if known."""
        return self._avatar_url

    def get_display_name(self) -> str:
        """Return a human-readable name, falling back to a shortened ID."""
        if self._name:
            return self._name
        return f"User {self._user_id[:8]}"

    def __str__(self) -> str:
        """String representation of the user."""
        return f"{self.__class__.__name__}(name='{self.get_display_name()}', id='{self._user_id[:8]}...')"

    def __repr__(self) -> str:
        """Detailed string representation (same as ``__str__``)."""
        return str(self)
|
notionary/user/client.py
ADDED
@@ -0,0 +1,129 @@
|
|
1
|
+
from typing import Optional, List
|
2
|
+
from notionary.base_notion_client import BaseNotionClient
|
3
|
+
from notionary.user.models import (
|
4
|
+
NotionBotUserResponse,
|
5
|
+
NotionUserResponse,
|
6
|
+
NotionUsersListResponse,
|
7
|
+
)
|
8
|
+
from notionary.util import singleton
|
9
|
+
|
10
|
+
|
11
|
+
@singleton
class NotionUserClient(BaseNotionClient):
    """
    Client for Notion user-specific operations.
    Inherits base HTTP functionality from BaseNotionClient.

    Note: The Notion API only supports individual user queries and bot user info.
    List users endpoint is available but only returns workspace members (no guests).
    """

    def _validate_response(self, model_cls, payload, context: str):
        """
        Validate a raw API payload against a pydantic model.

        Args:
            model_cls: Model class exposing ``model_validate``.
            payload: Raw response data returned by the API.
            context: Human-readable description used in the error log,
                e.g. "bot user response".

        Returns:
            The validated model instance, or None (after logging) on failure.
        """
        try:
            return model_cls.model_validate(payload)
        except Exception as e:
            self.logger.error("Failed to validate %s: %s", context, e)
            return None

    async def get_user(self, user_id: str) -> Optional[NotionUserResponse]:
        """
        Retrieve a user by their ID.
        """
        response = await self.get(f"users/{user_id}")
        if response is None:
            self.logger.error("Failed to fetch user %s - API returned None", user_id)
            return None

        return self._validate_response(
            NotionUserResponse, response, f"user response for {user_id}"
        )

    async def get_bot_user(self) -> Optional[NotionBotUserResponse]:
        """
        Retrieve your token's bot user information.
        """
        response = await self.get("users/me")
        if response is None:
            self.logger.error("Failed to fetch bot user - API returned None")
            return None

        return self._validate_response(
            NotionBotUserResponse, response, "bot user response"
        )

    async def list_users(
        self, page_size: int = 100, start_cursor: Optional[str] = None
    ) -> Optional[NotionUsersListResponse]:
        """
        List all users in the workspace (paginated).

        Note: Guests are not included in the response.
        """
        params = {"page_size": min(page_size, 100)}  # API max is 100
        if start_cursor:
            params["start_cursor"] = start_cursor

        response = await self.get("users", params=params)
        if response is None:
            self.logger.error("Failed to fetch users list - API returned None")
            return None

        return self._validate_response(
            NotionUsersListResponse, response, "users list response"
        )

    async def get_all_users(self) -> List[NotionUserResponse]:
        """
        Get all users in the workspace by handling pagination automatically.

        Best-effort: on any error the users collected so far are returned.
        """
        all_users: List[NotionUserResponse] = []
        start_cursor = None

        while True:
            try:
                response = await self.list_users(
                    page_size=100, start_cursor=start_cursor
                )

                if not response or not response.results:
                    break

                all_users.extend(response.results)

                # Check if there are more pages
                if not response.has_more or not response.next_cursor:
                    break

                start_cursor = response.next_cursor

            except Exception as e:
                # Deliberately swallow and return partial results.
                self.logger.error("Error fetching all users: %s", str(e))
                break

        self.logger.info("Retrieved %d total users from workspace", len(all_users))
        return all_users

    async def get_workspace_name(self) -> Optional[str]:
        """
        Get the workspace name from the bot user.
        """
        try:
            bot_user = await self.get_bot_user()
            if bot_user and bot_user.bot and bot_user.bot.workspace_name:
                return bot_user.bot.workspace_name
            return None
        except Exception as e:
            self.logger.error("Error fetching workspace name: %s", str(e))
            return None

    async def get_workspace_limits(self) -> Optional[dict]:
        """
        Get workspace limits from the bot user.
        """
        try:
            bot_user = await self.get_bot_user()
            if bot_user and bot_user.bot and bot_user.bot.workspace_limits:
                return bot_user.bot.workspace_limits.model_dump()
            return None
        except Exception as e:
            self.logger.error("Error fetching workspace limits: %s", str(e))
            return None
|