notionary 0.2.15__py3-none-any.whl → 0.2.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. notionary/__init__.py +9 -5
  2. notionary/base_notion_client.py +19 -8
  3. notionary/blocks/__init__.py +2 -0
  4. notionary/blocks/document_element.py +194 -0
  5. notionary/blocks/registry/block_registry.py +27 -3
  6. notionary/database/__init__.py +4 -0
  7. notionary/database/database.py +481 -0
  8. notionary/database/{filter_builder.py → database_filter_builder.py} +27 -29
  9. notionary/database/{notion_database_provider.py → database_provider.py} +6 -10
  10. notionary/database/notion_database.py +73 -32
  11. notionary/file_upload/__init__.py +7 -0
  12. notionary/file_upload/client.py +254 -0
  13. notionary/file_upload/models.py +60 -0
  14. notionary/file_upload/notion_file_upload.py +387 -0
  15. notionary/page/notion_page.py +5 -6
  16. notionary/telemetry/__init__.py +19 -0
  17. notionary/telemetry/service.py +136 -0
  18. notionary/telemetry/views.py +73 -0
  19. notionary/user/__init__.py +11 -0
  20. notionary/user/base_notion_user.py +52 -0
  21. notionary/user/client.py +129 -0
  22. notionary/user/models.py +83 -0
  23. notionary/user/notion_bot_user.py +227 -0
  24. notionary/user/notion_user.py +256 -0
  25. notionary/user/notion_user_manager.py +173 -0
  26. notionary/user/notion_user_provider.py +1 -0
  27. notionary/util/__init__.py +5 -5
  28. notionary/util/{factory_decorator.py → factory_only.py} +9 -5
  29. notionary/util/fuzzy.py +74 -0
  30. notionary/util/logging_mixin.py +12 -12
  31. notionary/workspace.py +38 -2
  32. {notionary-0.2.15.dist-info → notionary-0.2.17.dist-info}/METADATA +3 -1
  33. {notionary-0.2.15.dist-info → notionary-0.2.17.dist-info}/RECORD +37 -20
  34. notionary/util/fuzzy_matcher.py +0 -82
  35. /notionary/database/{database_exceptions.py → exceptions.py} +0 -0
  36. /notionary/util/{singleton_decorator.py → singleton.py} +0 -0
  37. {notionary-0.2.15.dist-info → notionary-0.2.17.dist-info}/LICENSE +0 -0
  38. {notionary-0.2.15.dist-info → notionary-0.2.17.dist-info}/WHEEL +0 -0
@@ -0,0 +1,387 @@
1
+ import asyncio
2
+ import mimetypes
3
+ from typing import Optional
4
+ from pathlib import Path
5
+ from datetime import datetime, timedelta
6
+ from io import BytesIO
7
+
8
+
9
+ from notionary.file_upload.models import FileUploadResponse
10
+ from notionary.util import LoggingMixin
11
+
12
+
13
class NotionFileUpload(LoggingMixin):
    """
    High-level service for managing Notion file uploads.

    Handles both small file (single-part) and large file (multi-part)
    uploads, choosing automatically based on file size. All public
    methods return None on failure instead of raising.
    """

    # Notion's file size limits
    SINGLE_PART_MAX_SIZE = 20 * 1024 * 1024  # 20MB
    MULTI_PART_CHUNK_SIZE = 10 * 1024 * 1024  # 10MB per part
    MAX_FILENAME_BYTES = 900  # limit on the UTF-8 encoded filename length

    def __init__(self, token: Optional[str] = None):
        """Initialize the file upload service.

        Args:
            token: Optional Notion API token; when omitted the client's
                default token resolution is used.
        """
        # Local import avoids a circular import with the package root.
        from notionary import NotionFileUploadClient

        self.client = NotionFileUploadClient(token=token)

    def _is_valid_filename(self, filename: str) -> bool:
        """Return True if the UTF-8 encoded filename fits Notion's limit.

        Logs an error and returns False otherwise. Shared by the path- and
        bytes-based entry points so the validation (and the encoding) is
        done exactly once per call.
        """
        encoded_length = len(filename.encode("utf-8"))
        if encoded_length > self.MAX_FILENAME_BYTES:
            self.logger.error(
                "Filename too long: %d bytes (max %d)",
                encoded_length,
                self.MAX_FILENAME_BYTES,
            )
            return False
        return True

    async def upload_file(
        self, file_path: Path, filename: Optional[str] = None
    ) -> Optional[FileUploadResponse]:
        """
        Upload a file to Notion, automatically choosing single-part or multi-part based on size.

        Args:
            file_path: Path to the file to upload
            filename: Optional custom filename (defaults to file_path.name)

        Returns:
            FileUploadResponse if successful, None otherwise
        """
        if not file_path.exists():
            self.logger.error("File does not exist: %s", file_path)
            return None

        file_size = file_path.stat().st_size
        filename = filename or file_path.name

        # Validate filename length
        if not self._is_valid_filename(filename):
            return None

        # Choose upload method based on file size
        if file_size <= self.SINGLE_PART_MAX_SIZE:
            return await self._upload_small_file(file_path, filename, file_size)
        return await self._upload_large_file(file_path, filename, file_size)

    async def upload_from_bytes(
        self, file_content: bytes, filename: str, content_type: Optional[str] = None
    ) -> Optional[FileUploadResponse]:
        """
        Upload file content from bytes.

        Args:
            file_content: File content as bytes
            filename: Name for the file
            content_type: Optional MIME type (guessed from filename if omitted)

        Returns:
            FileUploadResponse if successful, None otherwise
        """
        file_size = len(file_content)

        # Validate filename length
        if not self._is_valid_filename(filename):
            return None

        # Guess content type if not provided
        if not content_type:
            content_type, _ = mimetypes.guess_type(filename)

        # Choose upload method based on size
        if file_size <= self.SINGLE_PART_MAX_SIZE:
            return await self._upload_small_file_from_bytes(
                file_content, filename, content_type, file_size
            )
        return await self._upload_large_file_from_bytes(
            file_content, filename, content_type, file_size
        )

    async def get_upload_status(self, file_upload_id: str) -> Optional[str]:
        """
        Get the current status of a file upload.

        Args:
            file_upload_id: ID of the file upload

        Returns:
            Status string ("pending", "uploaded", etc.) or None if failed
        """
        upload_info = await self.client.retrieve_file_upload(file_upload_id)
        return upload_info.status if upload_info else None

    async def wait_for_upload_completion(
        self, file_upload_id: str, timeout_seconds: int = 300, poll_interval: int = 2
    ) -> Optional[FileUploadResponse]:
        """
        Wait for a file upload to complete by polling its status.

        Args:
            file_upload_id: ID of the file upload
            timeout_seconds: Maximum time to wait
            poll_interval: Seconds between status checks

        Returns:
            FileUploadResponse when complete, None if timeout or failed
        """
        start_time = datetime.now()
        timeout_delta = timedelta(seconds=timeout_seconds)

        while datetime.now() - start_time < timeout_delta:
            upload_info = await self.client.retrieve_file_upload(file_upload_id)

            if not upload_info:
                self.logger.error(
                    "Failed to retrieve upload info for %s", file_upload_id
                )
                return None

            if upload_info.status == "uploaded":
                self.logger.info("Upload completed: %s", file_upload_id)
                return upload_info
            elif upload_info.status == "failed":
                self.logger.error("Upload failed: %s", file_upload_id)
                return None

            await asyncio.sleep(poll_interval)

        self.logger.warning("Upload timeout: %s", file_upload_id)
        return None

    async def list_recent_uploads(self, limit: int = 50) -> list[FileUploadResponse]:
        """
        List recent file uploads, paging through the API as needed.

        Args:
            limit: Maximum number of uploads to return

        Returns:
            List of FileUploadResponse objects (possibly empty)
        """
        uploads = []
        start_cursor = None
        remaining = limit

        while remaining > 0:
            page_size = min(remaining, 100)  # API max per request

            response = await self.client.list_file_uploads(
                page_size=page_size, start_cursor=start_cursor
            )

            if not response or not response.results:
                break

            uploads.extend(response.results)
            remaining -= len(response.results)

            if not response.has_more or not response.next_cursor:
                break

            start_cursor = response.next_cursor

        return uploads[:limit]

    async def _upload_small_file(
        self, file_path: Path, filename: str, file_size: int
    ) -> Optional[FileUploadResponse]:
        """Upload a small file using single-part upload."""
        content_type, _ = mimetypes.guess_type(str(file_path))

        # Create file upload
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="single_part",
        )

        if not file_upload:
            self.logger.error("Failed to create file upload for %s", filename)
            return None

        # Send file content
        success = await self.client.send_file_from_path(
            file_upload_id=file_upload.id, file_path=file_path
        )

        if not success:
            self.logger.error("Failed to send file content for %s", filename)
            return None

        self.logger.info(
            "Successfully uploaded file: %s (ID: %s)", filename, file_upload.id
        )
        return file_upload

    async def _upload_large_file(
        self, file_path: Path, filename: str, file_size: int
    ) -> Optional[FileUploadResponse]:
        """Upload a large file using multi-part upload."""
        content_type, _ = mimetypes.guess_type(str(file_path))

        # Create file upload with multi-part mode
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="multi_part",
        )

        if not file_upload:
            self.logger.error(
                "Failed to create multi-part file upload for %s", filename
            )
            return None

        # Upload file in parts
        success = await self._upload_file_parts(file_upload.id, file_path, file_size)

        if not success:
            self.logger.error("Failed to upload file parts for %s", filename)
            return None

        # Complete the upload
        completed_upload = await self.client.complete_file_upload(file_upload.id)

        if not completed_upload:
            self.logger.error("Failed to complete file upload for %s", filename)
            return None

        self.logger.info(
            "Successfully uploaded large file: %s (ID: %s)", filename, file_upload.id
        )
        return completed_upload

    async def _upload_small_file_from_bytes(
        self,
        file_content: bytes,
        filename: str,
        content_type: Optional[str],
        file_size: int,
    ) -> Optional[FileUploadResponse]:
        """Upload small file from bytes."""
        # Create file upload
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="single_part",
        )

        if not file_upload:
            return None

        # Send file content (BytesIO is imported at module level)
        success = await self.client.send_file_upload(
            file_upload_id=file_upload.id,
            file_content=BytesIO(file_content),
            filename=filename,
        )

        return file_upload if success else None

    async def _upload_large_file_from_bytes(
        self,
        file_content: bytes,
        filename: str,
        content_type: Optional[str],
        file_size: int,
    ) -> Optional[FileUploadResponse]:
        """Upload large file from bytes using multi-part."""
        # Create file upload
        file_upload = await self.client.create_file_upload(
            filename=filename,
            content_type=content_type,
            content_length=file_size,
            mode="multi_part",
        )

        if not file_upload:
            return None

        # Upload in chunks
        success = await self._upload_bytes_parts(file_upload.id, file_content)

        if not success:
            return None

        # Complete the upload
        return await self.client.complete_file_upload(file_upload.id)

    async def _upload_file_parts(
        self, file_upload_id: str, file_path: Path, file_size: int
    ) -> bool:
        """Upload file in parts for multi-part upload."""
        part_number = 1
        # Ceiling division: number of chunks needed to cover file_size.
        total_parts = (
            file_size + self.MULTI_PART_CHUNK_SIZE - 1
        ) // self.MULTI_PART_CHUNK_SIZE

        try:
            import aiofiles

            async with aiofiles.open(file_path, "rb") as file:
                while True:
                    chunk = await file.read(self.MULTI_PART_CHUNK_SIZE)
                    if not chunk:
                        break

                    success = await self.client.send_file_upload(
                        file_upload_id=file_upload_id,
                        file_content=BytesIO(chunk),
                        filename=file_path.name,
                        part_number=part_number,
                    )

                    if not success:
                        self.logger.error(
                            "Failed to upload part %d/%d", part_number, total_parts
                        )
                        return False

                    self.logger.debug("Uploaded part %d/%d", part_number, total_parts)
                    part_number += 1

            self.logger.info("Successfully uploaded all %d parts", total_parts)
            return True

        except Exception as e:
            self.logger.error("Error uploading file parts: %s", e)
            return False

    async def _upload_bytes_parts(
        self, file_upload_id: str, file_content: bytes
    ) -> bool:
        """Upload bytes in parts for multi-part upload."""
        part_number = 1
        # Ceiling division: number of chunks needed to cover the payload.
        total_parts = (
            len(file_content) + self.MULTI_PART_CHUNK_SIZE - 1
        ) // self.MULTI_PART_CHUNK_SIZE

        for i in range(0, len(file_content), self.MULTI_PART_CHUNK_SIZE):
            chunk = file_content[i : i + self.MULTI_PART_CHUNK_SIZE]

            # NOTE(review): unlike _upload_file_parts, no filename is passed
            # here — confirm the client tolerates a missing filename per part.
            success = await self.client.send_file_upload(
                file_upload_id=file_upload_id,
                file_content=BytesIO(chunk),
                part_number=part_number,
            )

            if not success:
                self.logger.error(
                    "Failed to upload part %d/%d", part_number, total_parts
                )
                return False

            self.logger.debug("Uploaded part %d/%d", part_number, total_parts)
            part_number += 1

        self.logger.info("Successfully uploaded all %d parts", total_parts)
        return True
@@ -14,7 +14,8 @@ from notionary.page.content.page_content_writer import PageContentWriter
14
14
  from notionary.page.property_formatter import NotionPropertyFormatter
15
15
  from notionary.page.utils import extract_property_value
16
16
 
17
- from notionary.util import LoggingMixin, format_uuid, FuzzyMatcher, factory_only
17
+ from notionary.util import LoggingMixin, format_uuid, factory_only
18
+ from notionary.util.fuzzy import find_best_match
18
19
 
19
20
 
20
21
  if TYPE_CHECKING:
@@ -101,7 +102,7 @@ class NotionPage(LoggingMixin):
101
102
  cls.logger.warning("No pages found for name: %s", page_name)
102
103
  raise ValueError(f"No pages found for name: {page_name}")
103
104
 
104
- best_match = FuzzyMatcher.find_best_match(
105
+ best_match = find_best_match(
105
106
  query=page_name,
106
107
  items=search_results,
107
108
  text_extractor=lambda page: page.title,
@@ -493,16 +494,14 @@ class NotionPage(LoggingMixin):
493
494
  """
494
495
  Create NotionPage instance from API response.
495
496
  """
496
- from notionary.database.notion_database import NotionDatabase
497
+ from notionary.database.database import NotionDatabase
497
498
 
498
499
  title = cls._extract_title(page_response)
499
500
  emoji = cls._extract_emoji(page_response)
500
501
  parent_database_id = cls._extract_parent_database_id(page_response)
501
502
 
502
503
  parent_database = (
503
- await NotionDatabase.from_database_id(
504
- id=parent_database_id, token=token
505
- )
504
+ await NotionDatabase.from_database_id(id=parent_database_id, token=token)
506
505
  if parent_database_id
507
506
  else None
508
507
  )
@@ -0,0 +1,19 @@
1
+ from .service import ProductTelemetry
2
+ from .views import (
3
+ BaseTelemetryEvent,
4
+ DatabaseFactoryUsedEvent,
5
+ QueryOperationEvent,
6
+ NotionMarkdownSyntaxPromptEvent,
7
+ MarkdownToNotionConversionEvent,
8
+ NotionToMarkdownConversionEvent,
9
+ )
10
+
11
+ __all__ = [
12
+ "ProductTelemetry",
13
+ "BaseTelemetryEvent",
14
+ "DatabaseFactoryUsedEvent",
15
+ "QueryOperationEvent",
16
+ "NotionMarkdownSyntaxPromptEvent",
17
+ "MarkdownToNotionConversionEvent",
18
+ "NotionToMarkdownConversionEvent",
19
+ ]
@@ -0,0 +1,136 @@
1
+ import os
2
+ import uuid
3
+ from pathlib import Path
4
+ from typing import Dict, Any, Optional
5
+ from posthog import Posthog
6
+ from dotenv import load_dotenv
7
+
8
+ from notionary.telemetry.views import BaseTelemetryEvent
9
+ from notionary.util import SingletonMetaClass, LoggingMixin
10
+
11
+ load_dotenv()
12
+
13
+ POSTHOG_EVENT_SETTINGS = {
14
+ "process_person_profile": True,
15
+ }
16
+
17
+
18
class ProductTelemetry(LoggingMixin, metaclass=SingletonMetaClass):
    """
    Anonymous telemetry for Notionary - enabled by default.
    Disable via: ANONYMIZED_NOTIONARY_TELEMETRY=false
    """

    # Where the anonymous user ID persists between runs.
    USER_ID_PATH = str(Path.home() / ".cache" / "notionary" / "telemetry_user_id")
    PROJECT_API_KEY = "phc_gItKOx21Tc0l07C1taD0QPpqFnbWgWjVfRjF6z24kke"
    HOST = "https://eu.i.posthog.com"
    UNKNOWN_USER_ID = "UNKNOWN"

    # Class-level defaults; instances shadow these once set.
    _logged_init_message = False
    _curr_user_id = None

    def __init__(self):
        # Default: enabled, disable via environment variable
        telemetry_setting = os.getenv("ANONYMIZED_NOTIONARY_TELEMETRY", "true").lower()
        telemetry_disabled = telemetry_setting == "false"
        self.debug_logging = os.getenv("NOTIONARY_DEBUG", "false").lower() == "true"

        if telemetry_disabled:
            self._posthog_client = None
        else:
            if not self._logged_init_message:
                self.logger.info(
                    "Anonymous telemetry enabled to improve Notionary. "
                    "To disable: export ANONYMIZED_NOTIONARY_TELEMETRY=false"
                )
                self._logged_init_message = True

            self._posthog_client = Posthog(
                project_api_key=self.PROJECT_API_KEY,
                host=self.HOST,
                disable_geoip=True,
                enable_exception_autocapture=True,
            )

            # Silence posthog's logging unless debug mode
            if not self.debug_logging:
                import logging

                posthog_logger = logging.getLogger("posthog")
                posthog_logger.disabled = True

        if self._posthog_client is None:
            self.logger.debug("Telemetry disabled")

    def capture(self, event: BaseTelemetryEvent) -> None:
        """
        Safe event tracking that never affects library functionality

        Args:
            event: BaseTelemetryEvent instance to capture
        """
        if self._posthog_client is None:
            return

        self._direct_capture(event)

    def _direct_capture(self, event: BaseTelemetryEvent) -> None:
        """
        Direct capture method - PostHog handles threading internally
        Should not be thread blocking because posthog magically handles it
        """
        if self._posthog_client is None:
            return

        try:
            self._posthog_client.capture(
                distinct_id=self.user_id,
                event=event.name,
                properties={
                    "library": "notionary",
                    **event.properties,
                    **POSTHOG_EVENT_SETTINGS,
                },
            )

        except Exception as e:
            # Telemetry must never break the library; log and move on.
            self.logger.error(f"Failed to send telemetry event {event.name}: {e}")

    def flush(self) -> None:
        """
        Flush pending events - simplified without threading complexity
        """
        if not self._posthog_client:
            self.logger.debug("PostHog client not available, skipping flush.")
            return

        try:
            self._posthog_client.flush()
            self.logger.debug("PostHog client telemetry queue flushed.")
        except Exception as e:
            self.logger.error(f"Failed to flush PostHog client: {e}")

    @property
    def user_id(self) -> str:
        """Anonymous, persistent user ID.

        Reads the cached ID file, generating and persisting a fresh UUID
        when the file is missing — or empty/whitespace-only (a truncated
        write would otherwise yield "" forever and re-read on every access).
        """
        if self._curr_user_id:
            return self._curr_user_id

        # File access may fail due to permissions or other reasons.
        # We don't want to crash so we catch all exceptions.
        try:
            id_path = Path(self.USER_ID_PATH)

            if id_path.exists():
                stored_id = id_path.read_text().strip()
                if stored_id:
                    self._curr_user_id = stored_id
                    return self._curr_user_id

            # Missing or empty/corrupt file: create a new persistent ID.
            id_path.parent.mkdir(parents=True, exist_ok=True)
            new_user_id = str(uuid.uuid4())
            id_path.write_text(new_user_id)
            self._curr_user_id = new_user_id

            return self._curr_user_id
        except Exception as e:
            self.logger.debug(f"Error getting user ID: {e}")
            self._curr_user_id = self.UNKNOWN_USER_ID
            return self._curr_user_id
@@ -0,0 +1,73 @@
1
+ from abc import ABC, abstractmethod
2
+ from dataclasses import asdict, dataclass
3
+ from typing import Any, Optional
4
+
5
+
6
@dataclass
class BaseTelemetryEvent(ABC):
    """Abstract base for telemetry events: a name plus a properties dict."""

    @property
    @abstractmethod
    def name(self) -> str:
        """Event identifier sent to the analytics backend."""

    @property
    def properties(self) -> dict[str, Any]:
        """All dataclass field values, excluding any field named 'name'."""
        data = asdict(self)
        data.pop("name", None)
        return data
16
+
17
+
18
@dataclass
class DatabaseFactoryUsedEvent(BaseTelemetryEvent):
    """Emitted whenever one of the database factory methods is invoked."""

    # Which factory classmethod was called (e.g. "from_database_id").
    factory_method: str

    @property
    def name(self) -> str:
        """Stable event name for analytics."""
        return "database_factory_used"
27
+
28
+
29
@dataclass
class QueryOperationEvent(BaseTelemetryEvent):
    """Emitted whenever a database query operation runs."""

    # Kind of query that was executed.
    query_type: str

    @property
    def name(self) -> str:
        """Stable event name for analytics."""
        return "query_operation"
38
+
39
+
40
@dataclass
class NotionMarkdownSyntaxPromptEvent(BaseTelemetryEvent):
    """Emitted whenever the Notion Markdown syntax prompt is used (no payload)."""

    @property
    def name(self) -> str:
        """Stable event name for analytics."""
        return "notion_markdown_syntax_used"
47
+
48
+
49
# Tracks markdown conversion
@dataclass
class MarkdownToNotionConversionEvent(BaseTelemetryEvent):
    """Emitted when markdown is converted into Notion blocks."""

    # Name of the element handler involved, e.g. "HeadingElement"
    # or "ParagraphElement"; None when not attributable.
    handler_element_name: Optional[str] = None

    @property
    def name(self) -> str:
        """Stable event name for analytics."""
        return "markdown_to_notion_conversion"
61
+
62
+
63
@dataclass
class NotionToMarkdownConversionEvent(BaseTelemetryEvent):
    """Emitted when Notion blocks are converted back into markdown."""

    # Name of the element handler involved, e.g. "HeadingElement"
    # or "ParagraphElement"; None when not attributable.
    handler_element_name: Optional[str] = None

    @property
    def name(self) -> str:
        """Stable event name for analytics."""
        return "notion_to_markdown_conversion"
@@ -0,0 +1,11 @@
1
+ from .notion_user import NotionUser
2
+ from .notion_user_manager import NotionUserManager
3
+ from .client import NotionUserClient
4
+ from .notion_bot_user import NotionBotUser
5
+
6
+ __all__ = [
7
+ "NotionUser",
8
+ "NotionUserManager",
9
+ "NotionUserClient",
10
+ "NotionBotUser",
11
+ ]