tracktolib 0.67.0-py3-none-any.whl → 0.69.0-py3-none-any.whl

This diff compares the content of two package versions as publicly released to one of the supported registries. It is provided for informational purposes only and reflects the versions as they appear in their respective public registries.
@@ -0,0 +1,567 @@
+"""Notion utility functions for exporting and importing content."""
+
+from __future__ import annotations
+
+import asyncio
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Protocol, TypedDict, cast
+
+import niquests
+
+from .markdown import (
+    blocks_to_markdown_with_comments,
+    comments_to_markdown,
+    markdown_to_blocks,
+    strip_comments_from_markdown,
+)
+
+if TYPE_CHECKING:
+    from .cache import NotionCache
+    from .models import Block, Comment, PartialBlock
+
+from ..utils import get_chunks, run_coros
+from .blocks import (
+    ExportResult,
+    find_divergence_index,
+)
+from .fetch import (
+    create_comment,
+    create_page,
+    delete_block,
+    fetch_append_block_children,
+    fetch_block_children,
+    fetch_comments,
+    fetch_user,
+)
+
+__all__ = [
+    "DEFAULT_CONCURRENCY",
+    "ClearResult",
+    "PageComment",
+    "ProgressCallback",
+    "UpdateResult",
+    "clear_page_blocks",
+    "download_page_to_markdown",
+    "export_markdown_to_page",
+    "fetch_all_page_blocks",
+    "fetch_all_page_comments",
+    "update_page_content",
+]
+
+
+class ProgressCallback(Protocol):
+    """Protocol for progress callback functions."""
+
+    def __call__(self, current: int, total: int | None) -> None:
+        """Called after each operation to report progress.
+
+        Args:
+            current: Number of items processed so far
+            total: Total number of items to process, or None if unknown (e.g., during fetch)
+        """
+        ...
+
+
+class ClearResult(TypedDict):
+    """Result of clearing page blocks."""
+
+    deleted: int
+    """Number of blocks deleted."""
+
+
+class PageComment(TypedDict):
+    """Comment with block context."""
+
+    id: str
+    """Comment ID."""
+    discussion_id: str
+    """Discussion thread ID."""
+    block_id: str
+    """ID of the block this comment is attached to."""
+    block_type: str
+    """Type of the block (e.g., 'paragraph', 'code')."""
+    author_name: str
+    """Name of the comment author."""
+    created_time: str
+    """ISO 8601 timestamp when the comment was created."""
+    text: str
+    """Plain text content of the comment."""
+
+
+class UpdateResult(TypedDict):
+    """Result of updating page content."""
+
+    preserved: int
+    """Number of blocks preserved (unchanged from prefix)."""
+    deleted: int
+    """Number of blocks deleted."""
+    created: int
+    """Number of new blocks created."""
+
+
+NOTION_BLOCK_LIMIT = 100
+"""Maximum number of blocks per Notion API request."""
+
+DEFAULT_CONCURRENCY = 50
+"""Default concurrency limit for parallel API requests."""
+
+
+async def export_markdown_to_page(
+    session: niquests.AsyncSession,
+    *,
+    database_id: str,
+    content: str,
+    title: str,
+    properties: dict[str, Any] | None = None,
+    comments: list[str] | None = None,
+) -> ExportResult:
+    """Export markdown content to a Notion database as a new page.
+
+    Handles large documents by chunking blocks (Notion API limit: 100 blocks per request).
+
+    Args:
+        session: Authenticated niquests session with Notion headers
+        database_id: ID of the Notion database to create the page in
+        content: Markdown content to convert to Notion blocks
+        title: Page title (Name property)
+        properties: Additional page properties (optional)
+        comments: List of comment strings to add to the page (optional)
+
+    Returns:
+        ExportResult with count of blocks created and page URL
+    """
+    if not content.strip():
+        return {"count": 0, "url": None}
+
+    all_blocks = markdown_to_blocks(content)
+
+    # Build properties with title
+    page_properties: dict[str, Any] = {
+        "Name": {"title": [{"text": {"content": title}}]},
+    }
+    if properties:
+        page_properties.update(properties)
+
+    # Create page with first chunk of blocks (max 100)
+    first_chunk = all_blocks[:NOTION_BLOCK_LIMIT]
+    page = await create_page(
+        session,
+        parent={"database_id": database_id},
+        properties=page_properties,
+        children=first_chunk,
+    )
+
+    url = page.get("url") if page else None
+    page_id = page.get("id") if page else None
+
+    # Append remaining blocks in chunks
+    if page_id and len(all_blocks) > NOTION_BLOCK_LIMIT:
+        remaining_blocks = all_blocks[NOTION_BLOCK_LIMIT:]
+        for i in range(0, len(remaining_blocks), NOTION_BLOCK_LIMIT):
+            chunk = remaining_blocks[i : i + NOTION_BLOCK_LIMIT]
+            await fetch_append_block_children(session, page_id, chunk)
+
+    # Add comments if provided
+    if comments and page_id:
+        for comment_text in comments:
+            await create_comment(
+                session,
+                parent={"page_id": page_id},
+                rich_text=[{"type": "text", "text": {"content": comment_text}}],
+            )
+
+    return {"count": len(all_blocks), "url": url}
+
+
+async def download_page_to_markdown(
+    session: niquests.AsyncSession,
+    page_id: str,
+    output_path: str | Path,
+    *,
+    include_comments: bool = False,
+    semaphore: asyncio.Semaphore | None = None,
+    on_progress: ProgressCallback | None = None,
+) -> int:
+    """Download a Notion page to a local markdown file.
+
+    Uses TaskGroup with Semaphore for parallel fetching of comments.
+
+    Args:
+        session: Authenticated niquests session with Notion headers
+        page_id: ID of the Notion page to download
+        output_path: Path to save the markdown file
+        include_comments: Whether to include comments (both page-level and inline block comments)
+        semaphore: Optional semaphore for rate limiting (default: Semaphore(50))
+        on_progress: Optional callback called after each batch of blocks is fetched.
+            Receives (current, total) where total is None (unknown during fetch).
+
+    Returns:
+        Number of blocks converted
+    """
+    # Fetch all blocks from the page
+    all_blocks: list[Block | PartialBlock] = []
+    cursor: str | None = None
+
+    while True:
+        response = await fetch_block_children(session, page_id, start_cursor=cursor)
+        all_blocks.extend(response.get("results", []))
+
+        has_more = response.get("has_more", False)
+        if on_progress:
+            on_progress(len(all_blocks), None)
+
+        if not has_more:
+            break
+        cursor = response.get("next_cursor")
+
+    # Fetch comments if requested
+    block_comments: dict[str, list[Comment]] = {}
+    page_comments: list[Comment] = []
+
+    if include_comments:
+        # Collect all block IDs to fetch comments for (including page itself)
+        block_ids = [page_id] + [b.get("id") for b in all_blocks if b.get("id")]
+
+        # Fetch comments in parallel
+        sem = semaphore or asyncio.Semaphore(DEFAULT_CONCURRENCY)
+        block_id_to_comments: dict[str, list[Comment]] = {}
+        user_ids: set[str] = set()
+
+        async def fetch_block_comments(bid: str) -> tuple[str, list[Comment]]:
+            data = await fetch_comments(session, block_id=bid)
+            comments_list = data.get("results", [])
+            if comments_list:
+                # Use actual parent block_id from comment to avoid race condition
+                actual_block_id = comments_list[0].get("parent", {}).get("block_id", bid)
+                return actual_block_id, comments_list
+            return bid, []
+
+        async for actual_block_id, comments_list in run_coros((fetch_block_comments(bid) for bid in block_ids), sem):
+            if comments_list:
+                block_id_to_comments[actual_block_id] = comments_list
+                for comment in comments_list:
+                    user_id = comment.get("created_by", {}).get("id")
+                    if user_id:
+                        user_ids.add(user_id)
+
+        # Fetch all user names in parallel
+        user_cache: dict[str, str] = {}
+
+        async for uid, name in run_coros((_fetch_user_with_id(session, uid) for uid in user_ids), sem):
+            user_cache[uid] = name
+
+        # Apply user names to comments
+        for comments_list in block_id_to_comments.values():
+            for comment in comments_list:
+                created_by = cast(dict[str, Any], comment.get("created_by", {}))
+                uid = created_by.get("id")
+                if uid and uid in user_cache:
+                    created_by["name"] = user_cache[uid]
+
+        # Separate page comments from block comments
+        page_comments = block_id_to_comments.pop(page_id, [])
+        block_comments = block_id_to_comments
+
+    # Convert blocks to markdown with inline comments
+    markdown_content = blocks_to_markdown_with_comments(all_blocks, block_comments)
+
+    # Append page-level comments at the end
+    if page_comments:
+        comments_md = comments_to_markdown(page_comments)
+        markdown_content = f"{markdown_content}\n\n{comments_md}"
+
+    # Write to file with trailing newline
+    output = Path(output_path)
+    output.write_text(f"{markdown_content}\n", encoding="utf-8")
+
+    return len(all_blocks)
+
+
+async def clear_page_blocks(
+    session: niquests.AsyncSession,
+    page_id: str,
+    *,
+    cache: NotionCache | None = None,
+    semaphore: asyncio.Semaphore | None = None,
+    on_progress: ProgressCallback | None = None,
+) -> ClearResult:
+    """Delete all blocks from a Notion page.
+
+    Uses TaskGroup for parallel deletion with Semaphore for rate limiting.
+    Default concurrency is 50 if no semaphore is provided.
+
+    Args:
+        session: Authenticated niquests session with Notion headers
+        page_id: ID of the Notion page to clear
+        cache: Optional cache to invalidate after clearing
+        semaphore: Optional semaphore for rate limiting (default: Semaphore(50))
+        on_progress: Optional callback called after each block is deleted.
+            Receives (deleted_count, total_count).
+
+    Returns:
+        ClearResult with count of blocks deleted
+    """
+    # First, fetch all blocks to get the total count
+    all_block_ids: list[str] = []
+    cursor: str | None = None
+
+    while True:
+        response = await fetch_block_children(session, page_id, start_cursor=cursor)
+        blocks = response.get("results", [])
+        for block in blocks:
+            block_id = block.get("id")
+            if block_id:
+                all_block_ids.append(block_id)
+
+        if not response.get("has_more", False):
+            break
+        cursor = response.get("next_cursor")
+
+    if not all_block_ids:
+        if cache:
+            cache.delete_page_blocks(page_id)
+        return {"deleted": 0}
+
+    total = len(all_block_ids)
+    deleted_count = 0
+    sem = semaphore or asyncio.Semaphore(DEFAULT_CONCURRENCY)
+
+    async def delete_one(block_id: str) -> None:
+        nonlocal deleted_count
+        async with sem:
+            await delete_block(session, block_id)
+            deleted_count += 1
+            if on_progress:
+                on_progress(deleted_count, total)
+
+    async with asyncio.TaskGroup() as tg:
+        for block_id in all_block_ids:
+            tg.create_task(delete_one(block_id))
+
+    if cache:
+        cache.delete_page_blocks(page_id)
+
+    return {"deleted": deleted_count}
+
+
+async def fetch_all_page_blocks(
+    session: niquests.AsyncSession,
+    page_id: str,
+    *,
+    cache: NotionCache | None = None,
+    on_progress: ProgressCallback | None = None,
+) -> list[Block | PartialBlock]:
+    """Fetch all blocks from a Notion page.
+
+    Args:
+        session: Authenticated niquests session with Notion headers
+        page_id: ID of the Notion page
+        cache: Optional cache to read from and write to
+        on_progress: Optional callback called after each batch of blocks is fetched.
+            Receives (fetched_count, None) since total is unknown during fetch.
+
+    Returns:
+        List of all blocks in the page
+    """
+    # Try cache first
+    if cache:
+        cached = cache.get_page_blocks(page_id)
+        if cached is not None:
+            return cached  # type: ignore[return-value]
+
+    # Fetch from API
+    all_blocks: list[Block | PartialBlock] = []
+    cursor: str | None = None
+
+    while True:
+        response = await fetch_block_children(session, page_id, start_cursor=cursor)
+        all_blocks.extend(response.get("results", []))
+
+        if on_progress:
+            on_progress(len(all_blocks), None)
+
+        if not response.get("has_more", False):
+            break
+        cursor = response.get("next_cursor")
+
+    # Update cache
+    if cache:
+        cache.set_page_blocks(page_id, all_blocks)  # type: ignore[arg-type]
+
+    return all_blocks
+
+
+async def _fetch_block_comments(
+    session: niquests.AsyncSession, block: Block | PartialBlock
+) -> list[tuple[str, str, Comment]]:
+    block_id = block.get("id", "")
+    block_type = block.get("type", "unknown")
+    resp = await fetch_comments(session, block_id=block_id)
+    return [(block_id, block_type, c) for c in resp.get("results", [])]
+
+
+async def _fetch_user_with_id(session: niquests.AsyncSession, uid: str) -> tuple[str, str]:
+    user = await fetch_user(session, uid)
+    return uid, user.get("name") or uid
+
+
+async def fetch_all_page_comments(
+    session: niquests.AsyncSession,
+    page_id: str,
+    *,
+    cache: NotionCache | None = None,
+    concurrency: int = DEFAULT_CONCURRENCY,
+) -> list[PageComment]:
+    """Fetch all comments from a page and its blocks.
+
+    Args:
+        session: Authenticated niquests session with Notion headers
+        page_id: The page to fetch comments from
+        cache: Optional cache to read from and write to
+        concurrency: Max concurrent requests (default 50)
+
+    Returns:
+        List of comments with block context, ordered by block position
+    """
+    # Try cache first
+    if cache:
+        cached = cache.get_page_comments(page_id)
+        if cached is not None:
+            return cached
+
+    blocks = await fetch_all_page_blocks(session, page_id, cache=cache)
+    sem = asyncio.Semaphore(concurrency)
+
+    # Fetch comments for all blocks
+    raw_comments: list[tuple[str, str, Comment]] = []
+    user_ids: set[str] = set()
+
+    async for result in run_coros((_fetch_block_comments(session, b) for b in blocks), sem):
+        for block_id, block_type, c in result:
+            raw_comments.append((block_id, block_type, c))
+            user_id = c.get("created_by", {}).get("id")
+            if user_id:
+                user_ids.add(user_id)
+
+    # Fetch user names in parallel
+    user_ids_list = list(user_ids)
+    user_cache: dict[str, str] = {}
+
+    async for uid, name in run_coros((_fetch_user_with_id(session, uid) for uid in user_ids_list), sem):
+        user_cache[uid] = name
+
+    # Build final comments with resolved user names
+    comments: list[PageComment] = []
+    for block_id, block_type, c in raw_comments:
+        user_id = c.get("created_by", {}).get("id", "")
+        author_name = user_cache.get(user_id, "Unknown")
+        comments.append(
+            {
+                "id": c["id"],
+                "discussion_id": c["discussion_id"],
+                "block_id": block_id,
+                "block_type": block_type,
+                "author_name": author_name,
+                "created_time": c["created_time"],
+                "text": "".join(rt.get("plain_text", "") for rt in c.get("rich_text", [])),
+            }
+        )
+
+    # Update cache
+    if cache:
+        cache.set_page_comments(page_id, comments)
+
+    return comments
+
+
+async def update_page_content(
+    session: niquests.AsyncSession,
+    page_id: str,
+    content: str,
+    *,
+    cache: NotionCache | None = None,
+    semaphore: asyncio.Semaphore | None = None,
+    on_progress: ProgressCallback | None = None,
+) -> UpdateResult:
+    """Update a Notion page using a smart prefix-preserving diff.
+
+    Only deletes and recreates blocks that have changed, preserving:
+    - Block IDs for unchanged blocks
+    - Inline comments attached to unchanged blocks
+
+    This also reduces API calls when edits are near the end of a document.
+
+    Uses TaskGroup for parallel deletion with Semaphore for rate limiting.
+    Default concurrency is 50 if no semaphore is provided.
+
+    Comment blockquotes (> 💬) are automatically stripped from the content
+    to preserve existing comments on the page.
+
+    Args:
+        session: Authenticated niquests session with Notion headers
+        page_id: ID of the Notion page to update
+        content: Markdown content to replace the page content with
+        cache: Optional cache for existing blocks (avoids a fetch if cached)
+        semaphore: Optional semaphore for rate limiting (default: Semaphore(50))
+        on_progress: Optional callback called after each block is deleted.
+            Receives (deleted_count, total_to_delete).
+
+    Returns:
+        UpdateResult with counts of preserved, deleted, and created blocks
+    """
+    content = strip_comments_from_markdown(content)
+
+    # Handle empty content
+    if not content.strip():
+        result = await clear_page_blocks(session, page_id, cache=cache, semaphore=semaphore, on_progress=on_progress)
+        return {"preserved": 0, "deleted": result["deleted"], "created": 0}
+
+    new_blocks = markdown_to_blocks(content)
+
+    # Fetch existing blocks (from cache or API)
+    existing_blocks = await fetch_all_page_blocks(session, page_id, cache=cache)
+
+    # Find where content diverges
+    divergence_idx = find_divergence_index(existing_blocks, new_blocks)
+
+    # Count preserved blocks
+    preserved = divergence_idx
+
+    # Delete blocks from divergence point onward
+    blocks_to_delete = existing_blocks[divergence_idx:]
+    block_ids_to_delete = [b.get("id") for b in blocks_to_delete if b.get("id")]
+    total_to_delete = len(block_ids_to_delete)
+    deleted_count = 0
+
+    if block_ids_to_delete:
+        sem = semaphore or asyncio.Semaphore(DEFAULT_CONCURRENCY)
+
+        async def delete_one(block_id: str) -> None:
+            nonlocal deleted_count
+            async with sem:
+                await delete_block(session, block_id)
+                deleted_count += 1
+                if on_progress:
+                    on_progress(deleted_count, total_to_delete)
+
+        async with asyncio.TaskGroup() as tg:
+            for block_id in block_ids_to_delete:
+                tg.create_task(delete_one(block_id))
+
+    # Append new blocks from divergence point onward
+    blocks_to_create = new_blocks[divergence_idx:]
+    created = 0
+    if blocks_to_create:
+        # Notion's append API adds to the end, which is what we want
+        # since we deleted everything after the preserved blocks
+        for chunk in get_chunks(blocks_to_create, NOTION_BLOCK_LIMIT):
+            await fetch_append_block_children(session, page_id, chunk)
+            created += len(chunk)
+
+    # Update cache with the new state
+    if cache:
+        # The IDs of newly created blocks are not known locally,
+        # so invalidate the cache instead of rebuilding it
+        cache.delete_page_blocks(page_id)
+
+    return {"preserved": preserved, "deleted": deleted_count, "created": created}
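For orientation, here is a minimal usage sketch of the new module's three main entry points. The import path, IDs, and token are placeholders (the diff does not name the new file, only its relative imports), and the headers assume a standard Notion REST integration; treat this as an illustration, not documented API.

```python
import asyncio

import niquests

# Placeholder import path: the diff shows the module's relative imports
# (.markdown, .fetch, .blocks) but not where it lives in the package.
from tracktolib.notion import (
    download_page_to_markdown,
    export_markdown_to_page,
    update_page_content,
)


async def main() -> None:
    async with niquests.AsyncSession() as session:
        # Standard Notion API headers; token and version are placeholders
        session.headers.update(
            {
                "Authorization": "Bearer <notion-token>",
                "Notion-Version": "2022-06-28",
                "Content-Type": "application/json",
            }
        )

        # Create a page from markdown (blocks are sent 100 at a time)
        result = await export_markdown_to_page(
            session,
            database_id="<database-id>",
            content="# Hello\n\nFirst version of the page.",
            title="Demo page",
        )
        print(f"created {result['count']} blocks: {result['url']}")

        # Later, re-sync edited markdown: only blocks after the first
        # divergence are deleted and recreated
        stats = await update_page_content(
            session,
            "<page-id>",
            "# Hello\n\nSecond version of the page.",
            on_progress=lambda current, total: print(f"deleted {current}/{total}"),
        )
        print(stats)  # {'preserved': ..., 'deleted': ..., 'created': ...}

        # Round-trip the page back to a local file, inlining comments
        n_blocks = await download_page_to_markdown(
            session, "<page-id>", "page.md", include_comments=True
        )
        print(f"wrote {n_blocks} blocks to page.md")


asyncio.run(main())
```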
tracktolib/pg/__init__.py CHANGED
@@ -1,17 +1,17 @@
 from .query import (
-    insert_many,
-    insert_one,
+    Conflict,
+    OnConflict,
+    PGConflictQuery,
     PGInsertQuery,
     PGReturningQuery,
-    PGConflictQuery,
-    insert_returning,
-    Conflict,
-    fetch_count,
     PGUpdateQuery,
-    update_returning,
-    update_one,
+    fetch_count,
+    insert_many,
+    insert_one,
     insert_pg,
-    OnConflict,
+    insert_returning,
     update_many,
+    update_one,
+    update_returning,
 )
-from .utils import iterate_pg, upsert_csv, safe_pg, safe_pg_context, PGError, PGException
+from .utils import PGError, PGException, iterate_pg, safe_pg, safe_pg_context, upsert_csv
tracktolib/pg/query.py CHANGED
@@ -1,6 +1,6 @@
 import typing
 from dataclasses import dataclass, field
-from typing import Iterable, Callable, Iterator, TypeAlias, overload, Any, Literal
+from typing import Any, Callable, Iterable, Iterator, Literal, TypeAlias, overload
 
 from ..pg_utils import get_conflict_query
 
tracktolib/pg/utils.py CHANGED
@@ -2,11 +2,11 @@ import csv
 import datetime as dt
 import functools
 import logging
-from pathlib import Path
-from typing import AsyncIterator, Iterable, cast, NamedTuple, Sequence
-from typing_extensions import LiteralString
-from dataclasses import dataclass
 from contextlib import contextmanager
+from dataclasses import dataclass
+from pathlib import Path
+from typing import AsyncIterator, Iterable, LiteralString, NamedTuple, Sequence, cast
+
 from ..pg_utils import get_conflict_query
 
 try:
@@ -21,8 +21,8 @@ from asyncpg.exceptions import (
     UniqueViolationError,
 )
 
-from tracktolib.utils import get_chunks
 from tracktolib.pg_utils import get_tmp_table_query
+from tracktolib.utils import get_chunks
 
 logger = logging.Logger("tracktolib-pg")
 
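One side effect of these import changes: `LiteralString` now comes from `typing` rather than `typing_extensions`, which implies Python 3.11+ (the type landed with PEP 675). If older interpreters ever needed to be supported again, a version-guarded import would do; this shim is hypothetical and not part of the package:

```python
import sys

if sys.version_info >= (3, 11):
    from typing import LiteralString
else:
    # typing_extensions backports PEP 675's LiteralString to older Pythons
    from typing_extensions import LiteralString
```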
tracktolib/pg_sync.py CHANGED
@@ -1,13 +1,11 @@
 from pathlib import Path
-from typing import Iterable, Any, overload, Literal, cast, Optional, Mapping, Sequence
-
-from typing_extensions import LiteralString
+from typing import Any, Iterable, Literal, LiteralString, Mapping, Optional, Sequence, cast, overload
 
 try:
     from psycopg import Connection, Cursor
-    from psycopg.abc import Query
+    from psycopg.abc import Query, QueryNoTemplate
     from psycopg.errors import InvalidCatalogName
-    from psycopg.rows import dict_row, DictRow, TupleRow
+    from psycopg.rows import DictRow, TupleRow, dict_row
     from psycopg.types.json import Json
 except ImportError:
     raise ImportError('Please install psycopg or tracktolib with "pg-sync" to use this module')
@@ -22,9 +20,9 @@ __all__ = (
     "fetch_one",
     "get_insert_data",
     "get_tables",
+    "insert_csv",
     "insert_many",
     "insert_one",
-    "insert_csv",
     "set_seq_max",
 )
 
@@ -60,7 +58,7 @@ def fetch_one(engine: Connection, query: LiteralString, *args) -> dict | None: ...
 
 def fetch_one(engine: Connection, query: LiteralString, *args, required: bool = False) -> dict | None:
     with engine.cursor(row_factory=dict_row) as cur:
-        _data = cur.execute(query, args).fetchone()
+        _data = cur.execute(cast(QueryNoTemplate, query), args).fetchone()
     engine.commit()
     if required and not _data:
         raise ValueError("No value found for query")
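The one behavioral edit in `fetch_one` is the `cast(QueryNoTemplate, query)` wrapper. `QueryNoTemplate` is imported from `psycopg.abc` above; the cast presumably narrows the `LiteralString` query for type checkers now that psycopg's `Query` type also admits template queries. A minimal sketch of the same pattern (the table name and helper function are illustrative, not part of tracktolib):

```python
from typing import LiteralString, cast

from psycopg import Connection
from psycopg.abc import QueryNoTemplate
from psycopg.rows import dict_row


def count_rows(engine: Connection, table: LiteralString) -> int:
    # Concatenating LiteralStrings keeps the LiteralString type (PEP 675),
    # so a dynamically built, attacker-controlled name would not type-check.
    query: LiteralString = "SELECT count(*) AS nb FROM " + table
    with engine.cursor(row_factory=dict_row) as cur:
        # Narrow the query to the plain (non-template) form for execute()
        row = cur.execute(cast(QueryNoTemplate, query), ()).fetchone()
    return row["nb"] if row else 0
```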
tracktolib/pg_utils.py CHANGED
@@ -1,7 +1,4 @@
-from typing import Iterable
-from typing import cast
-
-from typing_extensions import LiteralString
+from typing import Iterable, LiteralString, cast
 
 
 def get_tmp_table_query(
tracktolib/s3/minio.py CHANGED
@@ -1,8 +1,8 @@
 from pathlib import Path
 
 try:
-    from minio.deleteobjects import DeleteObject
     from minio import Minio
+    from minio.deleteobjects import DeleteObject
 except ImportError:
     raise ImportError('Please install minio or tracktolib with "s3-minio" to use this module')
 