gatsby-source-notion-churnotion 1.2.1 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -0
- package/dist/rust-bindings.d.ts +5 -7
- package/dist/rust-bindings.js +5 -45
- package/dist/util/blocks/blockProcessor.d.ts +2 -5
- package/dist/util/blocks/blockProcessorRegistry.js +2 -0
- package/dist/util/blocks/index.d.ts +1 -0
- package/dist/util/blocks/index.js +1 -0
- package/dist/util/blocks/structureBlockProcessor.js +0 -4
- package/dist/util/blocks/tableOfContentsBlockProcessor.d.ts +6 -0
- package/dist/util/blocks/tableOfContentsBlockProcessor.js +23 -0
- package/dist/util/processor.d.ts +2 -5
- package/dist/util/processor.js +4 -1
- package/dist/util/tableOfContent.d.ts +2 -5
- package/dist/util/tableOfContent.js +24 -10
- package/dist/util/tocHelper.d.ts +18 -0
- package/dist/util/tocHelper.js +43 -0
- package/package.json +2 -3
package/README.md
CHANGED
@@ -62,6 +62,15 @@ If you're considering Notion as your CMS for Gatsby, this plugin could be a great
 - Better error handling
 - Improved type safety
 
+## Features
+
+- **Recursive Database Fetching**: Connect with a Notion database and fetch all entries, including nested entries.
+- **Type Safe**: All data types are converted to TypeScript types.
+- **Powerful TypeScript Implementation**: Uses efficient, battle-tested TypeScript code for processing Notion API requests in parallel.
+- **Customizable**: Allows for custom filtering and properties selection.
+
+> **Note**: The plugin currently uses a pure TypeScript implementation for parallel processing of Notion API requests. A Rust-based implementation was initially planned for high-performance use cases but has been put on hold due to cross-platform compilation issues.
+
 ## Install
 
 ```shell
package/dist/rust-bindings.d.ts
CHANGED
@@ -1,6 +1,6 @@
 /**
- * This file provides TypeScript bindings to the
- *
+ * This file provides TypeScript bindings to the Notion API processing.
+ * Currently using the TypeScript implementation as the default.
  */
 import { Reporter } from "gatsby";
 /**
@@ -17,9 +17,8 @@ export interface RustNotionServiceOptions {
     enableCaching?: boolean;
 }
 /**
- * A wrapper class that provides access to the
- *
- * TypeScript implementation if the Rust module is not available.
+ * A wrapper class that provides access to the TypeScript implementation
+ * for parallel processing of Notion API requests.
  */
 export declare class RustNotionService {
     private reporter;
@@ -27,14 +26,13 @@ export declare class RustNotionService {
     private parallelLimit;
     private enableCaching;
     private cache;
-    private rustInstance;
     constructor(options: RustNotionServiceOptions);
     /**
      * Get blocks for a single page, with recursive fetching of child blocks
      */
     getPageBlocks(pageId: string): Promise<NotionBlock[]>;
     /**
-     * Get blocks for multiple pages in parallel
+     * Get blocks for multiple pages in parallel
      */
     getMultiplePagesBlocks(pageIds: string[]): Promise<{
         [id: string]: NotionBlock[];
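The declaration above is the public surface of the wrapper. As a rough usage sketch (not taken from the package's docs; the deep import path, the environment-variable name, and the stubbed reporter are assumptions made for illustration), it could be driven like this:

```ts
// Illustrative sketch only. The import path and the reporter stub are assumptions;
// inside a real Gatsby build the `reporter` object comes from the node APIs.
import type { Reporter } from "gatsby";
import { RustNotionService } from "gatsby-source-notion-churnotion/dist/rust-bindings";

const reporter = {
  info: (msg: string) => console.log(msg),
  warn: (msg: string) => console.warn(msg),
} as unknown as Reporter;

async function fetchExample(): Promise<void> {
  const service = new RustNotionService({
    reporter,
    notionApiKey: process.env.NOTION_API_KEY ?? "", // env var name is arbitrary here
    parallelLimit: 5,    // optional, defaults to 5
    enableCaching: true, // optional, defaults to true
  });

  // Fetch blocks for several pages concurrently; the result is keyed by page id.
  const blocksById = await service.getMultiplePagesBlocks(["page-id-a", "page-id-b"]);
  reporter.info(`Fetched blocks for ${Object.keys(blocksById).length} pages`);

  // The concurrency limit can be adjusted at runtime.
  service.setParallelLimit(10);
}
```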
package/dist/rust-bindings.js
CHANGED
@@ -34,19 +34,9 @@ var __importStar = (this && this.__importStar) || (function () {
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.RustNotionService = void 0;
-// Use dynamic import with try-catch to handle module loading
-let notionParallel = null;
-try {
-    // When the compiled Rust library exists, load it
-    notionParallel = require("../rust/notion-parallel/index.node");
-}
-catch (error) {
-    console.warn("Rust bindings for notion-parallel not found. Using TypeScript fallback implementation.");
-}
 /**
- * A wrapper class that provides access to the
- *
- * TypeScript implementation if the Rust module is not available.
+ * A wrapper class that provides access to the TypeScript implementation
+ * for parallel processing of Notion API requests.
  */
 class RustNotionService {
     reporter;
@@ -54,24 +44,13 @@ class RustNotionService {
     parallelLimit;
     enableCaching;
     cache;
-    rustInstance = null;
     constructor(options) {
         this.reporter = options.reporter;
         this.notionApiKey = options.notionApiKey;
         this.parallelLimit = options.parallelLimit || 5;
         this.enableCaching = options.enableCaching !== false;
         this.cache = new Map();
-
-        if (notionParallel) {
-            try {
-                this.rustInstance = new notionParallel.NotionParallel(this.notionApiKey, this.parallelLimit);
-                this.reporter.info(`[RUST] Initialized Rust-based parallel processing with limit: ${this.parallelLimit}`);
-            }
-            catch (error) {
-                this.reporter.warn(`[RUST] Failed to initialize Rust implementation: ${error}`);
-                this.rustInstance = null;
-            }
-        }
+        this.reporter.info(`[NOTION] Initialized TypeScript-based parallel processing with limit: ${this.parallelLimit}`);
     }
     /**
      * Get blocks for a single page, with recursive fetching of child blocks
@@ -82,8 +61,6 @@
             this.reporter.info(`[CACHE] Using cached page blocks for ${pageId}`);
             return this.cache.get(cacheKey);
         }
-        // Currently we don't have a direct Rust counterpart for a single page
-        // So we use the multiple pages method with a single ID
         const results = await this.getMultiplePagesBlocks([pageId]);
         const blocks = results[pageId] || [];
         if (this.enableCaching) {
@@ -92,24 +69,11 @@
         return blocks;
     }
     /**
-     * Get blocks for multiple pages in parallel
+     * Get blocks for multiple pages in parallel
     */
    async getMultiplePagesBlocks(pageIds) {
        this.reporter.info(`[NOTION] Fetching blocks for ${pageIds.length} pages in parallel (limit: ${this.parallelLimit})`);
-        //
-        if (this.rustInstance) {
-            try {
-                this.reporter.info(`[RUST] Using Rust implementation for parallel processing`);
-                const results = await this.rustInstance.getMultiplePagesBlocks(pageIds);
-                return results;
-            }
-            catch (error) {
-                this.reporter.warn(`[RUST] Error using Rust implementation, falling back to TypeScript: ${error}`);
-                // Fall back to TypeScript implementation
-            }
-        }
-        // Fallback to TypeScript implementation
-        // Import the TypeScript implementation dynamically
+        // Import the TypeScript implementation
        const { NotionService } = await Promise.resolve().then(() => __importStar(require("./api/service/notionService")));
        // Create a NotionService instance with the same configuration
        const tsService = new NotionService({
@@ -125,10 +89,6 @@
    setParallelLimit(limit) {
        this.reporter.info(`[NOTION] Updated parallel request limit to ${limit}`);
        this.parallelLimit = limit;
-        // Update the Rust instance if available
-        if (this.rustInstance) {
-            this.rustInstance.setParallelLimit(limit);
-        }
    }
 }
 exports.RustNotionService = RustNotionService;
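With the Rust branch removed, getMultiplePagesBlocks always delegates to the TypeScript NotionService imported above. That service's internals are not part of this diff; purely as an illustration of what a bounded `parallelLimit` implies, here is a minimal, self-contained concurrency-limiting sketch (the `fetchPageBlocks` function is a hypothetical stand-in, not a package API):

```ts
// Generic bounded-concurrency helper: at most `limit` promises in flight.
// This is an illustrative sketch, not the package's actual implementation.
async function mapWithLimit<T, R>(
  items: T[],
  limit: number,
  fn: (item: T) => Promise<R>
): Promise<R[]> {
  const results: R[] = new Array(items.length);
  let next = 0;
  // Spawn up to `limit` workers; each worker repeatedly claims the next index.
  const workers = Array.from({ length: Math.min(limit, items.length) }, async () => {
    while (next < items.length) {
      const index = next++; // claimed synchronously, so no two workers share an index
      results[index] = await fn(items[index]);
    }
  });
  await Promise.all(workers);
  return results;
}

// Hypothetical usage: fetch blocks for many pages with at most 5 requests in flight.
declare function fetchPageBlocks(pageId: string): Promise<unknown[]>;
async function example(pageIds: string[]) {
  return mapWithLimit(pageIds, 5, fetchPageBlocks);
}
```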
package/dist/util/blocks/blockProcessor.d.ts
CHANGED
@@ -1,6 +1,7 @@
 import { BaseContentBlock } from "notion-types";
 import { Actions, GatsbyCache, Reporter } from "gatsby";
 import { CustomImageBlock } from "../../types";
+import { TocEntry } from "../tocHelper";
 export interface BlockProcessorContext {
     actions: Actions;
     getCache: (this: void, id: string) => GatsbyCache;
@@ -12,11 +13,7 @@ export interface ProcessBlockResult {
     thumbnail?: string | null;
     plainText?: string;
     updatedBlock?: BaseContentBlock;
-    tableOfContents?: {
-        type: string;
-        hash: string;
-        title: string;
-    }[];
+    tableOfContents?: TocEntry[];
 }
 export declare abstract class BlockProcessor {
     protected context: BlockProcessorContext;
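For orientation, the abstract class and the new `TocEntry`-typed result could be combined in a custom processor roughly like the sketch below. This is hypothetical: `CalloutTocProcessor` is not part of the package, and it assumes `canProcess`/`process` are the abstract members, as the bundled processors in the following diffs suggest.

```ts
// Hypothetical custom processor, illustrating the updated ProcessBlockResult shape
// in which tableOfContents is typed as TocEntry[] instead of an inline object type.
import { BaseContentBlock } from "notion-types";
import { BlockProcessor, ProcessBlockResult } from "./blockProcessor";
import { TocEntry } from "../tocHelper";

export class CalloutTocProcessor extends BlockProcessor {
  canProcess(block: BaseContentBlock): boolean {
    return block.type === "callout";
  }

  async process(block: BaseContentBlock): Promise<ProcessBlockResult> {
    const entry: TocEntry = {
      type: block.type,
      hash: `link-callout-${block.id}`, // illustrative hash format
      title: "Callout",
    };
    return {
      plainText: "",
      updatedBlock: block,
      tableOfContents: [entry],
    };
  }
}
```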
package/dist/util/blocks/blockProcessorRegistry.js
CHANGED
@@ -5,6 +5,7 @@ const textBlockProcessor_1 = require("./textBlockProcessor");
 const imageBlockProcessor_1 = require("./imageBlockProcessor");
 const mediaBlockProcessor_1 = require("./mediaBlockProcessor");
 const structureBlockProcessor_1 = require("./structureBlockProcessor");
+const tableOfContentsBlockProcessor_1 = require("./tableOfContentsBlockProcessor");
 class BlockProcessorRegistry {
     processors = [];
     context;
@@ -17,6 +18,7 @@ class BlockProcessorRegistry {
         this.registerProcessor(new textBlockProcessor_1.TextBlockProcessor(this.context));
         this.registerProcessor(new imageBlockProcessor_1.ImageBlockProcessor(this.context));
         this.registerProcessor(new mediaBlockProcessor_1.MediaBlockProcessor(this.context));
+        this.registerProcessor(new tableOfContentsBlockProcessor_1.TableOfContentsBlockProcessor(this.context));
         this.registerProcessor(new structureBlockProcessor_1.StructureBlockProcessor(this.context));
     }
     registerProcessor(processor) {
package/dist/util/blocks/index.js
CHANGED
@@ -20,3 +20,4 @@ __exportStar(require("./imageBlockProcessor"), exports);
 __exportStar(require("./mediaBlockProcessor"), exports);
 __exportStar(require("./structureBlockProcessor"), exports);
 __exportStar(require("./textBlockProcessor"), exports);
+__exportStar(require("./tableOfContentsBlockProcessor"), exports);
package/dist/util/blocks/structureBlockProcessor.js
CHANGED
@@ -11,7 +11,6 @@ class StructureBlockProcessor extends blockProcessor_1.BlockProcessor {
         "table_row",
         "divider",
         "breadcrumb",
-        "table_of_contents",
         "equation",
         "synced_block",
         "template",
@@ -44,9 +43,6 @@ class StructureBlockProcessor extends blockProcessor_1.BlockProcessor {
             case "breadcrumb":
                 reporter.info(`Processing breadcrumb block`);
                 break;
-            case "table_of_contents":
-                reporter.info(`Processing table_of_contents block`);
-                break;
             case "equation":
                 reporter.info(`Processing equation block: ${JSON.stringify(block.equation?.expression)}`);
                 break;
package/dist/util/blocks/tableOfContentsBlockProcessor.d.ts
ADDED
@@ -0,0 +1,6 @@
+import { BaseContentBlock } from "notion-types";
+import { BlockProcessor, ProcessBlockResult } from "./blockProcessor";
+export declare class TableOfContentsBlockProcessor extends BlockProcessor {
+    canProcess(block: BaseContentBlock): boolean;
+    process(block: BaseContentBlock): Promise<ProcessBlockResult>;
+}
package/dist/util/blocks/tableOfContentsBlockProcessor.js
ADDED
@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TableOfContentsBlockProcessor = void 0;
+const blockProcessor_1 = require("./blockProcessor");
+class TableOfContentsBlockProcessor extends blockProcessor_1.BlockProcessor {
+    canProcess(block) {
+        return block.type === "table_of_contents";
+    }
+    async process(block) {
+        const { reporter } = this.context;
+        reporter.info(`Processing table_of_contents block with id: ${block.id}`);
+        // Table of contents blocks don't have any content by themselves
+        // They are used as placeholders, and the actual TOC data is generated separately
+        // and passed through the GraphQL schema
+        return {
+            plainText: "",
+            updatedBlock: block,
+            // We don't return tableOfContents from here because that's handled
+            // at a higher level in the processor.ts file
+        };
+    }
+}
+exports.TableOfContentsBlockProcessor = TableOfContentsBlockProcessor;
package/dist/util/processor.d.ts
CHANGED
@@ -1,7 +1,4 @@
 import { Actions, GatsbyCache, Reporter } from "gatsby";
 import { BaseContentBlock } from "notion-types";
-export declare const processor: (blocks: BaseContentBlock[], actions: Actions, getCache: (this: void, id: string) => GatsbyCache, createNodeId: (this: void, input: string) => string, reporter: Reporter, cache: GatsbyCache) => Promise<[string | null, {
-    type: string;
-    hash: string;
-    title: string;
-}[], BaseContentBlock[], string]>;
+import { TocEntry } from "./tocHelper";
+export declare const processor: (blocks: BaseContentBlock[], actions: Actions, getCache: (this: void, id: string) => GatsbyCache, createNodeId: (this: void, input: string) => string, reporter: Reporter, cache: GatsbyCache) => Promise<[string | null, TocEntry[], BaseContentBlock[], string]>;
package/dist/util/processor.js
CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.processor = void 0;
 const metadataProcessor_1 = require("./metadataProcessor");
 const blocks_1 = require("./blocks");
+const tocHelper_1 = require("./tocHelper");
 const processor = async (blocks, actions, getCache, createNodeId, reporter, cache) => {
     const { thumbnail, tableOfContents, updatedBlocks, rawText } = await processBlocksForContent(blocks, actions, getCache, createNodeId, reporter, cache);
     await (0, metadataProcessor_1.processMetadata)(blocks, actions, createNodeId, reporter, cache);
@@ -42,6 +43,8 @@ const processBlocksForContent = async (blocks, actions, getCache, createNodeId,
         }
         return result;
     }));
+    // Optimize the table of contents
+    const processedToc = (0, tocHelper_1.optimizeTocArray)(tableOfContents, reporter);
     // Apply the updated blocks
     processResults.forEach((result, index) => {
         if (result.updatedBlock) {
@@ -51,5 +54,5 @@ const processBlocksForContent = async (blocks, actions, getCache, createNodeId,
             updatedBlocks[index] = blocks[index];
         }
     });
-    return { thumbnail, tableOfContents, updatedBlocks, rawText };
+    return { thumbnail, tableOfContents: processedToc, updatedBlocks, rawText };
 };
package/dist/util/tableOfContent.d.ts
CHANGED
@@ -1,6 +1,3 @@
 import { BaseContentBlock } from "notion-types";
-export declare const processTableOfContents: (block: BaseContentBlock, tableOfContents: {
-    type: string;
-    hash: string;
-    title: string;
-}[]) => Promise<void>;
+import { TocEntry } from "./tocHelper";
+export declare const processTableOfContents: (block: BaseContentBlock, tableOfContents: TocEntry[]) => Promise<void>;
package/dist/util/tableOfContent.js
CHANGED
@@ -2,21 +2,35 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.processTableOfContents = void 0;
 const crypto_1 = require("crypto");
+// Cache used when generating hashes
+const hashCache = new Map();
 const processTableOfContents = async (block, tableOfContents) => {
     if (["heading_1", "heading_2", "heading_3"].includes(block.type) &&
         block[block.type]?.rich_text?.length > 0) {
         const plainText = block[block.type]?.rich_text?.[0]?.plain_text || "";
-        const hash = `link-${plainText
-            .replace(/[^a-zA-Z0-9가-힣\s-_]/g, "")
-            .trim()
-            .replace(/\s+/g, "-")
-            .toLowerCase()}-${(0, crypto_1.randomUUID)().substring(0, 4)}`;
+        // Check whether this plain text has already been processed
+        let hash;
+        if (hashCache.has(plainText)) {
+            hash = hashCache.get(plainText);
+        }
+        else {
+            hash = `link-${plainText
+                .replace(/[^a-zA-Z0-9가-힣\s-_]/g, "")
+                .trim()
+                .replace(/\s+/g, "-")
+                .toLowerCase()}-${(0, crypto_1.randomUUID)().substring(0, 4)}`;
+            hashCache.set(plainText, hash);
+        }
         block.hash = hash;
-        tableOfContents.push({
-            type: block.type,
-            hash,
-            title: plainText,
-        });
+        // Duplicate check - verify whether the same hash already exists
+        const existingTocIndex = tableOfContents.findIndex((toc) => toc.hash === hash);
+        if (existingTocIndex === -1) {
+            tableOfContents.push({
+                type: block.type,
+                hash,
+                title: plainText,
+            });
+        }
     }
 };
 exports.processTableOfContents = processTableOfContents;
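The heading-to-anchor logic above keeps letters, digits, and Hangul, collapses whitespace runs into hyphens, lowercases the result, and appends a short random suffix taken from `randomUUID()`. A standalone copy of that chain (illustrative only, not something the package exports) shows what it produces for a sample heading:

```ts
import { randomUUID } from "crypto";

// Standalone copy of the slug chain used in processTableOfContents above.
// The trailing 4-character suffix differs on every run because it comes from a random UUID.
function headingToHash(plainText: string): string {
  return `link-${plainText
    .replace(/[^a-zA-Z0-9가-힣\s-_]/g, "") // keep letters, digits, Hangul, whitespace, "-" and "_"
    .trim()
    .replace(/\s+/g, "-") // collapse runs of whitespace into single hyphens
    .toLowerCase()}-${randomUUID().substring(0, 4)}`;
}

console.log(headingToHash("Getting Started: Install & Setup"));
// e.g. "link-getting-started-install-setup-a1b2"
```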
package/dist/util/tocHelper.d.ts
ADDED
@@ -0,0 +1,18 @@
+import { Reporter } from "gatsby";
+/**
+ * TocEntry interface representing a table of contents entry
+ */
+export interface TocEntry {
+    type: string;
+    hash: string;
+    title: string;
+}
+/**
+ * Utility function to efficiently handle large table of contents arrays
+ * by removing duplicates and optionally limiting size
+ */
+export declare const optimizeTocArray: (tocEntries: TocEntry[], reporter: Reporter, options?: {
+    maxSize?: number;
+    warnThreshold?: number;
+    removeDuplicates?: boolean;
+}) => TocEntry[];
package/dist/util/tocHelper.js
ADDED
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.optimizeTocArray = void 0;
+/**
+ * Utility function to efficiently handle large table of contents arrays
+ * by removing duplicates and optionally limiting size
+ */
+const optimizeTocArray = (tocEntries, reporter, options = {}) => {
+    const { maxSize = 1000, // Maximum entries to include
+    warnThreshold = 300, // Threshold to issue warning
+    removeDuplicates = true, // Whether to remove duplicates
+     } = options;
+    if (!tocEntries || tocEntries.length === 0) {
+        return [];
+    }
+    // Track memory usage for large TOCs
+    if (tocEntries.length > warnThreshold) {
+        reporter.warn(`Large table of contents detected (${tocEntries.length} items). This might affect performance.`);
+    }
+    // Remove duplicates if requested
+    let processedToc = tocEntries;
+    if (removeDuplicates) {
+        const startTime = Date.now();
+        const uniqueMap = new Map();
+        // Use a map for faster duplicate removal - O(n) instead of O(n²)
+        for (const entry of tocEntries) {
+            uniqueMap.set(entry.hash, entry);
+        }
+        processedToc = Array.from(uniqueMap.values());
+        const removedCount = tocEntries.length - processedToc.length;
+        const processTime = Date.now() - startTime;
+        if (removedCount > 0) {
+            reporter.info(`Removed ${removedCount} duplicate TOC entries in ${processTime}ms.`);
+        }
+    }
+    // Limit size if necessary
+    if (processedToc.length > maxSize) {
+        reporter.warn(`Table of contents exceeds maximum size (${processedToc.length} > ${maxSize}). Truncating.`);
+        processedToc = processedToc.slice(0, maxSize);
+    }
+    return processedToc;
+};
+exports.optimizeTocArray = optimizeTocArray;
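A small usage sketch of the helper above, with a stubbed Gatsby reporter (the stub and the relative import path are assumptions for illustration). Entries that share a `hash` are collapsed, and the result is truncated to `maxSize` when necessary:

```ts
import type { Reporter } from "gatsby";
import { optimizeTocArray, TocEntry } from "./tocHelper"; // path assumes the compiled dist layout

// Minimal reporter stub; a real Gatsby build supplies this object.
const reporter = {
  info: (msg: string) => console.log(msg),
  warn: (msg: string) => console.warn(msg),
} as unknown as Reporter;

const entries: TocEntry[] = [
  { type: "heading_1", hash: "link-intro-ab12", title: "Intro" },
  { type: "heading_2", hash: "link-setup-cd34", title: "Setup" },
  { type: "heading_1", hash: "link-intro-ab12", title: "Intro" }, // duplicate hash
];

const optimized = optimizeTocArray(entries, reporter, { maxSize: 1000, warnThreshold: 300 });
console.log(optimized.length); // 2 - the duplicate "link-intro-ab12" entry was removed
```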
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "gatsby-source-notion-churnotion",
   "description": "Gatsby plugin that can connect with One Notion Database RECURSIVELY using official API",
-  "version": "1.2.1",
+  "version": "1.2.3",
   "skipLibCheck": true,
   "license": "0BSD",
   "main": "./dist/gatsby-node.js",
@@ -11,8 +11,7 @@
   ],
   "scripts": {
     "clean": "del-cli dist",
-    "build": "
-    "build:rust": "cd rust/notion-parallel && (cargo build --release || cargo build --release --target x86_64-pc-windows-gnu || echo 'Rust build failed, using TypeScript fallback') && cd ../..",
+    "build": "tsc",
     "develop": "tsc --watch",
     "test": "jest",
     "prepare": "npm run clean && npm run build"
|