gatsby-source-notion-churnotion 1.0.74 → 1.0.75

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,2 +1,2 @@
1
1
  import { IGetBooksParams } from "../types";
2
- export declare const getBooks: ({ bookDatabaseId, reporter, createNode, createNodeId, getNode, }: IGetBooksParams) => Promise<void>;
2
+ export declare const getBooks: ({ bookDatabaseId, reporter, createNode, createNodeId, getNode, cache, }: IGetBooksParams) => Promise<void>;
@@ -7,16 +7,26 @@ exports.getBooks = void 0;
7
7
  const crypto_1 = __importDefault(require("crypto"));
8
8
  const constants_1 = require("../constants");
9
9
  const fetchData_1 = require("../util/fetchData");
10
- const getBooks = async ({ bookDatabaseId, reporter, createNode, createNodeId, getNode, }) => {
10
+ const getBooks = async ({ bookDatabaseId, reporter, createNode, createNodeId, getNode, cache, }) => {
11
11
  const databaseUrl = `databases/${bookDatabaseId}/query`;
12
- const body = {};
13
- const result = await (0, fetchData_1.fetchPostWithRetry)(databaseUrl, body);
12
+ const cacheKey = `booksDatabase-${bookDatabaseId}`;
13
+ let result = await cache.get(cacheKey);
14
+ if (!result) {
15
+ const body = {};
16
+ result = await (0, fetchData_1.fetchPostWithRetry)(databaseUrl, body);
17
+ await cache.set(cacheKey, result);
18
+ }
14
19
  if (result?.results?.length) {
15
20
  reporter.info(`[SUCCESS] total BOOK pages > ${result.results.length}`);
16
21
  }
17
22
  for (const page of result.results) {
18
23
  reporter.info(`[CHECK] BOOK page: ${page.id}`);
19
24
  const nodeId = createNodeId(`${page.id}-book`);
25
+ const cachedNode = await cache.get(nodeId);
26
+ if (cachedNode) {
27
+ reporter.info(`[CACHE HIT] Skipping already created node: ${nodeId}`);
28
+ continue;
29
+ }
20
30
  const slug = page.properties?.slug?.rich_text?.[0]?.plain_text || `unnamed-slug`;
21
31
  const categoryId = page.properties?.category?.relation?.[0]?.id || null;
22
32
  let book_category = null;
@@ -43,6 +53,7 @@ const getBooks = async ({ bookDatabaseId, reporter, createNode, createNodeId, ge
43
53
  };
44
54
  reporter.info(`[DEBUG] Book ${bookNode.book_name} has book_category: ${bookNode.book_category}`);
45
55
  createNode(bookNode);
56
+ await cache.set(nodeId, bookNode);
46
57
  }
47
58
  };
48
59
  exports.getBooks = getBooks;
@@ -1,2 +1,2 @@
1
1
  import { IGetPagesParams } from "../types";
2
- export declare const getPages: ({ databaseId, reporter, getCache, actions, createNode, createNodeId, createParentChildLink, getNode, }: IGetPagesParams) => Promise<void>;
2
+ export declare const getPages: ({ databaseId, reporter, getCache, actions, createNode, createNodeId, createParentChildLink, getNode, cache, }: IGetPagesParams) => Promise<void>;
@@ -7,9 +7,9 @@ exports.getPages = void 0;
7
7
  const crypto_1 = __importDefault(require("crypto"));
8
8
  const constants_1 = require("../constants");
9
9
  const fetchData_1 = require("../util/fetchData");
10
- const slugify_1 = require("../util/slugify");
11
10
  const processor_1 = require("../util/processor");
12
- const getPages = async ({ databaseId, reporter, getCache, actions, createNode, createNodeId, createParentChildLink, getNode, }) => {
11
+ const slugify_1 = require("../util/slugify");
12
+ const getPages = async ({ databaseId, reporter, getCache, actions, createNode, createNodeId, createParentChildLink, getNode, cache, }) => {
13
13
  /**
14
14
  * 데이터베이스 내에 페이지들을 읽어서 재귀적으로 추가하는 서브 메서드
15
15
  * @param databaseId 데이터베이스 아이디
@@ -139,7 +139,7 @@ const getPages = async ({ databaseId, reporter, getCache, actions, createNode, c
139
139
  });
140
140
  }
141
141
  const bookId = page.properties?.book?.relation?.[0]?.id || null;
142
- const [imageNode, tableOfContents, updatedBlocks, rawText] = await (0, processor_1.processor)(pageData.results, actions, getCache, createNodeId, reporter);
142
+ const [imageNode, tableOfContents, updatedBlocks, rawText] = await (0, processor_1.processor)(pageData.results, actions, getCache, createNodeId, reporter, cache);
143
143
  const postNode = {
144
144
  id: nodeId,
145
145
  category: parentCategoryId,
@@ -4,7 +4,7 @@ exports.sourceNodes = void 0;
4
4
  const getPages_1 = require("./api/getPages");
5
5
  const getBooks_1 = require("./api/getBooks");
6
6
  const sourceNodes = async (gatsbyApi, options) => {
7
- const { actions, reporter, createNodeId, getNode, getCache } = gatsbyApi;
7
+ const { actions, reporter, createNodeId, getNode, getCache, cache } = gatsbyApi;
8
8
  const { createNode, createParentChildLink } = actions;
9
9
  const { token, databaseId, bookDatabaseId } = options;
10
10
  if (!token || !databaseId) {
@@ -19,6 +19,7 @@ const sourceNodes = async (gatsbyApi, options) => {
19
19
  createNode,
20
20
  createNodeId,
21
21
  getNode,
22
+ cache,
22
23
  });
23
24
  await (0, getPages_1.getPages)({
24
25
  token,
@@ -30,6 +31,7 @@ const sourceNodes = async (gatsbyApi, options) => {
30
31
  createNodeId,
31
32
  createParentChildLink,
32
33
  getNode,
34
+ cache,
33
35
  });
34
36
  }
35
37
  catch (e) {
@@ -1,6 +1,6 @@
1
1
  import { Actions, GatsbyCache, Reporter } from "gatsby";
2
2
  import { BaseContentBlock } from "notion-types";
3
- export declare const processor: (blocks: BaseContentBlock[], actions: Actions, getCache: (this: void, id: string) => GatsbyCache, createNodeId: (this: void, input: string) => string, reporter: Reporter) => Promise<[string | null, {
3
+ export declare const processor: (blocks: BaseContentBlock[], actions: Actions, getCache: (this: void, id: string) => GatsbyCache, createNodeId: (this: void, input: string) => string, reporter: Reporter, cache: GatsbyCache) => Promise<[string | null, {
4
4
  type: string;
5
5
  hash: string;
6
6
  title: string;
@@ -4,13 +4,13 @@ exports.processor = void 0;
4
4
  const gatsby_source_filesystem_1 = require("gatsby-source-filesystem");
5
5
  const metadataProcessor_1 = require("./metadataProcessor");
6
6
  const tableOfContent_1 = require("./tableOfContent");
7
- const processor = async (blocks, actions, getCache, createNodeId, reporter) => {
8
- const { thumbnail, tableOfContents, updatedBlocks, rawText } = await processBlocksForContent(blocks, actions, getCache, createNodeId, reporter);
7
+ const processor = async (blocks, actions, getCache, createNodeId, reporter, cache) => {
8
+ const { thumbnail, tableOfContents, updatedBlocks, rawText } = await processBlocksForContent(blocks, actions, getCache, createNodeId, reporter, cache);
9
9
  await (0, metadataProcessor_1.processMetadata)(blocks, actions, createNodeId, reporter);
10
10
  return [thumbnail, tableOfContents, updatedBlocks, rawText];
11
11
  };
12
12
  exports.processor = processor;
13
- const processBlocksForContent = async (blocks, actions, getCache, createNodeId, reporter) => {
13
+ const processBlocksForContent = async (blocks, actions, getCache, createNodeId, reporter, cache) => {
14
14
  const tableOfContents = [];
15
15
  let thumbnail = null;
16
16
  let rawText = "";
@@ -21,7 +21,7 @@ const processBlocksForContent = async (blocks, actions, getCache, createNodeId,
21
21
  rawText += plainText + " ";
22
22
  }
23
23
  if (isImageBlock(block)) {
24
- const updatedBlock = await processImageBlock(block, actions, getCache, createNodeId, reporter);
24
+ const updatedBlock = await processImageBlock(block, actions, getCache, createNodeId, reporter, cache);
25
25
  if (!thumbnail && updatedBlock?.image?.fileId) {
26
26
  thumbnail = updatedBlock.image.fileId;
27
27
  }
@@ -58,7 +58,7 @@ const extractPlainText = (block) => {
58
58
  const isImageBlock = (block) => {
59
59
  return block.type === "image" && "image" in block;
60
60
  };
61
- const processImageBlock = async (block, actions, getCache, createNodeId, reporter) => {
61
+ const processImageBlock = async (block, actions, getCache, createNodeId, reporter, cache) => {
62
62
  const { createNode } = actions;
63
63
  if (block.type === "image" && "image" in block) {
64
64
  const imageSourceType = block.image.type;
@@ -67,6 +67,19 @@ const processImageBlock = async (block, actions, getCache, createNodeId, reporte
67
67
  : block.image?.file?.url;
68
68
  if (!imageUrl)
69
69
  return null;
70
+ const cacheKey = `${imageUrl}-post-image`;
71
+ const cachedFileNodeId = await cache.get(cacheKey);
72
+ if (cachedFileNodeId) {
73
+ reporter.info(`[CACHE HIT] Image already processed: ${imageUrl}`);
74
+ const updatedBlock = {
75
+ ...block,
76
+ image: {
77
+ fileId: cachedFileNodeId,
78
+ caption: block.image.caption,
79
+ },
80
+ };
81
+ return updatedBlock;
82
+ }
70
83
  try {
71
84
  const fileNode = await (0, gatsby_source_filesystem_1.createRemoteFileNode)({
72
85
  url: imageUrl,
@@ -84,6 +97,7 @@ const processImageBlock = async (block, actions, getCache, createNodeId, reporte
84
97
  },
85
98
  };
86
99
  reporter.info(`[SUCCESS] Image processed: ${fileNode.id}`);
100
+ await cache.set(cacheKey, fileNode.id);
87
101
  return updatedBlock;
88
102
  }
89
103
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "gatsby-source-notion-churnotion",
3
3
  "description": "Gatsby plugin that can connect with One Notion Database RECURSIVELY using official API",
4
- "version": "1.0.74",
4
+ "version": "1.0.75",
5
5
  "skipLibCheck": true,
6
6
  "license": "0BSD",
7
7
  "main": "./dist/gatsby-node.js",
@@ -41,7 +41,6 @@
41
41
  "gatsby-source-filesystem": "^5.14.0",
42
42
  "gatsby-transformer-json": "^5.14.0",
43
43
  "gatsby-transformer-sharp": "^5.14.0",
44
- "mecab-ya": "^0.1.1",
45
44
  "metascraper": "^5.45.25",
46
45
  "metascraper-description": "^5.45.25",
47
46
  "metascraper-image": "^5.45.27",