astroplugin-logseq 0.0.2 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -33,9 +33,22 @@ __export(index_exports, {
    default: () => logseqIntegration
  });
  module.exports = __toCommonJS(index_exports);
- var import_date_fns = require("date-fns");
  var import_wretch = __toESM(require("wretch"), 1);

+ // src/utils/has-content-changed.ts
+ var import_promises = __toESM(require("fs/promises"), 1);
+ var hasContentChanged = async (path2, newContent) => {
+   try {
+     const currentContent = await import_promises.default.readFile(path2, "utf-8");
+     return currentContent !== newContent;
+   } catch {
+     return true;
+   }
+ };
+
+ // src/utils/process-tag-group.ts
+ var import_date_fns = require("date-fns");
+
  // src/api/get-page-blocks-tree.ts
  var getPageBlocksTree = async (api, item, logger) => {
    try {
@@ -74,15 +87,23 @@ var getRawResponse = async (api, tag, logger) => {
    }
  };

- // src/utils/has-content-changed.ts
- var import_promises = __toESM(require("fs/promises"), 1);
- var hasContentChanged = async (path2, newContent) => {
-   try {
-     const currentContent = await import_promises.default.readFile(path2, "utf-8");
-     return currentContent !== newContent;
-   } catch {
-     return true;
+ // src/utils/process-tag-group.ts
+ var processTagGroup = async (api, target, logger) => {
+   const { tag, directory } = target;
+   const rawResponse = await getRawResponse(api, tag, logger);
+   if (!rawResponse || rawResponse.length === 0) return;
+   const mappedResponse = [];
+   for (const item of rawResponse.flat()) {
+     const pbt = await getPageBlocksTree(api, item, logger);
+     if (!pbt) continue;
+     mappedResponse.push({
+       createdAt: (0, import_date_fns.format)(item["created-at"], "yyyy-MM-dd"),
+       updatedAt: (0, import_date_fns.format)(item["updated-at"], "yyyy-MM-dd"),
+       pageTitle: item.title,
+       content: recursivelyGetContent(pbt)
+     });
    }
+   await writeToMd(directory, mappedResponse, logger);
  };

  // src/utils/recursively-get-content.ts
@@ -142,10 +163,9 @@ ${page.content}`;
  function logseqIntegration(options) {
    const {
      token,
+     targets,
      apiUrl = "http://127.0.0.1:12315/api",
-     pollingInterval = 1e3,
-     directory = "src/content/docs/blog",
-     tag = "public"
+     pollingInterval = 1e3
    } = options;
    return {
      name: "astro-logseq-publish",
@@ -158,22 +178,11 @@ function logseqIntegration(options) {
          });
          setInterval(async () => {
            try {
-             const rawResponse = await getRawResponse(api, tag, logger);
-             if (!rawResponse) return;
-             const mappedResponse = [];
-             for (const item of rawResponse.flat()) {
-               const pbt = await getPageBlocksTree(api, item, logger);
-               if (!pbt) continue;
-               mappedResponse.push({
-                 createdAt: (0, import_date_fns.format)(item["created-at"], "yyyy-MM-dd"),
-                 updatedAt: (0, import_date_fns.format)(item["updated-at"], "yyyy-MM-dd"),
-                 pageTitle: item.title,
-                 content: recursivelyGetContent(pbt)
-               });
-             }
-             await writeToMd(directory, mappedResponse, logger);
+             await Promise.all(
+               targets.map((target) => processTagGroup(api, target, logger))
+             );
            } catch (e) {
-             logger.info(e.message || String(e));
+             logger.error(e.message || String(e));
            }
          }, pollingInterval);
        },
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/has-content-changed.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport { format } from 'date-fns'\nimport wretch from 'wretch'\n\nimport { getPageBlocksTree, getRawResponse } from './api'\nimport { LogseqIntegrationOptions, MappedResponse } from './types'\nimport { recursivelyGetContent, writeToMd } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n directory = 'src/content/docs/blog',\n tag = 'public',\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n const rawResponse = await getRawResponse(api, tag, logger)\n if (!rawResponse) return\n\n const mappedResponse: MappedResponse[] = []\n for (const item of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, item, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(item['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(item['updated-at'], 'yyyy-MM-dd'),\n pageTitle: item.title,\n content: recursivelyGetContent(pbt),\n })\n }\n await writeToMd(directory, mappedResponse, logger)\n } catch (e: any) {\n logger.info(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n item: LogseqResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [item.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent ...}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. 
${String(e)}`,\n )\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { ContentBlock } from \"../types\";\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = \"\";\n const indent = \" \".repeat(depth);\n for (const block of contentBlocks) {\n const text = block.title || \"\";\n if (depth === 0) {\n content += `\\n\\n${text}`;\n } else {\n content += `\\n${indent}- ${text}`;\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1);\n }\n }\n return content;\n};\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAAuB;AACvB,oBAAmB;;;ACGZ,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAWoB,GAAG;AAErC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAAyB,KAAM,CAAC;AAAA,EAEvC,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;ACrCA,sBAAe;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,gBAAAC,QAAG,SAASD,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACPO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;ACpBA,IAAAE,mBAAe;AACf,uBAAiB;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,iBAAAC,QAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAM,iBAAAC,QAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,iBAAAD,QAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAM,iBAAAC,QAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AL5Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,IAClB,YAAY;AAAA,IACZ,MAAM;AAAA,EACR,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,UAAM,cAAAC,SAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,cAAc,MAAM,eAAe,KAAK,KAAK,MAAM;AACzD,gBAAI,CAAC,YAAa;AAElB,kBAAM,iBAAmC,CAAC;AAC1C,uBAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,oBAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,kBAAI,CAAC,IAAK;AAEV,6BAAe,KAAK;AAAA,gBAClB,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,WAAW,KAAK;AAAA,gBAChB,SAAS,sBAAsB,GAAG;AAAA,cACpC,CAAC;AAAA,YACH;AACA,kBAAM,UAAU,WAAW,gBAAgB,MAAM;AAAA,UACnD,SAAS,GAAQ;AACf,mBAAO,KAAK,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACpC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","import_promises","path","fs","wretch"]}
+ {"version":3,"sources":["../src/index.ts","../src/utils/has-content-changed.ts","../src/utils/process-tag-group.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport wretch from 'wretch'\n\nimport { LogseqIntegrationOptions } from './types'\nimport { processTagGroup } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n targets,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n await Promise.all(\n targets.map((target) => processTagGroup(api, target, logger)),\n )\n } catch (e: any) {\n logger.error(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { format } from 'date-fns'\nimport { getPageBlocksTree, getRawResponse } from 'src/api'\nimport { MappedResponse, TagTarget } from 'src/types'\nimport { Wretch } from 'wretch/types'\n\nimport { recursivelyGetContent, writeToMd } from '.'\n\nexport const processTagGroup = async (\n api: Wretch,\n target: TagTarget,\n logger: any,\n) => {\n const { tag, directory } = target\n\n const rawResponse = await getRawResponse(api, tag, logger)\n if (!rawResponse || rawResponse.length === 0) return\n\n const mappedResponse: MappedResponse[] = []\n\n for (const item of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, item, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(item['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(item['updated-at'], 'yyyy-MM-dd'),\n pageTitle: item.title,\n content: recursivelyGetContent(pbt),\n })\n }\n\n await writeToMd(directory, mappedResponse, logger)\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n item: LogseqResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [item.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent ...}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 
'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. ${String(e)}`,\n )\n }\n}\n","import { ContentBlock } from \"../types\";\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = \"\";\n const indent = \" \".repeat(depth);\n for (const block of contentBlocks) {\n const text = block.title || \"\";\n if (depth === 0) {\n content += `\\n\\n${text}`;\n } else {\n content += `\\n${indent}- ${text}`;\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1);\n }\n }\n return content;\n};\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;;;ACDnB,sBAAe;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,gBAAAC,QAAG,SAASD,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACTA,sBAAuB;;;ACKhB,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAWoB,GAAG;AAErC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAAyB,KAAM,CAAC;AAAA,EAEvC,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;AF9BO,IAAM,kBAAkB,OAC7B,KACA,QACA,WACG;AACH,QAAM,EAAE,KAAK,UAAU,IAAI;AAE3B,QAAM,cAAc,MAAM,eAAe,KAAK,KAAK,MAAM;AACzD,MAAI,CAAC,eAAe,YAAY,WAAW,EAAG;AAE9C,QAAM,iBAAmC,CAAC;AAE1C,aAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,UAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,QAAI,CAAC,IAAK;AAEV,mBAAe,KAAK;AAAA,MAClB,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,KAAK;AAAA,MAChB,SAAS,sBAAsB,GAAG;AAAA,IACpC,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,WAAW,gBAAgB,MAAM;AACnD;;;AG9BO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;ACpBA,IAAAE,mBAAe;AACf,uBAAiB;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,iBAAAC,QAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAM,iBAAAC,QAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,iBAAAD,QAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAM,iBAAAC,QAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AN9Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,UAAM,cAAAC,SAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,QAAQ;AAAA,cACZ,QAAQ,IAAI,CAAC,WAAW,gBAAgB,KAAK,QAAQ,MAAM,CAAC;AAAA,YAC9D;AAAA,UACF,SAAS,GAAQ;AACf,mBAAO,MAAM,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACrC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","import_promises","path","fs","wretch"]}
package/dist/index.d.cts CHANGED
@@ -1,11 +1,14 @@
  import { AstroIntegration } from 'astro';

+ interface TagTarget {
+   tag: string;
+   directory: string;
+ }
  interface LogseqIntegrationOptions {
    token: string;
+   targets: TagTarget[];
    apiUrl?: string;
    pollingInterval?: number;
-   directory?: string;
-   tag: string;
  }

  declare function logseqIntegration(options: LogseqIntegrationOptions): AstroIntegration;
package/dist/index.d.ts CHANGED
@@ -1,11 +1,14 @@
  import { AstroIntegration } from 'astro';

+ interface TagTarget {
+   tag: string;
+   directory: string;
+ }
  interface LogseqIntegrationOptions {
    token: string;
+   targets: TagTarget[];
    apiUrl?: string;
    pollingInterval?: number;
-   directory?: string;
-   tag: string;
  }

  declare function logseqIntegration(options: LogseqIntegrationOptions): AstroIntegration;
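
The typings above capture the breaking change in this release: the single directory/tag option pair is replaced by a targets array, so one integration instance can sync several tags into separate directories. A minimal consumer-side sketch against the 1.1.0 typings might look like the following; the environment variable name, tag values, and directories are illustrative assumptions, not values taken from this package's documentation:

// astro.config.mjs — hypothetical consumer project, not part of this package
import { defineConfig } from "astro/config";
import logseq from "astroplugin-logseq";

export default defineConfig({
  integrations: [
    logseq({
      token: process.env.LOGSEQ_API_TOKEN, // Logseq HTTP API server token
      targets: [
        // each tag is polled and written into its own output directory
        { tag: "public", directory: "src/content/docs/blog" },
        { tag: "notes", directory: "src/content/docs/notes" },
      ],
      // apiUrl and pollingInterval keep their defaults (local API, 1000 ms)
    }),
  ],
});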
package/dist/index.js CHANGED
@@ -1,7 +1,20 @@
  // src/index.ts
- import { format } from "date-fns";
  import wretch from "wretch";

+ // src/utils/has-content-changed.ts
+ import fs from "fs/promises";
+ var hasContentChanged = async (path2, newContent) => {
+   try {
+     const currentContent = await fs.readFile(path2, "utf-8");
+     return currentContent !== newContent;
+   } catch {
+     return true;
+   }
+ };
+
+ // src/utils/process-tag-group.ts
+ import { format } from "date-fns";
+
  // src/api/get-page-blocks-tree.ts
  var getPageBlocksTree = async (api, item, logger) => {
    try {
@@ -40,15 +53,23 @@ var getRawResponse = async (api, tag, logger) => {
    }
  };

- // src/utils/has-content-changed.ts
- import fs from "fs/promises";
- var hasContentChanged = async (path2, newContent) => {
-   try {
-     const currentContent = await fs.readFile(path2, "utf-8");
-     return currentContent !== newContent;
-   } catch {
-     return true;
+ // src/utils/process-tag-group.ts
+ var processTagGroup = async (api, target, logger) => {
+   const { tag, directory } = target;
+   const rawResponse = await getRawResponse(api, tag, logger);
+   if (!rawResponse || rawResponse.length === 0) return;
+   const mappedResponse = [];
+   for (const item of rawResponse.flat()) {
+     const pbt = await getPageBlocksTree(api, item, logger);
+     if (!pbt) continue;
+     mappedResponse.push({
+       createdAt: format(item["created-at"], "yyyy-MM-dd"),
+       updatedAt: format(item["updated-at"], "yyyy-MM-dd"),
+       pageTitle: item.title,
+       content: recursivelyGetContent(pbt)
+     });
    }
+   await writeToMd(directory, mappedResponse, logger);
  };

  // src/utils/recursively-get-content.ts
@@ -108,10 +129,9 @@ ${page.content}`;
  function logseqIntegration(options) {
    const {
      token,
+     targets,
      apiUrl = "http://127.0.0.1:12315/api",
-     pollingInterval = 1e3,
-     directory = "src/content/docs/blog",
-     tag = "public"
+     pollingInterval = 1e3
    } = options;
    return {
      name: "astro-logseq-publish",
@@ -124,22 +144,11 @@ function logseqIntegration(options) {
          });
          setInterval(async () => {
            try {
-             const rawResponse = await getRawResponse(api, tag, logger);
-             if (!rawResponse) return;
-             const mappedResponse = [];
-             for (const item of rawResponse.flat()) {
-               const pbt = await getPageBlocksTree(api, item, logger);
-               if (!pbt) continue;
-               mappedResponse.push({
-                 createdAt: format(item["created-at"], "yyyy-MM-dd"),
-                 updatedAt: format(item["updated-at"], "yyyy-MM-dd"),
-                 pageTitle: item.title,
-                 content: recursivelyGetContent(pbt)
-               });
-             }
-             await writeToMd(directory, mappedResponse, logger);
+             await Promise.all(
+               targets.map((target) => processTagGroup(api, target, logger))
+             );
            } catch (e) {
-             logger.info(e.message || String(e));
+             logger.error(e.message || String(e));
            }
          }, pollingInterval);
        },
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/has-content-changed.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport { format } from 'date-fns'\nimport wretch from 'wretch'\n\nimport { getPageBlocksTree, getRawResponse } from './api'\nimport { LogseqIntegrationOptions, MappedResponse } from './types'\nimport { recursivelyGetContent, writeToMd } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n directory = 'src/content/docs/blog',\n tag = 'public',\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n const rawResponse = await getRawResponse(api, tag, logger)\n if (!rawResponse) return\n\n const mappedResponse: MappedResponse[] = []\n for (const item of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, item, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(item['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(item['updated-at'], 'yyyy-MM-dd'),\n pageTitle: item.title,\n content: recursivelyGetContent(pbt),\n })\n }\n await writeToMd(directory, mappedResponse, logger)\n } catch (e: any) {\n logger.info(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n item: LogseqResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [item.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent ...}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. 
${String(e)}`,\n )\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { ContentBlock } from \"../types\";\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = \"\";\n const indent = \" \".repeat(depth);\n for (const block of contentBlocks) {\n const text = block.title || \"\";\n if (depth === 0) {\n content += `\\n\\n${text}`;\n } else {\n content += `\\n${indent}- ${text}`;\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1);\n }\n }\n return content;\n};\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";AACA,SAAS,cAAc;AACvB,OAAO,YAAY;;;ACGZ,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAWoB,GAAG;AAErC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAAyB,KAAM,CAAC;AAAA,EAEvC,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;ACrCA,OAAO,QAAQ;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,GAAG,SAASA,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACPO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;ACpBA,OAAOC,SAAQ;AACf,OAAO,UAAU;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,KAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAMC,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,KAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAMA,IAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AL5Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,IAClB,YAAY;AAAA,IACZ,MAAM;AAAA,EACR,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,MAAM,OAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,cAAc,MAAM,eAAe,KAAK,KAAK,MAAM;AACzD,gBAAI,CAAC,YAAa;AAElB,kBAAM,iBAAmC,CAAC;AAC1C,uBAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,oBAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,kBAAI,CAAC,IAAK;AAEV,6BAAe,KAAK;AAAA,gBAClB,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,WAAW,KAAK;AAAA,gBAChB,SAAS,sBAAsB,GAAG;AAAA,cACpC,CAAC;AAAA,YACH;AACA,kBAAM,UAAU,WAAW,gBAAgB,MAAM;AAAA,UACnD,SAAS,GAAQ;AACf,mBAAO,KAAK,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACpC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","fs"]}
+ {"version":3,"sources":["../src/index.ts","../src/utils/has-content-changed.ts","../src/utils/process-tag-group.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport wretch from 'wretch'\n\nimport { LogseqIntegrationOptions } from './types'\nimport { processTagGroup } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n targets,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n await Promise.all(\n targets.map((target) => processTagGroup(api, target, logger)),\n )\n } catch (e: any) {\n logger.error(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { format } from 'date-fns'\nimport { getPageBlocksTree, getRawResponse } from 'src/api'\nimport { MappedResponse, TagTarget } from 'src/types'\nimport { Wretch } from 'wretch/types'\n\nimport { recursivelyGetContent, writeToMd } from '.'\n\nexport const processTagGroup = async (\n api: Wretch,\n target: TagTarget,\n logger: any,\n) => {\n const { tag, directory } = target\n\n const rawResponse = await getRawResponse(api, tag, logger)\n if (!rawResponse || rawResponse.length === 0) return\n\n const mappedResponse: MappedResponse[] = []\n\n for (const item of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, item, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(item['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(item['updated-at'], 'yyyy-MM-dd'),\n pageTitle: item.title,\n content: recursivelyGetContent(pbt),\n })\n }\n\n await writeToMd(directory, mappedResponse, logger)\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n item: LogseqResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [item.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent ...}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 
'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. ${String(e)}`,\n )\n }\n}\n","import { ContentBlock } from \"../types\";\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = \"\";\n const indent = \" \".repeat(depth);\n for (const block of contentBlocks) {\n const text = block.title || \"\";\n if (depth === 0) {\n content += `\\n\\n${text}`;\n } else {\n content += `\\n${indent}- ${text}`;\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1);\n }\n }\n return content;\n};\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";AACA,OAAO,YAAY;;;ACDnB,OAAO,QAAQ;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,GAAG,SAASA,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACTA,SAAS,cAAc;;;ACKhB,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAWoB,GAAG;AAErC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAAyB,KAAM,CAAC;AAAA,EAEvC,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;AF9BO,IAAM,kBAAkB,OAC7B,KACA,QACA,WACG;AACH,QAAM,EAAE,KAAK,UAAU,IAAI;AAE3B,QAAM,cAAc,MAAM,eAAe,KAAK,KAAK,MAAM;AACzD,MAAI,CAAC,eAAe,YAAY,WAAW,EAAG;AAE9C,QAAM,iBAAmC,CAAC;AAE1C,aAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,UAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,QAAI,CAAC,IAAK;AAEV,mBAAe,KAAK;AAAA,MAClB,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,KAAK;AAAA,MAChB,SAAS,sBAAsB,GAAG;AAAA,IACpC,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,WAAW,gBAAgB,MAAM;AACnD;;;AG9BO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;ACpBA,OAAOC,SAAQ;AACf,OAAO,UAAU;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,KAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAMC,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,KAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAMA,IAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AN9Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,MAAM,OAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,QAAQ;AAAA,cACZ,QAAQ,IAAI,CAAC,WAAW,gBAAgB,KAAK,QAAQ,MAAM,CAAC;AAAA,YAC9D;AAAA,UACF,SAAS,GAAQ;AACf,mBAAO,MAAM,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACrC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","fs"]}
package/package.json CHANGED
@@ -1,14 +1,10 @@
  {
    "name": "astroplugin-logseq",
-   "version": "0.0.2",
    "author": "benjypng",
    "description": "Astro integration to sync Logseq pages as content collections",
    "license": "MIT",
    "type": "module",
-   "repository": {
-     "type": "git",
-     "url": "https://github.com/benjypng/astroplugin-logseq.git"
-   },
+   "repository": "benjypng/astroplugin-logseq",
    "main": "./dist/index.cjs",
    "module": "./dist/index.js",
    "types": "./dist/index.d.ts",
@@ -34,11 +30,17 @@
      "dev": "tsup --watch",
      "prepublishOnly": "npm run build"
    },
+   "publishConfig": {
+     "access": "public",
+     "registry": "https://registry.npmjs.org/"
+   },
    "release": {
      "branches": [
        "main"
      ],
      "plugins": [
+       "@semantic-release/commit-analyzer",
+       "@semantic-release/release-notes-generator",
        "@semantic-release/npm",
        "@semantic-release/github"
      ]
@@ -67,5 +69,6 @@
      "tsup": "^8.5.1",
      "typescript": "^5.5.4",
      "typescript-eslint": "^8.51.0"
-   }
+   },
+   "version": "1.1.0"
  }