astroplugin-logseq 1.2.0 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -16,6 +16,8 @@ It operates by polling the Logseq HTTP API for pages containing specific tags an
  * **Selective Sync:** Filters pages based on user-defined tags (e.g., `blog`, `notes`) and maps them to specific target directories.
  * **Block Preservation:** Maintains the hierarchy of Logseq blocks, rendering them as nested lists in the output Markdown.
  * **Change Detection:** Compares fetched content against existing files on disk to minimize unnecessary write operations.
+ * **Handles references:** Strips the `[[ ]]` brackets from page references unless they appear inside a code block.
+ * **Set custom property for published date:** Customise the Logseq property used to set the published date in Astro.
 
  ## Installation
  npm install astroplugin-logseq
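For orientation, the reference handling added above relies on the replace pattern visible in `src/utils/recursively-get-content.ts` later in this diff. A minimal sketch of that behaviour, with made-up sample text and without the branch that preserves references inside code-type blocks:

```js
// Sketch only: same regex as recursively-get-content.ts, code-display branch omitted.
// Inline code spans are returned untouched; [[Page Name]] loses its brackets.
const stripRefs = (text) =>
  text.replace(/(`[^`]+`)|\[\[(.*?)\]\]/g, (_match, code, link) => (code ? code : link))

console.log(stripRefs('See [[My Other Page]] and `[[kept verbatim]]`'))
// -> See My Other Page and `[[kept verbatim]]`
```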
@@ -41,6 +43,9 @@ export default defineConfig({
  // Required: The authorization token generated in Logseq
  token: 'YOUR_SECRET_LOGSEQ_TOKEN',
 
+ // Required: The Logseq property that holds the published date for your pages
+ dateRef: 'publish-date',
+
  // Required: mapping tags to destination directories
  targets: [
  {
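Assembled from the two README hunks above, a complete config might look like the sketch below. The `integrations` wrapper follows the usual Astro pattern and is not shown in this hunk, and the tag/directory values are placeholders rather than package defaults.

```js
// astro.config.mjs (illustrative sketch, not taken verbatim from the package README)
import { defineConfig } from 'astro/config'
import logseq from 'astroplugin-logseq'

export default defineConfig({
  integrations: [
    logseq({
      token: 'YOUR_SECRET_LOGSEQ_TOKEN',
      // New in 1.3.0: the Logseq property whose value becomes the published date
      dateRef: 'publish-date',
      targets: [{ tag: 'blog', directory: 'src/content/blog' }],
    }),
  ],
})
```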
package/dist/index.cjs CHANGED
@@ -49,6 +49,20 @@ var hasContentChanged = async (path2, newContent) => {
  // src/utils/process-tag-group.ts
  var import_date_fns = require("date-fns");
 
+ // src/api/get-date-prop-ident.ts
+ var getDatePropPage = async (api, dateRef, logger) => {
+ try {
+ const datePropPage = await api.post({
+ method: "logseq.Editor.getPage",
+ args: [dateRef]
+ }).json();
+ const datePropPageIdent = datePropPage.ident;
+ return datePropPageIdent;
+ } catch (e) {
+ logger.info(`Unable to get page for date reference: ${String(e)}`);
+ }
+ };
+
  // src/api/get-page-blocks-tree.ts
  var getPageBlocksTree = async (api, page, logger) => {
  try {
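The helper added in this hunk is the compiled form of the new `src/api/get-date-prop-ident.ts` module. Its TypeScript source, as recorded in the bundled source map further down, reads roughly as follows (the response type is simplified here):

```ts
import { AstroIntegrationLogger } from 'astro'
import { Wretch } from 'wretch/types'

// Resolves the user-supplied dateRef property name to the property page's
// internal ident, which the datascript query below uses to pull journal-day.
export const getDatePropPage = async (
  api: Wretch,
  dateRef: string,
  logger: AstroIntegrationLogger,
) => {
  try {
    const datePropPage = await api
      .post({ method: 'logseq.Editor.getPage', args: [dateRef] })
      .json<{ ident?: string }>()
    return datePropPage.ident
  } catch (e) {
    logger.info(`Unable to get page for date reference: ${String(e)}`)
  }
}
```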
@@ -62,19 +76,20 @@ var getPageBlocksTree = async (api, page, logger) => {
  };
 
  // src/api/get-raw-response.ts
- var getRawResponse = async (api, tag, logger) => {
+ var getRawResponse = async (api, datePropPageIdent, tag, logger) => {
  const query = `
- [:find (pull ?p
- [:block/name
- :block/full-title
- :block/created-at
- :block/updated-at
- :block/title
- {:block/_parent [:block/uuid]}])
- :where
- [?p :block/name]
- [?p :block/tags ?t]
- [?t :block/name "${tag}"]]`;
+ [:find (pull ?p
+ [:block/name
+ :block/full-title
+ :block/created-at
+ :block/updated-at
+ :block/title
+ ${datePropPageIdent && `{${datePropPageIdent} [:block/journal-day]}`}
+ {:block/_parent [:block/uuid]}])
+ :where
+ [?p :block/name]
+ [?p :block/tags ?t]
+ [?t :block/name "${tag}"]]`;
  try {
  return await api.post({
  method: "logseq.DB.datascriptQuery",
@@ -88,11 +103,12 @@ var getRawResponse = async (api, tag, logger) => {
  };
 
  // src/utils/process-tag-group.ts
- var processTagGroup = async (api, target, logger) => {
+ var processTagGroup = async (api, dateRef, target, logger) => {
  const { tag, directory } = target;
- const rawResponse = await getRawResponse(api, tag, logger);
- if (!rawResponse || rawResponse.length === 0) return;
  const mappedResponse = [];
+ const datePropPageIdent = await getDatePropPage(api, dateRef, logger);
+ const rawResponse = await getRawResponse(api, datePropPageIdent, tag, logger);
+ if (!rawResponse || rawResponse.length === 0) return;
  for (const page of rawResponse.flat()) {
  const pbt = await getPageBlocksTree(api, page, logger);
  if (!pbt) continue;
@@ -100,10 +116,17 @@ var processTagGroup = async (api, target, logger) => {
  createdAt: (0, import_date_fns.format)(page["created-at"], "yyyy-MM-dd"),
  updatedAt: (0, import_date_fns.format)(page["updated-at"], "yyyy-MM-dd"),
  pageTitle: page.title,
- content: recursivelyGetContent(pbt)
+ content: recursivelyGetContent(pbt),
+ ...datePropPageIdent && {
+ date: (0, import_date_fns.parse)(
+ String(page[datePropPageIdent]["journal-day"]),
+ "yyyyMMdd",
+ /* @__PURE__ */ new Date()
+ )
+ }
  });
+ await writeToMd(directory, mappedResponse, logger);
  }
- await writeToMd(directory, mappedResponse, logger);
  };
 
  // src/utils/recursively-get-content.ts
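The new `date` field is built by parsing `:block/journal-day`, which the code treats as a `yyyyMMdd`-style number. A standalone sketch of that conversion, with a made-up journal-day value:

```js
import { format, parse } from 'date-fns'

const journalDay = 20240131 // hypothetical :block/journal-day value

// Same call shape as the compiled code above: stringify the day and parse it
// against "yyyyMMdd", using the current date as date-fns' reference argument.
const date = parse(String(journalDay), 'yyyyMMdd', new Date())

console.log(format(date, 'yyyy-MM-dd')) // 2024-01-31
```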
@@ -154,7 +177,7 @@ var writeToMd = async (directory, mappedResponse, logger) => {
  const filePath = import_node_path.default.join(targetDir, `${cleanSlug}.md`);
  const fileContent = `---
  title: ${page.pageTitle}
- date: ${page.createdAt}
+ date: ${page.date}
  ---
  ${page.content}`;
  const contentToSave = fileContent.trim();
@@ -173,6 +196,7 @@ function logseqIntegration(options) {
  const {
  token,
  targets,
+ dateRef,
  apiUrl = "http://127.0.0.1:12315/api",
  pollingInterval = 1e3
  } = options;
@@ -180,7 +204,7 @@ function logseqIntegration(options) {
  name: "astro-logseq-publish",
  hooks: {
  "astro:server:setup": ({ logger }) => {
- logger.info("\u{1F680} Logseq Poller Started (Every 3s)");
+ logger.info(`\u{1F680} Logseq Poller Started (Every ${pollingInterval}ms)`);
  const api = (0, import_wretch.default)().url(apiUrl).headers({
  "Content-Type": "application/json",
  Authorization: `Bearer ${token}`
@@ -188,7 +212,9 @@ function logseqIntegration(options) {
  setInterval(async () => {
  try {
  await Promise.all(
- targets.map((target) => processTagGroup(api, target, logger))
+ targets.map(
+ (target) => processTagGroup(api, dateRef, target, logger)
+ )
  );
  } catch (e) {
  logger.error(e.message || String(e));
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/utils/has-content-changed.ts","../src/utils/process-tag-group.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport wretch from 'wretch'\n\nimport { LogseqIntegrationOptions } from './types'\nimport { processTagGroup } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n targets,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n await Promise.all(\n targets.map((target) => processTagGroup(api, target, logger)),\n )\n } catch (e: any) {\n logger.error(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { format } from 'date-fns'\nimport { Wretch } from 'wretch/types'\n\nimport { getPageBlocksTree, getRawResponse } from '../api'\nimport { MappedResponse, TagTarget } from '../types'\nimport { recursivelyGetContent, writeToMd } from '.'\n\nexport const processTagGroup = async (\n api: Wretch,\n target: TagTarget,\n logger: any,\n) => {\n const { tag, directory } = target\n\n const rawResponse = await getRawResponse(api, tag, logger)\n if (!rawResponse || rawResponse.length === 0) return\n\n const mappedResponse: MappedResponse[] = []\n\n for (const page of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, page, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(page['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(page['updated-at'], 'yyyy-MM-dd'),\n pageTitle: page.title,\n content: recursivelyGetContent(pbt),\n })\n }\n\n await writeToMd(directory, mappedResponse, logger)\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqPageResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n page: LogseqPageResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [page.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqPageResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent [:block/uuid]}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await 
api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqPageResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. ${String(e)}`,\n )\n }\n}\n","import { ContentBlock } from '../types'\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = ''\n const indent = ' '.repeat(depth)\n for (const block of contentBlocks) {\n const text = (block.fullTitle ?? '').replace(\n /(`[^`]+`)|\\[\\[(.*?)\\]\\]/g,\n (_match, code, linkContent) => {\n if (code) return code\n const isCodeDisplay =\n block[':logseq.property.node/display-type'] === 'code'\n return isCodeDisplay\n ? `\\`\\`\\`\\n[[${linkContent}]]\\n\\`\\`\\``\n : linkContent\n },\n )\n if (depth === 0) {\n content += `\\n\\n${text}`\n } else {\n content += `\\n${indent}- ${text}`\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1)\n }\n }\n return content\n}\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;;;ACDnB,sBAAe;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,gBAAAC,QAAG,SAASD,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACTA,sBAAuB;;;ACKhB,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAWoB,GAAG;AAErC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAA6B,KAAM,CAAC;AAAA,EAE3C,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;AF9BO,IAAM,kBAAkB,OAC7B,KACA,QACA,WACG;AACH,QAAM,EAAE,KAAK,UAAU,IAAI;AAE3B,QAAM,cAAc,MAAM,eAAe,KAAK,KAAK,MAAM;AACzD,MAAI,CAAC,eAAe,YAAY,WAAW,EAAG;AAE9C,QAAM,iBAAmC,CAAC;AAE1C,aAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,UAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,QAAI,CAAC,IAAK;AAEV,mBAAe,KAAK;AAAA,MAClB,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,KAAK;AAAA,MAChB,SAAS,sBAAsB,GAAG;AAAA,IACpC,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,WAAW,gBAAgB,MAAM;AACnD;;;AG9BO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,QAAQ,MAAM,aAAa,IAAI;AAAA,MACnC;AAAA,MACA,CAAC,QAAQ,MAAM,gBAAgB;AAC7B,YAAI,KAAM,QAAO;AACjB,cAAM,gBACJ,MAAM,oCAAoC,MAAM;AAClD,eAAO,gBACH;AAAA,IAAa,WAAW;AAAA,UACxB;AAAA,MACN;AAAA,IACF;AACA,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;AC9BA,IAAAE,mBAAe;AACf,uBAAiB;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,iBAAAC,QAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAM,iBAAAC,QAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,iBAAAD,QAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAM,iBAAAC,QAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AN9Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,UAAM,cAAAC,SAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,QAAQ;AAAA,cACZ,QAAQ,IAAI,CAAC,WAAW,gBAAgB,KAAK,QAAQ,MAAM,CAAC;AAAA,YAC9D;AAAA,UACF,SAAS,GAAQ;AACf,mBAAO,MAAM,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACrC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","import_promises","path","fs","wretch"]}
+ {"version":3,"sources":["../src/index.ts","../src/utils/has-content-changed.ts","../src/utils/process-tag-group.ts","../src/api/get-date-prop-ident.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport wretch from 'wretch'\n\nimport { LogseqIntegrationOptions } from './types'\nimport { processTagGroup } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n targets,\n dateRef,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info(`🚀 Logseq Poller Started (Every ${pollingInterval}ms)`)\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n await Promise.all(\n targets.map((target) =>\n processTagGroup(api, dateRef, target, logger),\n ),\n )\n } catch (e: any) {\n logger.error(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { AstroIntegrationLogger } from 'astro'\nimport { format, parse } from 'date-fns'\nimport { Wretch } from 'wretch/types'\n\nimport { getDatePropPage, getPageBlocksTree, getRawResponse } from '../api'\nimport { MappedResponse, TagTarget } from '../types'\nimport { recursivelyGetContent, writeToMd } from '.'\n\nexport const processTagGroup = async (\n api: Wretch,\n dateRef: string,\n target: TagTarget,\n logger: AstroIntegrationLogger,\n) => {\n const { tag, directory } = target\n const mappedResponse: MappedResponse[] = []\n\n const datePropPageIdent = await getDatePropPage(api, dateRef, logger)\n\n const rawResponse = await getRawResponse(api, datePropPageIdent, tag, logger)\n if (!rawResponse || rawResponse.length === 0) return\n\n for (const page of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, page, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(page['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(page['updated-at'], 'yyyy-MM-dd'),\n pageTitle: page.title,\n content: recursivelyGetContent(pbt),\n ...(datePropPageIdent && {\n date: parse(\n String(page[datePropPageIdent!]['journal-day']),\n 'yyyyMMdd',\n new Date(),\n ),\n }),\n })\n await writeToMd(directory, mappedResponse, logger)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { LogseqPageResponse } from 'src/types'\nimport { Wretch } from 'wretch/types'\n\nexport const getDatePropPage = async (\n api: Wretch,\n dateRef: string,\n logger: AstroIntegrationLogger,\n) => {\n try {\n const datePropPage = await api\n .post({\n method: 'logseq.Editor.getPage',\n args: [dateRef],\n })\n .json<LogseqPageResponse>()\n const datePropPageIdent = datePropPage.ident\n return datePropPageIdent\n } catch (e) {\n logger.info(`Unable to get page for date reference: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } 
from 'wretch/types'\n\nimport { ContentBlock, LogseqPageResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n page: LogseqPageResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [page.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqPageResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n datePropPageIdent: string | undefined,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n ${datePropPageIdent && `{${datePropPageIdent} [:block/journal-day]}`}\n {:block/_parent [:block/uuid]}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqPageResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. ${String(e)}`,\n )\n }\n}\n","import { ContentBlock } from '../types'\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = ''\n const indent = ' '.repeat(depth)\n for (const block of contentBlocks) {\n const text = (block.fullTitle ?? '').replace(\n /(`[^`]+`)|\\[\\[(.*?)\\]\\]/g,\n (_match, code, linkContent) => {\n if (code) return code\n const isCodeDisplay =\n block[':logseq.property.node/display-type'] === 'code'\n return isCodeDisplay\n ? 
`\\`\\`\\`\\n[[${linkContent}]]\\n\\`\\`\\``\n : linkContent\n },\n )\n if (depth === 0) {\n content += `\\n\\n${text}`\n } else {\n content += `\\n${indent}- ${text}`\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1)\n }\n }\n return content\n}\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.date}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, '')\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;;;ACDnB,sBAAe;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,gBAAAC,QAAG,SAASD,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACRA,sBAA8B;;;ACGvB,IAAM,kBAAkB,OAC7B,KACA,SACA,WACG;AACH,MAAI;AACF,UAAM,eAAe,MAAM,IACxB,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,OAAO;AAAA,IAChB,CAAC,EACA,KAAyB;AAC5B,UAAM,oBAAoB,aAAa;AACvC,WAAO;AAAA,EACT,SAAS,GAAG;AACV,WAAO,KAAK,0CAA0C,OAAO,CAAC,CAAC,EAAE;AAAA,EACnE;AACF;;;AChBO,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,mBACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAOU,qBAAqB,IAAI,iBAAiB,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA,mCAKzD,GAAG;AAEpC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAA6B,KAAM,CAAC;AAAA,EAE3C,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;AH/BO,IAAM,kBAAkB,OAC7B,KACA,SACA,QACA,WACG;AACH,QAAM,EAAE,KAAK,UAAU,IAAI;AAC3B,QAAM,iBAAmC,CAAC;AAE1C,QAAM,oBAAoB,MAAM,gBAAgB,KAAK,SAAS,MAAM;AAEpE,QAAM,cAAc,MAAM,eAAe,KAAK,mBAAmB,KAAK,MAAM;AAC5E,MAAI,CAAC,eAAe,YAAY,WAAW,EAAG;AAE9C,aAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,UAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,QAAI,CAAC,IAAK;AAEV,mBAAe,KAAK;AAAA,MAClB,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,KAAK;AAAA,MAChB,SAAS,sBAAsB,GAAG;AAAA,MAClC,GAAI,qBAAqB;AAAA,QACvB,UAAM;AAAA,UACJ,OAAO,KAAK,iBAAkB,EAAE,aAAa,CAAC;AAAA,UAC9C;AAAA,UACA,oBAAI,KAAK;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AACD,UAAM,UAAU,WAAW,gBAAgB,MAAM;AAAA,EACnD;AACF;;;AIvCO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,QAAQ,MAAM,aAAa,IAAI;AAAA,MACnC;AAAA,MACA,CAAC,QAAQ,MAAM,gBAAgB;AAC7B,YAAI,KAAM,QAAO;AACjB,cAAM,gBACJ,MAAM,oCAAoC,MAAM;AACl
D,eAAO,gBACH;AAAA,IAAa,WAAW;AAAA,UACxB;AAAA,MACN;AAAA,IACF;AACA,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;AC9BA,IAAAE,mBAAe;AACf,uBAAiB;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,iBAAAC,QAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAM,iBAAAC,QAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,iBAAAD,QAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,IAAI;AAAA;AAAA,EAEf,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAM,iBAAAC,QAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AP9Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,0CAAmC,eAAe,KAAK;AAEnE,cAAM,UAAM,cAAAC,SAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,QAAQ;AAAA,cACZ,QAAQ;AAAA,gBAAI,CAAC,WACX,gBAAgB,KAAK,SAAS,QAAQ,MAAM;AAAA,cAC9C;AAAA,YACF;AAAA,UACF,SAAS,GAAQ;AACf,mBAAO,MAAM,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACrC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","import_promises","path","fs","wretch"]}
package/dist/index.d.cts CHANGED
@@ -7,6 +7,7 @@ interface TagTarget {
  interface LogseqIntegrationOptions {
  token: string;
  targets: TagTarget[];
+ dateRef: string;
  apiUrl?: string;
  pollingInterval?: number;
  }
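Read together with the unchanged context lines, the options interface now looks like this. Because `dateRef` is declared without a `?`, existing 1.2.0 configs that omit it will no longer type-check:

```ts
interface LogseqIntegrationOptions {
  token: string;
  targets: TagTarget[];
  dateRef: string; // new in 1.3.0, required
  apiUrl?: string;
  pollingInterval?: number;
}
```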
package/dist/index.d.ts CHANGED
@@ -7,6 +7,7 @@ interface TagTarget {
  interface LogseqIntegrationOptions {
  token: string;
  targets: TagTarget[];
+ dateRef: string;
  apiUrl?: string;
  pollingInterval?: number;
  }
package/dist/index.js CHANGED
@@ -13,7 +13,21 @@ var hasContentChanged = async (path2, newContent) => {
  };
 
  // src/utils/process-tag-group.ts
- import { format } from "date-fns";
+ import { format, parse } from "date-fns";
+
+ // src/api/get-date-prop-ident.ts
+ var getDatePropPage = async (api, dateRef, logger) => {
+ try {
+ const datePropPage = await api.post({
+ method: "logseq.Editor.getPage",
+ args: [dateRef]
+ }).json();
+ const datePropPageIdent = datePropPage.ident;
+ return datePropPageIdent;
+ } catch (e) {
+ logger.info(`Unable to get page for date reference: ${String(e)}`);
+ }
+ };
 
  // src/api/get-page-blocks-tree.ts
  var getPageBlocksTree = async (api, page, logger) => {
@@ -28,19 +42,20 @@ var getPageBlocksTree = async (api, page, logger) => {
  };
 
  // src/api/get-raw-response.ts
- var getRawResponse = async (api, tag, logger) => {
+ var getRawResponse = async (api, datePropPageIdent, tag, logger) => {
  const query = `
- [:find (pull ?p
- [:block/name
- :block/full-title
- :block/created-at
- :block/updated-at
- :block/title
- {:block/_parent [:block/uuid]}])
- :where
- [?p :block/name]
- [?p :block/tags ?t]
- [?t :block/name "${tag}"]]`;
+ [:find (pull ?p
+ [:block/name
+ :block/full-title
+ :block/created-at
+ :block/updated-at
+ :block/title
+ ${datePropPageIdent && `{${datePropPageIdent} [:block/journal-day]}`}
+ {:block/_parent [:block/uuid]}])
+ :where
+ [?p :block/name]
+ [?p :block/tags ?t]
+ [?t :block/name "${tag}"]]`;
  try {
  return await api.post({
  method: "logseq.DB.datascriptQuery",
@@ -54,11 +69,12 @@ var getRawResponse = async (api, tag, logger) => {
  };
 
  // src/utils/process-tag-group.ts
- var processTagGroup = async (api, target, logger) => {
+ var processTagGroup = async (api, dateRef, target, logger) => {
  const { tag, directory } = target;
- const rawResponse = await getRawResponse(api, tag, logger);
- if (!rawResponse || rawResponse.length === 0) return;
  const mappedResponse = [];
+ const datePropPageIdent = await getDatePropPage(api, dateRef, logger);
+ const rawResponse = await getRawResponse(api, datePropPageIdent, tag, logger);
+ if (!rawResponse || rawResponse.length === 0) return;
  for (const page of rawResponse.flat()) {
  const pbt = await getPageBlocksTree(api, page, logger);
  if (!pbt) continue;
@@ -66,10 +82,17 @@ var processTagGroup = async (api, target, logger) => {
  createdAt: format(page["created-at"], "yyyy-MM-dd"),
  updatedAt: format(page["updated-at"], "yyyy-MM-dd"),
  pageTitle: page.title,
- content: recursivelyGetContent(pbt)
+ content: recursivelyGetContent(pbt),
+ ...datePropPageIdent && {
+ date: parse(
+ String(page[datePropPageIdent]["journal-day"]),
+ "yyyyMMdd",
+ /* @__PURE__ */ new Date()
+ )
+ }
  });
+ await writeToMd(directory, mappedResponse, logger);
  }
- await writeToMd(directory, mappedResponse, logger);
  };
 
  // src/utils/recursively-get-content.ts
@@ -120,7 +143,7 @@ var writeToMd = async (directory, mappedResponse, logger) => {
  const filePath = path.join(targetDir, `${cleanSlug}.md`);
  const fileContent = `---
  title: ${page.pageTitle}
- date: ${page.createdAt}
+ date: ${page.date}
  ---
  ${page.content}`;
  const contentToSave = fileContent.trim();
@@ -139,6 +162,7 @@ function logseqIntegration(options) {
  const {
  token,
  targets,
+ dateRef,
  apiUrl = "http://127.0.0.1:12315/api",
  pollingInterval = 1e3
  } = options;
@@ -146,7 +170,7 @@ function logseqIntegration(options) {
  name: "astro-logseq-publish",
  hooks: {
  "astro:server:setup": ({ logger }) => {
- logger.info("\u{1F680} Logseq Poller Started (Every 3s)");
+ logger.info(`\u{1F680} Logseq Poller Started (Every ${pollingInterval}ms)`);
  const api = wretch().url(apiUrl).headers({
  "Content-Type": "application/json",
  Authorization: `Bearer ${token}`
@@ -154,7 +178,9 @@ function logseqIntegration(options) {
  setInterval(async () => {
  try {
  await Promise.all(
- targets.map((target) => processTagGroup(api, target, logger))
+ targets.map(
+ (target) => processTagGroup(api, dateRef, target, logger)
+ )
  );
  } catch (e) {
  logger.error(e.message || String(e));
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/utils/has-content-changed.ts","../src/utils/process-tag-group.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport wretch from 'wretch'\n\nimport { LogseqIntegrationOptions } from './types'\nimport { processTagGroup } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n targets,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n await Promise.all(\n targets.map((target) => processTagGroup(api, target, logger)),\n )\n } catch (e: any) {\n logger.error(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { format } from 'date-fns'\nimport { Wretch } from 'wretch/types'\n\nimport { getPageBlocksTree, getRawResponse } from '../api'\nimport { MappedResponse, TagTarget } from '../types'\nimport { recursivelyGetContent, writeToMd } from '.'\n\nexport const processTagGroup = async (\n api: Wretch,\n target: TagTarget,\n logger: any,\n) => {\n const { tag, directory } = target\n\n const rawResponse = await getRawResponse(api, tag, logger)\n if (!rawResponse || rawResponse.length === 0) return\n\n const mappedResponse: MappedResponse[] = []\n\n for (const page of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, page, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(page['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(page['updated-at'], 'yyyy-MM-dd'),\n pageTitle: page.title,\n content: recursivelyGetContent(pbt),\n })\n }\n\n await writeToMd(directory, mappedResponse, logger)\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqPageResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n page: LogseqPageResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [page.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqPageResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent [:block/uuid]}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await 
api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqPageResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. ${String(e)}`,\n )\n }\n}\n","import { ContentBlock } from '../types'\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = ''\n const indent = ' '.repeat(depth)\n for (const block of contentBlocks) {\n const text = (block.fullTitle ?? '').replace(\n /(`[^`]+`)|\\[\\[(.*?)\\]\\]/g,\n (_match, code, linkContent) => {\n if (code) return code\n const isCodeDisplay =\n block[':logseq.property.node/display-type'] === 'code'\n return isCodeDisplay\n ? `\\`\\`\\`\\n[[${linkContent}]]\\n\\`\\`\\``\n : linkContent\n },\n )\n if (depth === 0) {\n content += `\\n\\n${text}`\n } else {\n content += `\\n${indent}- ${text}`\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1)\n }\n }\n return content\n}\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";AACA,OAAO,YAAY;;;ACDnB,OAAO,QAAQ;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,GAAG,SAASA,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACTA,SAAS,cAAc;;;ACKhB,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAWoB,GAAG;AAErC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAA6B,KAAM,CAAC;AAAA,EAE3C,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;AF9BO,IAAM,kBAAkB,OAC7B,KACA,QACA,WACG;AACH,QAAM,EAAE,KAAK,UAAU,IAAI;AAE3B,QAAM,cAAc,MAAM,eAAe,KAAK,KAAK,MAAM;AACzD,MAAI,CAAC,eAAe,YAAY,WAAW,EAAG;AAE9C,QAAM,iBAAmC,CAAC;AAE1C,aAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,UAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,QAAI,CAAC,IAAK;AAEV,mBAAe,KAAK;AAAA,MAClB,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,KAAK;AAAA,MAChB,SAAS,sBAAsB,GAAG;AAAA,IACpC,CAAC;AAAA,EACH;AAEA,QAAM,UAAU,WAAW,gBAAgB,MAAM;AACnD;;;AG9BO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,QAAQ,MAAM,aAAa,IAAI;AAAA,MACnC;AAAA,MACA,CAAC,QAAQ,MAAM,gBAAgB;AAC7B,YAAI,KAAM,QAAO;AACjB,cAAM,gBACJ,MAAM,oCAAoC,MAAM;AAClD,eAAO,gBACH;AAAA,IAAa,WAAW;AAAA,UACxB;AAAA,MACN;AAAA,IACF;AACA,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;AC9BA,OAAOC,SAAQ;AACf,OAAO,UAAU;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,KAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAMC,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,KAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAMA,IAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AN9Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,MAAM,OAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,QAAQ;AAAA,cACZ,QAAQ,IAAI,CAAC,WAAW,gBAAgB,KAAK,QAAQ,MAAM,CAAC;AAAA,YAC9D;AAAA,UACF,SAAS,GAAQ;AACf,mBAAO,MAAM,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACrC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","fs"]}
+ {"version":3,"sources":["../src/index.ts","../src/utils/has-content-changed.ts","../src/utils/process-tag-group.ts","../src/api/get-date-prop-ident.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport wretch from 'wretch'\n\nimport { LogseqIntegrationOptions } from './types'\nimport { processTagGroup } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n targets,\n dateRef,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info(`🚀 Logseq Poller Started (Every ${pollingInterval}ms)`)\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n await Promise.all(\n targets.map((target) =>\n processTagGroup(api, dateRef, target, logger),\n ),\n )\n } catch (e: any) {\n logger.error(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { AstroIntegrationLogger } from 'astro'\nimport { format, parse } from 'date-fns'\nimport { Wretch } from 'wretch/types'\n\nimport { getDatePropPage, getPageBlocksTree, getRawResponse } from '../api'\nimport { MappedResponse, TagTarget } from '../types'\nimport { recursivelyGetContent, writeToMd } from '.'\n\nexport const processTagGroup = async (\n api: Wretch,\n dateRef: string,\n target: TagTarget,\n logger: AstroIntegrationLogger,\n) => {\n const { tag, directory } = target\n const mappedResponse: MappedResponse[] = []\n\n const datePropPageIdent = await getDatePropPage(api, dateRef, logger)\n\n const rawResponse = await getRawResponse(api, datePropPageIdent, tag, logger)\n if (!rawResponse || rawResponse.length === 0) return\n\n for (const page of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, page, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(page['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(page['updated-at'], 'yyyy-MM-dd'),\n pageTitle: page.title,\n content: recursivelyGetContent(pbt),\n ...(datePropPageIdent && {\n date: parse(\n String(page[datePropPageIdent!]['journal-day']),\n 'yyyyMMdd',\n new Date(),\n ),\n }),\n })\n await writeToMd(directory, mappedResponse, logger)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { LogseqPageResponse } from 'src/types'\nimport { Wretch } from 'wretch/types'\n\nexport const getDatePropPage = async (\n api: Wretch,\n dateRef: string,\n logger: AstroIntegrationLogger,\n) => {\n try {\n const datePropPage = await api\n .post({\n method: 'logseq.Editor.getPage',\n args: [dateRef],\n })\n .json<LogseqPageResponse>()\n const datePropPageIdent = datePropPage.ident\n return datePropPageIdent\n } catch (e) {\n logger.info(`Unable to get page for date reference: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } 
from 'wretch/types'\n\nimport { ContentBlock, LogseqPageResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n page: LogseqPageResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [page.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqPageResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n datePropPageIdent: string | undefined,\n tag: string,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n ${datePropPageIdent && `{${datePropPageIdent} [:block/journal-day]}`}\n {:block/_parent [:block/uuid]}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"${tag}\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqPageResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. ${String(e)}`,\n )\n }\n}\n","import { ContentBlock } from '../types'\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = ''\n const indent = ' '.repeat(depth)\n for (const block of contentBlocks) {\n const text = (block.fullTitle ?? '').replace(\n /(`[^`]+`)|\\[\\[(.*?)\\]\\]/g,\n (_match, code, linkContent) => {\n if (code) return code\n const isCodeDisplay =\n block[':logseq.property.node/display-type'] === 'code'\n return isCodeDisplay\n ? 
`\\`\\`\\`\\n[[${linkContent}]]\\n\\`\\`\\``\n : linkContent\n },\n )\n if (depth === 0) {\n content += `\\n\\n${text}`\n } else {\n content += `\\n${indent}- ${text}`\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1)\n }\n }\n return content\n}\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.date}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, '')\n"],"mappings":";AACA,OAAO,YAAY;;;ACDnB,OAAO,QAAQ;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,GAAG,SAASA,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACRA,SAAS,QAAQ,aAAa;;;ACGvB,IAAM,kBAAkB,OAC7B,KACA,SACA,WACG;AACH,MAAI;AACF,UAAM,eAAe,MAAM,IACxB,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,OAAO;AAAA,IAChB,CAAC,EACA,KAAyB;AAC5B,UAAM,oBAAoB,aAAa;AACvC,WAAO;AAAA,EACT,SAAS,GAAG;AACV,WAAO,KAAK,0CAA0C,OAAO,CAAC,CAAC,EAAE;AAAA,EACnE;AACF;;;AChBO,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,mBACA,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAOU,qBAAqB,IAAI,iBAAiB,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA,mCAKzD,GAAG;AAEpC,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAA6B,KAAM,CAAC;AAAA,EAE3C,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;AH/BO,IAAM,kBAAkB,OAC7B,KACA,SACA,QACA,WACG;AACH,QAAM,EAAE,KAAK,UAAU,IAAI;AAC3B,QAAM,iBAAmC,CAAC;AAE1C,QAAM,oBAAoB,MAAM,gBAAgB,KAAK,SAAS,MAAM;AAEpE,QAAM,cAAc,MAAM,eAAe,KAAK,mBAAmB,KAAK,MAAM;AAC5E,MAAI,CAAC,eAAe,YAAY,WAAW,EAAG;AAE9C,aAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,UAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,QAAI,CAAC,IAAK;AAEV,mBAAe,KAAK;AAAA,MAClB,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,MAClD,WAAW,KAAK;AAAA,MAChB,SAAS,sBAAsB,GAAG;AAAA,MAClC,GAAI,qBAAqB;AAAA,QACvB,MAAM;AAAA,UACJ,OAAO,KAAK,iBAAkB,EAAE,aAAa,CAAC;AAAA,UAC9C;AAAA,UACA,oBAAI,KAAK;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AACD,UAAM,UAAU,WAAW,gBAAgB,MAAM;AAAA,EACnD;AACF;;;AIvCO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,QAAQ,MAAM,aAAa,IAAI;AAAA,MACnC;AAAA,MACA,CAAC,QAAQ,MAAM,gBAAgB;AAC7B,YAAI,KAAM,QAAO;AACjB,cAAM,gBACJ,MAAM,oCAAoC,MAAM;AAClD,eAAO,gBACH;AAAA,IAAa,WAAW;AAAA,UACxB;AAAA,MACN
;AAAA,IACF;AACA,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;AC9BA,OAAOC,SAAQ;AACf,OAAO,UAAU;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,KAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAMC,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,KAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,IAAI;AAAA;AAAA,EAEf,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAMA,IAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AP9Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,EACpB,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,0CAAmC,eAAe,KAAK;AAEnE,cAAM,MAAM,OAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,QAAQ;AAAA,cACZ,QAAQ;AAAA,gBAAI,CAAC,WACX,gBAAgB,KAAK,SAAS,QAAQ,MAAM;AAAA,cAC9C;AAAA,YACF;AAAA,UACF,SAAS,GAAQ;AACf,mBAAO,MAAM,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACrC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","fs"]}
package/package.json CHANGED
@@ -73,5 +73,5 @@
  "typescript": "^5.5.4",
  "typescript-eslint": "^8.51.0"
  },
- "version": "1.2.0"
+ "version": "1.3.0"
  }