astroplugin-logseq 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +1 -0
- package/dist/index.cjs +185 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +12 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.js +154 -0
- package/dist/index.js.map +1 -0
- package/package.json +57 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2026 benjypng
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1 @@
+# astroplugin-logseq
package/dist/index.cjs
ADDED
@@ -0,0 +1,185 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  default: () => logseqIntegration
+});
+module.exports = __toCommonJS(index_exports);
+var import_date_fns = require("date-fns");
+var import_wretch = __toESM(require("wretch"), 1);
+
+// src/api/get-page-blocks-tree.ts
+var getPageBlocksTree = async (api, item, logger) => {
+  try {
+    return await api.post({
+      method: "logseq.Editor.getPageBlocksTree",
+      args: [item.title.toLowerCase()]
+    }).json();
+  } catch (e) {
+    logger.info(`Unable to get page blocks tree: ${String(e)}`);
+  }
+};
+
+// src/api/get-raw-response.ts
+var getRawResponse = async (api, logger) => {
+  const query = `
+  [:find (pull ?p
+  [:block/name
+  :block/full-title
+  :block/created-at
+  :block/updated-at
+  :block/title
+  {:block/_parent ...}])
+  :where
+  [?p :block/name]
+  [?p :block/tags ?t]
+  [?t :block/name "public"]]`;
+  try {
+    return await api.post({
+      method: "logseq.DB.datascriptQuery",
+      args: [query]
+    }).json() ?? [];
+  } catch (e) {
+    logger.info(
+      `Unable to query Logseq. Check if API server is running. ${String(e)}`
+    );
+  }
+};
+
+// src/utils/has-content-changed.ts
+var import_promises = __toESM(require("fs/promises"), 1);
+var hasContentChanged = async (path2, newContent) => {
+  try {
+    const currentContent = await import_promises.default.readFile(path2, "utf-8");
+    return currentContent !== newContent;
+  } catch {
+    return true;
+  }
+};
+
+// src/utils/recursively-get-content.ts
+var recursivelyGetContent = (contentBlocks, depth = 0) => {
+  let content = "";
+  const indent = " ".repeat(depth);
+  for (const block of contentBlocks) {
+    const text = block.title || "";
+    if (depth === 0) {
+      content += `
+
+${text}`;
+    } else {
+      content += `
+${indent}- ${text}`;
+    }
+    if (block.children && block.children.length > 0) {
+      content += recursivelyGetContent(block.children, depth + 1);
+    }
+  }
+  return content;
+};
+
+// src/utils/write-to-md.ts
+var import_promises2 = __toESM(require("fs/promises"), 1);
+var import_node_path = __toESM(require("path"), 1);
+
+// src/utils/get-clean-slug.ts
+var getCleanSlug = (page) => page.pageTitle.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/(^-|-$)/g, "");
+
+// src/utils/write-to-md.ts
+var writeToMd = async (directory, mappedResponse, logger) => {
+  const targetDir = import_node_path.default.resolve(process.cwd(), directory);
+  try {
+    await import_promises2.default.mkdir(targetDir, { recursive: true });
+    await Promise.all(
+      mappedResponse.map(async (page) => {
+        const cleanSlug = getCleanSlug(page);
+        const filePath = import_node_path.default.join(targetDir, `${cleanSlug}.md`);
+        const fileContent = `---
+title: ${page.pageTitle}
+date: ${page.createdAt}
+---
+${page.content}`;
+        const contentToSave = fileContent.trim();
+        if (await hasContentChanged(filePath, contentToSave)) {
+          await import_promises2.default.writeFile(filePath, contentToSave, "utf-8");
+        }
+      })
+    );
+  } catch (e) {
+    logger.info(`Unable to create MD files: ${String(e)}`);
+  }
+};
+
+// src/index.ts
+function logseqIntegration(options) {
+  const {
+    token,
+    apiUrl = "http://127.0.0.1:12315/api",
+    pollingInterval = 1e3,
+    directory = "src/content/docs/blog"
+  } = options;
+  return {
+    name: "astro-logseq-publish",
+    hooks: {
+      "astro:server:setup": ({ logger }) => {
+        logger.info("\u{1F680} Logseq Poller Started (Every 3s)");
+        const api = (0, import_wretch.default)().url(apiUrl).headers({
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${token}`
+        });
+        setInterval(async () => {
+          try {
+            const rawResponse = await getRawResponse(api, logger);
+            if (!rawResponse) return;
+            const mappedResponse = [];
+            for (const item of rawResponse.flat()) {
+              const pbt = await getPageBlocksTree(api, item, logger);
+              if (!pbt) continue;
+              mappedResponse.push({
+                createdAt: (0, import_date_fns.format)(item["created-at"], "yyyy-MM-dd"),
+                updatedAt: (0, import_date_fns.format)(item["updated-at"], "yyyy-MM-dd"),
+                pageTitle: item.title,
+                content: recursivelyGetContent(pbt)
+              });
+            }
+            await writeToMd(directory, mappedResponse, logger);
+          } catch (e) {
+            logger.info(e.message || String(e));
+          }
+        }, pollingInterval);
+      },
+      "astro:build:setup": async ({ logger }) => {
+        logger.info("Building from Logseq...");
+      }
+    }
+  };
+}
+//# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/has-content-changed.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport { format } from 'date-fns'\nimport wretch from 'wretch'\n\nimport { getPageBlocksTree, getRawResponse } from './api'\nimport { LogseqIntegrationOptions, MappedResponse } from './types'\nimport { recursivelyGetContent, writeToMd } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n directory = 'src/content/docs/blog',\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n const rawResponse = await getRawResponse(api, logger)\n if (!rawResponse) return\n\n const mappedResponse: MappedResponse[] = []\n for (const item of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, item, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(item['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(item['updated-at'], 'yyyy-MM-dd'),\n pageTitle: item.title,\n content: recursivelyGetContent(pbt),\n })\n }\n\n await writeToMd(directory, mappedResponse, logger)\n } catch (e: any) {\n logger.info(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n item: LogseqResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [item.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent ...}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"public\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. 
${String(e)}`,\n )\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { ContentBlock } from \"../types\";\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = \"\";\n const indent = \" \".repeat(depth);\n for (const block of contentBlocks) {\n const text = block.title || \"\";\n if (depth === 0) {\n content += `\\n\\n${text}`;\n } else {\n content += `\\n${indent}- ${text}`;\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1);\n }\n }\n return content;\n};\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAAuB;AACvB,oBAAmB;;;ACGZ,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAad,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAAyB,KAAM,CAAC;AAAA,EAEvC,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;ACpCA,sBAAe;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,gBAAAC,QAAG,SAASD,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACPO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;ACpBA,IAAAE,mBAAe;AACf,uBAAiB;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,iBAAAC,QAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAM,iBAAAC,QAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,iBAAAD,QAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAM,iBAAAC,QAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AL5Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,IAClB,YAAY;AAAA,EACd,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,UAAM,cAAAC,SAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,cAAc,MAAM,eAAe,KAAK,MAAM;AACpD,gBAAI,CAAC,YAAa;AAElB,kBAAM,iBAAmC,CAAC;AAC1C,uBAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,oBAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,kBAAI,CAAC,IAAK;AAEV,6BAAe,KAAK;AAAA,gBAClB,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,eAAW,wBAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,WAAW,KAAK;AAAA,gBAChB,SAAS,sBAAsB,GAAG;AAAA,cACpC,CAAC;AAAA,YACH;AAEA,kBAAM,UAAU,WAAW,gBAAgB,MAAM;AAAA,UACnD,SAAS,GAAQ;AACf,mBAAO,KAAK,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACpC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","import_promises","path","fs","wretch"]}
package/dist/index.d.cts
ADDED
@@ -0,0 +1,12 @@
+import { AstroIntegration } from 'astro';
+
+interface LogseqIntegrationOptions {
+    token: string;
+    apiUrl?: string;
+    pollingInterval?: number;
+    directory?: string;
+}
+
+declare function logseqIntegration(options: LogseqIntegrationOptions): AstroIntegration;
+
+export { logseqIntegration as default };
package/dist/index.d.ts
ADDED
@@ -0,0 +1,12 @@
+import { AstroIntegration } from 'astro';
+
+interface LogseqIntegrationOptions {
+    token: string;
+    apiUrl?: string;
+    pollingInterval?: number;
+    directory?: string;
+}
+
+declare function logseqIntegration(options: LogseqIntegrationOptions): AstroIntegration;
+
+export { logseqIntegration as default };
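The declaration above is the package's whole public surface: a default-exported factory that takes a `LogseqIntegrationOptions` object and returns an `AstroIntegration`. For orientation, here is a minimal sketch of how a consumer might wire it into `astro.config.mjs`, based only on the options interface and the defaults visible in `dist/index.js`; the env-variable name is a placeholder, not part of the package.

```js
// astro.config.mjs — hypothetical consumer config, not shipped with this package
import { defineConfig } from 'astro/config';
import logseq from 'astroplugin-logseq';

export default defineConfig({
  integrations: [
    logseq({
      token: process.env.LOGSEQ_API_TOKEN, // required: Logseq HTTP API token (placeholder env var)
      apiUrl: 'http://127.0.0.1:12315/api', // optional: default shown in dist/index.js
      pollingInterval: 1000, // optional: ms between polls during `astro dev` (default 1000)
      directory: 'src/content/docs/blog', // optional: where Markdown files are written (default shown)
    }),
  ],
});
```

During `astro dev`, the `astro:server:setup` hook polls the Logseq API every `pollingInterval` milliseconds for pages tagged `public` and writes them as Markdown files into `directory`.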
package/dist/index.js
ADDED
@@ -0,0 +1,154 @@
+// src/index.ts
+import { format } from "date-fns";
+import wretch from "wretch";
+
+// src/api/get-page-blocks-tree.ts
+var getPageBlocksTree = async (api, item, logger) => {
+  try {
+    return await api.post({
+      method: "logseq.Editor.getPageBlocksTree",
+      args: [item.title.toLowerCase()]
+    }).json();
+  } catch (e) {
+    logger.info(`Unable to get page blocks tree: ${String(e)}`);
+  }
+};
+
+// src/api/get-raw-response.ts
+var getRawResponse = async (api, logger) => {
+  const query = `
+  [:find (pull ?p
+  [:block/name
+  :block/full-title
+  :block/created-at
+  :block/updated-at
+  :block/title
+  {:block/_parent ...}])
+  :where
+  [?p :block/name]
+  [?p :block/tags ?t]
+  [?t :block/name "public"]]`;
+  try {
+    return await api.post({
+      method: "logseq.DB.datascriptQuery",
+      args: [query]
+    }).json() ?? [];
+  } catch (e) {
+    logger.info(
+      `Unable to query Logseq. Check if API server is running. ${String(e)}`
+    );
+  }
+};
+
+// src/utils/has-content-changed.ts
+import fs from "fs/promises";
+var hasContentChanged = async (path2, newContent) => {
+  try {
+    const currentContent = await fs.readFile(path2, "utf-8");
+    return currentContent !== newContent;
+  } catch {
+    return true;
+  }
+};
+
+// src/utils/recursively-get-content.ts
+var recursivelyGetContent = (contentBlocks, depth = 0) => {
+  let content = "";
+  const indent = " ".repeat(depth);
+  for (const block of contentBlocks) {
+    const text = block.title || "";
+    if (depth === 0) {
+      content += `
+
+${text}`;
+    } else {
+      content += `
+${indent}- ${text}`;
+    }
+    if (block.children && block.children.length > 0) {
+      content += recursivelyGetContent(block.children, depth + 1);
+    }
+  }
+  return content;
+};
+
+// src/utils/write-to-md.ts
+import fs2 from "fs/promises";
+import path from "path";
+
+// src/utils/get-clean-slug.ts
+var getCleanSlug = (page) => page.pageTitle.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/(^-|-$)/g, "");
+
+// src/utils/write-to-md.ts
+var writeToMd = async (directory, mappedResponse, logger) => {
+  const targetDir = path.resolve(process.cwd(), directory);
+  try {
+    await fs2.mkdir(targetDir, { recursive: true });
+    await Promise.all(
+      mappedResponse.map(async (page) => {
+        const cleanSlug = getCleanSlug(page);
+        const filePath = path.join(targetDir, `${cleanSlug}.md`);
+        const fileContent = `---
+title: ${page.pageTitle}
+date: ${page.createdAt}
+---
+${page.content}`;
+        const contentToSave = fileContent.trim();
+        if (await hasContentChanged(filePath, contentToSave)) {
+          await fs2.writeFile(filePath, contentToSave, "utf-8");
+        }
+      })
+    );
+  } catch (e) {
+    logger.info(`Unable to create MD files: ${String(e)}`);
+  }
+};
+
+// src/index.ts
+function logseqIntegration(options) {
+  const {
+    token,
+    apiUrl = "http://127.0.0.1:12315/api",
+    pollingInterval = 1e3,
+    directory = "src/content/docs/blog"
+  } = options;
+  return {
+    name: "astro-logseq-publish",
+    hooks: {
+      "astro:server:setup": ({ logger }) => {
+        logger.info("\u{1F680} Logseq Poller Started (Every 3s)");
+        const api = wretch().url(apiUrl).headers({
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${token}`
+        });
+        setInterval(async () => {
+          try {
+            const rawResponse = await getRawResponse(api, logger);
+            if (!rawResponse) return;
+            const mappedResponse = [];
+            for (const item of rawResponse.flat()) {
+              const pbt = await getPageBlocksTree(api, item, logger);
+              if (!pbt) continue;
+              mappedResponse.push({
+                createdAt: format(item["created-at"], "yyyy-MM-dd"),
+                updatedAt: format(item["updated-at"], "yyyy-MM-dd"),
+                pageTitle: item.title,
+                content: recursivelyGetContent(pbt)
+              });
+            }
+            await writeToMd(directory, mappedResponse, logger);
+          } catch (e) {
+            logger.info(e.message || String(e));
+          }
+        }, pollingInterval);
+      },
+      "astro:build:setup": async ({ logger }) => {
+        logger.info("Building from Logseq...");
+      }
+    }
+  };
+}
+export {
+  logseqIntegration as default
+};
+//# sourceMappingURL=index.js.map
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/index.ts","../src/api/get-page-blocks-tree.ts","../src/api/get-raw-response.ts","../src/utils/has-content-changed.ts","../src/utils/recursively-get-content.ts","../src/utils/write-to-md.ts","../src/utils/get-clean-slug.ts"],"sourcesContent":["import { AstroIntegration } from 'astro'\nimport { format } from 'date-fns'\nimport wretch from 'wretch'\n\nimport { getPageBlocksTree, getRawResponse } from './api'\nimport { LogseqIntegrationOptions, MappedResponse } from './types'\nimport { recursivelyGetContent, writeToMd } from './utils'\n\nexport default function logseqIntegration(\n options: LogseqIntegrationOptions,\n): AstroIntegration {\n const {\n token,\n apiUrl = 'http://127.0.0.1:12315/api',\n pollingInterval = 1000,\n directory = 'src/content/docs/blog',\n } = options\n\n return {\n name: 'astro-logseq-publish',\n hooks: {\n 'astro:server:setup': ({ logger }) => {\n logger.info('🚀 Logseq Poller Started (Every 3s)')\n\n const api = wretch()\n .url(apiUrl)\n .headers({\n 'Content-Type': 'application/json',\n Authorization: `Bearer ${token}`,\n })\n\n setInterval(async () => {\n try {\n const rawResponse = await getRawResponse(api, logger)\n if (!rawResponse) return\n\n const mappedResponse: MappedResponse[] = []\n for (const item of rawResponse.flat()) {\n const pbt = await getPageBlocksTree(api, item, logger)\n if (!pbt) continue\n\n mappedResponse.push({\n createdAt: format(item['created-at'], 'yyyy-MM-dd'),\n updatedAt: format(item['updated-at'], 'yyyy-MM-dd'),\n pageTitle: item.title,\n content: recursivelyGetContent(pbt),\n })\n }\n\n await writeToMd(directory, mappedResponse, logger)\n } catch (e: any) {\n logger.info(e.message || String(e))\n }\n }, pollingInterval)\n },\n 'astro:build:setup': async ({ logger }) => {\n logger.info('Building from Logseq...')\n },\n },\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { ContentBlock, LogseqResponse } from '../types'\n\nexport const getPageBlocksTree = async (\n api: Wretch,\n item: LogseqResponse,\n logger: AstroIntegrationLogger,\n) => {\n try {\n return await api\n .post({\n method: 'logseq.Editor.getPageBlocksTree',\n args: [item.title.toLowerCase()],\n })\n .json<ContentBlock[]>()\n } catch (e) {\n logger.info(`Unable to get page blocks tree: ${String(e)}`)\n }\n}\n","import { AstroIntegrationLogger } from 'astro'\nimport { Wretch } from 'wretch/types'\n\nimport { LogseqResponse } from '../types'\n\nexport const getRawResponse = async (\n api: Wretch,\n logger: AstroIntegrationLogger,\n) => {\n const query = `\n [:find (pull ?p\n [:block/name\n :block/full-title\n :block/created-at\n :block/updated-at\n :block/title\n {:block/_parent ...}])\n :where\n [?p :block/name]\n [?p :block/tags ?t]\n [?t :block/name \"public\"]]`\n\n try {\n return (\n (await api\n .post({\n method: 'logseq.DB.datascriptQuery',\n args: [query],\n })\n .json<LogseqResponse[][]>()) ?? []\n )\n } catch (e) {\n logger.info(\n `Unable to query Logseq. Check if API server is running. 
${String(e)}`,\n )\n }\n}\n","import fs from \"node:fs/promises\";\n\nexport const hasContentChanged = async (path: string, newContent: string) => {\n try {\n const currentContent = await fs.readFile(path, \"utf-8\");\n return currentContent !== newContent;\n } catch {\n return true;\n }\n};\n","import { ContentBlock } from \"../types\";\n\nexport const recursivelyGetContent = (\n contentBlocks: ContentBlock[],\n depth = 0,\n) => {\n let content = \"\";\n const indent = \" \".repeat(depth);\n for (const block of contentBlocks) {\n const text = block.title || \"\";\n if (depth === 0) {\n content += `\\n\\n${text}`;\n } else {\n content += `\\n${indent}- ${text}`;\n }\n if (block.children && block.children.length > 0) {\n content += recursivelyGetContent(block.children, depth + 1);\n }\n }\n return content;\n};\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\n\nimport { AstroIntegrationLogger } from 'astro'\n\nimport { MappedResponse } from '../types'\nimport { hasContentChanged } from '.'\nimport { getCleanSlug } from './get-clean-slug'\n\nexport const writeToMd = async (\n directory: string,\n mappedResponse: MappedResponse[],\n logger: AstroIntegrationLogger,\n) => {\n const targetDir = path.resolve(process.cwd(), directory)\n\n try {\n await fs.mkdir(targetDir, { recursive: true })\n await Promise.all(\n mappedResponse.map(async (page) => {\n const cleanSlug = getCleanSlug(page)\n const filePath = path.join(targetDir, `${cleanSlug}.md`)\n const fileContent = `---\ntitle: ${page.pageTitle}\ndate: ${page.createdAt}\n---\n${page.content}`\n const contentToSave = fileContent.trim()\n if (await hasContentChanged(filePath, contentToSave)) {\n await fs.writeFile(filePath, contentToSave, 'utf-8')\n }\n }),\n )\n } catch (e) {\n logger.info(`Unable to create MD files: ${String(e)}`)\n }\n}\n","import { MappedResponse } from '../types'\n\nexport const getCleanSlug = (page: MappedResponse) =>\n page.pageTitle\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/(^-|-$)/g, 
'')\n"],"mappings":";AACA,SAAS,cAAc;AACvB,OAAO,YAAY;;;ACGZ,IAAM,oBAAoB,OAC/B,KACA,MACA,WACG;AACH,MAAI;AACF,WAAO,MAAM,IACV,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK,MAAM,YAAY,CAAC;AAAA,IACjC,CAAC,EACA,KAAqB;AAAA,EAC1B,SAAS,GAAG;AACV,WAAO,KAAK,mCAAmC,OAAO,CAAC,CAAC,EAAE;AAAA,EAC5D;AACF;;;ACfO,IAAM,iBAAiB,OAC5B,KACA,WACG;AACH,QAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAad,MAAI;AACF,WACG,MAAM,IACJ,KAAK;AAAA,MACJ,QAAQ;AAAA,MACR,MAAM,CAAC,KAAK;AAAA,IACd,CAAC,EACA,KAAyB,KAAM,CAAC;AAAA,EAEvC,SAAS,GAAG;AACV,WAAO;AAAA,MACL,2DAA2D,OAAO,CAAC,CAAC;AAAA,IACtE;AAAA,EACF;AACF;;;ACpCA,OAAO,QAAQ;AAER,IAAM,oBAAoB,OAAOA,OAAc,eAAuB;AAC3E,MAAI;AACF,UAAM,iBAAiB,MAAM,GAAG,SAASA,OAAM,OAAO;AACtD,WAAO,mBAAmB;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACPO,IAAM,wBAAwB,CACnC,eACA,QAAQ,MACL;AACH,MAAI,UAAU;AACd,QAAM,SAAS,KAAK,OAAO,KAAK;AAChC,aAAW,SAAS,eAAe;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,UAAU,GAAG;AACf,iBAAW;AAAA;AAAA,EAAO,IAAI;AAAA,IACxB,OAAO;AACL,iBAAW;AAAA,EAAK,MAAM,KAAK,IAAI;AAAA,IACjC;AACA,QAAI,MAAM,YAAY,MAAM,SAAS,SAAS,GAAG;AAC/C,iBAAW,sBAAsB,MAAM,UAAU,QAAQ,CAAC;AAAA,IAC5D;AAAA,EACF;AACA,SAAO;AACT;;;ACpBA,OAAOC,SAAQ;AACf,OAAO,UAAU;;;ACCV,IAAM,eAAe,CAAC,SAC3B,KAAK,UACF,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;;;ADGpB,IAAM,YAAY,OACvB,WACA,gBACA,WACG;AACH,QAAM,YAAY,KAAK,QAAQ,QAAQ,IAAI,GAAG,SAAS;AAEvD,MAAI;AACF,UAAMC,IAAG,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAC7C,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,SAAS;AACjC,cAAM,YAAY,aAAa,IAAI;AACnC,cAAM,WAAW,KAAK,KAAK,WAAW,GAAG,SAAS,KAAK;AACvD,cAAM,cAAc;AAAA,SACnB,KAAK,SAAS;AAAA,QACf,KAAK,SAAS;AAAA;AAAA,EAEpB,KAAK,OAAO;AACN,cAAM,gBAAgB,YAAY,KAAK;AACvC,YAAI,MAAM,kBAAkB,UAAU,aAAa,GAAG;AACpD,gBAAMA,IAAG,UAAU,UAAU,eAAe,OAAO;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,SAAS,GAAG;AACV,WAAO,KAAK,8BAA8B,OAAO,CAAC,CAAC,EAAE;AAAA,EACvD;AACF;;;AL5Be,SAAR,kBACL,SACkB;AAClB,QAAM;AAAA,IACJ;AAAA,IACA,SAAS;AAAA,IACT,kBAAkB;AAAA,IAClB,YAAY;AAAA,EACd,IAAI;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO;AAAA,MACL,sBAAsB,CAAC,EAAE,OAAO,MAAM;AACpC,eAAO,KAAK,4CAAqC;AAEjD,cAAM,MAAM,OAAO,EAChB,IAAI,MAAM,EACV,QAAQ;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,KAAK;AAAA,QAChC,CAAC;AAEH,oBAAY,YAAY;AACtB,cAAI;AACF,kBAAM,cAAc,MAAM,eAAe,KAAK,MAAM;AACpD,gBAAI,CAAC,YAAa;AAElB,kBAAM,iBAAmC,CAAC;AAC1C,uBAAW,QAAQ,YAAY,KAAK,GAAG;AACrC,oBAAM,MAAM,MAAM,kBAAkB,KAAK,MAAM,MAAM;AACrD,kBAAI,CAAC,IAAK;AAEV,6BAAe,KAAK;AAAA,gBAClB,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,WAAW,OAAO,KAAK,YAAY,GAAG,YAAY;AAAA,gBAClD,WAAW,KAAK;AAAA,gBAChB,SAAS,sBAAsB,GAAG;AAAA,cACpC,CAAC;AAAA,YACH;AAEA,kBAAM,UAAU,WAAW,gBAAgB,MAAM;AAAA,UACnD,SAAS,GAAQ;AACf,mBAAO,KAAK,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UACpC;AAAA,QACF,GAAG,eAAe;AAAA,MACpB;AAAA,MACA,qBAAqB,OAAO,EAAE,OAAO,MAAM;AACzC,eAAO,KAAK,yBAAyB;AAAA,MACvC;AAAA,IACF;AAAA,EACF;AACF;","names":["path","fs","fs"]}
package/package.json
ADDED
@@ -0,0 +1,57 @@
+{
+  "name": "astroplugin-logseq",
+  "version": "0.0.1",
+  "author": "benjypng",
+  "description": "Astro integration to sync Logseq pages as content collections",
+  "license": "MIT",
+  "type": "module",
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js",
+      "require": "./dist/index.cjs"
+    }
+  },
+  "files": [
+    "dist",
+    "README.md"
+  ],
+  "keywords": [
+    "astro",
+    "astro-integration",
+    "logseq",
+    "cms"
+  ],
+  "scripts": {
+    "build": "tsup",
+    "dev": "tsup --watch",
+    "prepublishOnly": "npm run build"
+  },
+  "peerDependencies": {
+    "astro": "^5.0.0"
+  },
+  "dependencies": {
+    "date-fns": "^4.1.0",
+    "wretch": "^3.0.6"
+  },
+  "devDependencies": {
+    "tsup": "^8.5.1",
+    "astro": "^5.16.6",
+    "@types/node": "^25.0.3",
+    "typescript": "^5.5.4",
+    "@eslint/js": "^9.8.0",
+    "@types/eslint": "^9.6.1",
+    "@types/eslint-config-prettier": "^6.11.3",
+    "@typescript-eslint/eslint-plugin": "^8.51.0",
+    "@typescript-eslint/parser": "^8.51.0",
+    "eslint": "^9.8.0",
+    "eslint-config-prettier": "^10.1.8",
+    "eslint-plugin-prettier": "^5.2.1",
+    "eslint-plugin-simple-import-sort": "^12.1.1",
+    "prettier": "^3.7.4",
+    "typescript-eslint": "^8.51.0"
+  }
+}