@truto/truto-jsonata 1.0.13 → 1.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.cjs +41 -16
- package/dist/main.cjs.map +1 -1
- package/dist/module.js +42 -17
- package/dist/module.js.map +1 -1
- package/package.json +1 -1
package/dist/main.cjs
CHANGED
@@ -458,6 +458,7 @@ var $90c566adb85cb52e$export$2e2bcd8739ae039 = $90c566adb85cb52e$var$convertMark
 
 
 
+
 const $bc8e7b2fdb012b3a$var$parseMarkedTokenToNotionRequest = (tokens, acc = [])=>{
     return (0, $dxT2C$lodashes.reduce)(tokens, (acc, token)=>{
         const childTokens = token.type === "blockquote" ? (0, $dxT2C$lodashes.get)(token, "tokens[0].tokens") : (0, $dxT2C$lodashes.get)(token, "tokens");
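For orientation: the converter lexes markdown with marked and folds the token stream into Notion block requests with a lodash reduce. A minimal de-minified sketch of that unchanged surrounding structure, assuming the `$dxT2C$lodashes` binding is lodash-es and guessing readable names for the mangled `$bc8e7b2fdb012b3a$var$...` identifiers:

```js
import { Lexer } from 'marked'
import { reduce, get } from 'lodash-es'

// Sketch of the surrounding (unchanged) shape, not the verbatim source.
const parseMarkedTokenToNotionRequest = (tokens, acc = []) =>
  reduce(
    tokens,
    (acc, token) => {
      // blockquote tokens nest their real content one level deeper
      const childTokens =
        token.type === 'blockquote'
          ? get(token, 'tokens[0].tokens')
          : get(token, 'tokens')
      // ...per-token-type handling appends Notion blocks to acc (see the hunks below)
      return acc
    },
    acc
  )

const convertMarkdownToNotion = (text) =>
  parseMarkedTokenToNotionRequest(Lexer.lex(text))
```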
@@ -519,17 +520,25 @@ const $bc8e7b2fdb012b3a$var$parseMarkedTokenToNotionRequest = (tokens, acc = [])
             return acc;
         }
         if (token.type === "code") {
+            const textSplitByNewLine = token.text.split("\n");
+            const chunksOfChunks = (0, $dxT2C$lodashes.compact)((0, $dxT2C$lodashes.map)(textSplitByNewLine, (chunk)=>$bc8e7b2fdb012b3a$var$chunkText(chunk)));
+            const chunksWithNewLines = (0, $dxT2C$lodashes.flattenDeep)((0, $233848446ca33fe6$export$2e2bcd8739ae039)((0, $dxT2C$lodashes.map)(chunksOfChunks, (chunk)=>{
+                return (0, $dxT2C$lodashes.map)(chunk, (_chunk)=>({
+                        type: "text",
+                        text: {
+                            content: _chunk
+                        }
+                    }));
+            }), {
+                type: "text",
+                text: {
+                    content: "\n\n"
+                }
+            }));
             acc.push({
                 type: "code",
                 code: {
-                    rich_text: [
-                        {
-                            type: "text",
-                            text: {
-                                content: token.text
-                            }
-                        }
-                    ],
+                    rich_text: chunksWithNewLines,
                     language: token.lang || "plain text"
                 }
             });
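What this hunk changes: a fenced code block used to become a single rich_text element holding the whole `token.text`, which fails once the text passes Notion's 2,000-character limit on `text.content`. The new path splits the code by newline, chunks each line to at most 2,000 characters via the `chunkText` helper added at the end of this diff, wraps each chunk in a Notion text object, and re-joins the per-line groups with `"\n\n"` separator objects. A hedged de-minified sketch; `interleave` is an assumption about what the shared helper `$233848446ca33fe6$export$2e2bcd8739ae039` does (insert a separator between consecutive elements):

```js
import { compact, map, flattenDeep } from 'lodash-es'

// Assumed behaviour of the shared helper: ([a, b], sep) -> [a, sep, b]
const interleave = (items, separator) =>
  items.flatMap((item, i) => (i === 0 ? [item] : [separator, item]))

// chunkText as added at the bottom of this diff
const chunkText = (text, numChars = 2000) =>
  text.match(new RegExp(`.{1,${numChars}}`, 'g'))

const codeTokenToRichText = (token) => {
  const lines = token.text.split('\n')
  // chunkText returns null for empty lines; compact drops those entries
  const chunksPerLine = compact(map(lines, (line) => chunkText(line)))
  // wrap every <=2000-char chunk in a text object, re-insert separators
  // between the per-line groups, and flatten to one rich_text array
  return flattenDeep(
    interleave(
      map(chunksPerLine, (chunks) =>
        map(chunks, (content) => ({ type: 'text', text: { content } }))
      ),
      { type: 'text', text: { content: '\n\n' } }
    )
  )
}
```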
@@ -632,20 +641,36 @@ const $bc8e7b2fdb012b3a$var$parseMarkedTokenToNotionRequest = (tokens, acc = [])
         const text = (0, $dxT2C$lodashes.get)(token, "text");
         if (!text) return acc;
         const textToInsert = (0, $5a7cb266718aeaae$export$2e2bcd8739ae039)(text);
-        acc.push({
+        // chunk the text into 2000 character chunks, should handle emojis and multi-byte characters
+        const textSplitByNewLine = textToInsert.split("\n");
+        const chunksOfChunks = (0, $dxT2C$lodashes.compact)((0, $dxT2C$lodashes.map)(textSplitByNewLine, (chunk)=>$bc8e7b2fdb012b3a$var$chunkText(chunk)));
+        const chunksWithNewLines = (0, $dxT2C$lodashes.flattenDeep)((0, $233848446ca33fe6$export$2e2bcd8739ae039)((0, $dxT2C$lodashes.map)(chunksOfChunks, (chunk)=>{
+            return (0, $dxT2C$lodashes.map)(chunk, (_chunk)=>({
+                    type: "text",
+                    text: {
+                        content: _chunk
+                    },
+                    ...token.type === "codespan" ? {
+                        annotations: {
+                            code: true
+                        }
+                    } : {}
+                }));
+        }), {
             type: "text",
             text: {
-                content: textToInsert
-            },
-            ...token.type === "codespan" ? {
-                annotations: {
-                    code: true
-                }
-            } : {}
+                content: "\n"
+            }
+        }));
+        (0, $dxT2C$lodashes.each)(chunksWithNewLines, (chunk)=>{
+            acc.push(chunk);
         });
         return acc;
     }, acc);
 };
+const $bc8e7b2fdb012b3a$var$chunkText = (text, numChars = 2000)=>{
+    return text.match(new RegExp(`.{1,${numChars}}`, "g"));
+};
 const $bc8e7b2fdb012b3a$var$convertMarkdownToNotion = (text)=>{
     const tokens = (0, $dxT2C$marked.Lexer).lex(text);
     const parsedTokens = $bc8e7b2fdb012b3a$var$parseMarkedTokenToNotionRequest(tokens);
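This hunk gives plain text and codespan tokens the same chunk-and-interleave treatment: the `code: true` annotation is preserved on every chunk of a codespan, a single `"\n"` separator is used instead of `"\n\n"`, and each resulting rich text object is pushed individually. The new `chunkText` helper does the actual slicing with a regex quantifier. A quick sketch of its behaviour, with assumed example inputs:

```js
const chunkText = (text, numChars = 2000) =>
  text.match(new RegExp(`.{1,${numChars}}`, 'g'))

chunkText('a'.repeat(4500)) // -> three chunks: 2000, 2000, and 500 chars
chunkText('')               // -> null (no match); callers wrap it in compact() for this reason

// Caveat: without the `u` flag, `.` matches UTF-16 code units rather than code
// points, so an astral-plane character (most emoji) that straddles a chunk
// boundary is split into unpaired surrogates; BMP characters are unaffected.
// `.` also never matches "\n", which is fine here because callers split the
// text on "\n" before chunking.
```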