@payloadcms/richtext-lexical 3.57.0-internal.266049e → 3.57.0-internal.2988185
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/exports/client/{Field-BJACUMFU.js → Field-PKJPWZJL.js} +2 -2
- package/dist/exports/client/Field-PKJPWZJL.js.map +7 -0
- package/dist/exports/client/bundled.css +1 -1
- package/dist/exports/client/{chunk-SEPPJCZ6.js → chunk-YCH4JNUH.js} +2 -2
- package/dist/exports/client/component-3PENNOM3.js +2 -0
- package/dist/exports/client/component-3PENNOM3.js.map +7 -0
- package/dist/exports/client/index.d.ts +0 -2
- package/dist/exports/client/index.d.ts.map +1 -1
- package/dist/exports/client/index.js +10 -35
- package/dist/exports/client/index.js.map +4 -4
- package/dist/features/align/server/i18n.d.ts.map +1 -1
- package/dist/features/align/server/i18n.js +0 -6
- package/dist/features/align/server/i18n.js.map +1 -1
- package/dist/features/blockquote/server/i18n.d.ts.map +1 -1
- package/dist/features/blockquote/server/i18n.js +0 -3
- package/dist/features/blockquote/server/i18n.js.map +1 -1
- package/dist/features/blocks/client/index.d.ts.map +1 -1
- package/dist/features/blocks/client/index.js +0 -5
- package/dist/features/blocks/client/index.js.map +1 -1
- package/dist/features/blocks/client/markdownTransformer.d.ts +17 -0
- package/dist/features/blocks/client/markdownTransformer.d.ts.map +1 -0
- package/dist/features/blocks/client/markdownTransformer.js +144 -0
- package/dist/features/blocks/client/markdownTransformer.js.map +1 -0
- package/dist/features/blocks/server/i18n.d.ts.map +1 -1
- package/dist/features/blocks/server/i18n.js +0 -9
- package/dist/features/blocks/server/i18n.js.map +1 -1
- package/dist/features/blocks/server/index.js +1 -1
- package/dist/features/blocks/server/index.js.map +1 -1
- package/dist/features/blocks/server/linesFromMatchToContentAndPropsString.d.ts.map +1 -0
- package/dist/features/blocks/server/linesFromMatchToContentAndPropsString.js.map +1 -0
- package/dist/features/blocks/server/markdownTransformer.d.ts +22 -0
- package/dist/features/blocks/server/markdownTransformer.d.ts.map +1 -0
- package/dist/features/blocks/server/{markdown/markdownTransformer.js → markdownTransformer.js} +48 -6
- package/dist/features/blocks/server/markdownTransformer.js.map +1 -0
- package/dist/features/heading/server/i18n.d.ts.map +1 -1
- package/dist/features/heading/server/i18n.js +0 -3
- package/dist/features/heading/server/i18n.js.map +1 -1
- package/dist/features/horizontalRule/server/i18n.d.ts.map +1 -1
- package/dist/features/horizontalRule/server/i18n.js +0 -3
- package/dist/features/horizontalRule/server/i18n.js.map +1 -1
- package/dist/features/indent/server/i18n.d.ts.map +1 -1
- package/dist/features/indent/server/i18n.js +0 -4
- package/dist/features/indent/server/i18n.js.map +1 -1
- package/dist/features/link/server/i18n.d.ts.map +1 -1
- package/dist/features/link/server/i18n.js +0 -4
- package/dist/features/link/server/i18n.js.map +1 -1
- package/dist/features/lists/checklist/server/i18n.d.ts.map +1 -1
- package/dist/features/lists/checklist/server/i18n.js +0 -3
- package/dist/features/lists/checklist/server/i18n.js.map +1 -1
- package/dist/features/lists/orderedList/server/i18n.d.ts.map +1 -1
- package/dist/features/lists/orderedList/server/i18n.js +0 -3
- package/dist/features/lists/orderedList/server/i18n.js.map +1 -1
- package/dist/features/lists/unorderedList/server/i18n.d.ts.map +1 -1
- package/dist/features/lists/unorderedList/server/i18n.js +0 -3
- package/dist/features/lists/unorderedList/server/i18n.js.map +1 -1
- package/dist/features/paragraph/server/i18n.d.ts.map +1 -1
- package/dist/features/paragraph/server/i18n.js +0 -4
- package/dist/features/paragraph/server/i18n.js.map +1 -1
- package/dist/features/relationship/server/i18n.d.ts.map +1 -1
- package/dist/features/relationship/server/i18n.js +0 -3
- package/dist/features/relationship/server/i18n.js.map +1 -1
- package/dist/features/textState/i18n.d.ts.map +1 -1
- package/dist/features/textState/i18n.js +0 -3
- package/dist/features/textState/i18n.js.map +1 -1
- package/dist/features/upload/client/component/index.d.ts +0 -2
- package/dist/features/upload/client/component/index.d.ts.map +1 -1
- package/dist/features/upload/client/component/index.js +24 -28
- package/dist/features/upload/client/component/index.js.map +1 -1
- package/dist/features/upload/client/nodes/UploadNode.d.ts +7 -2
- package/dist/features/upload/client/nodes/UploadNode.d.ts.map +1 -1
- package/dist/features/upload/client/nodes/UploadNode.js +27 -9
- package/dist/features/upload/client/nodes/UploadNode.js.map +1 -1
- package/dist/features/upload/client/plugin/index.d.ts +0 -6
- package/dist/features/upload/client/plugin/index.d.ts.map +1 -1
- package/dist/features/upload/client/plugin/index.js +23 -306
- package/dist/features/upload/client/plugin/index.js.map +1 -1
- package/dist/features/upload/server/i18n.d.ts.map +1 -1
- package/dist/features/upload/server/i18n.js +0 -3
- package/dist/features/upload/server/i18n.js.map +1 -1
- package/dist/features/upload/server/nodes/UploadNode.d.ts +1 -16
- package/dist/features/upload/server/nodes/UploadNode.d.ts.map +1 -1
- package/dist/features/upload/server/nodes/UploadNode.js +39 -12
- package/dist/features/upload/server/nodes/UploadNode.js.map +1 -1
- package/dist/field/Field.d.ts.map +1 -1
- package/dist/field/Field.js +1 -1
- package/dist/field/Field.js.map +1 -1
- package/dist/field/bundled.css +1 -1
- package/dist/i18n.d.ts.map +1 -1
- package/dist/i18n.js +0 -6
- package/dist/i18n.js.map +1 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -1
- package/dist/index.js.map +1 -1
- package/dist/utilities/jsx/collectTopLevelJSXInLines.js +2 -2
- package/dist/utilities/jsx/collectTopLevelJSXInLines.js.map +1 -1
- package/package.json +6 -6
- package/dist/exports/client/Field-BJACUMFU.js.map +0 -7
- package/dist/exports/client/component-VDJI45F2.js +0 -2
- package/dist/exports/client/component-VDJI45F2.js.map +0 -7
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.d.ts +0 -6
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.d.ts.map +0 -1
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.js +0 -24
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.js.map +0 -1
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.d.ts +0 -6
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.d.ts.map +0 -1
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.js +0 -20
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.js.map +0 -1
- package/dist/features/blocks/client/markdown/markdownTransformer.d.ts +0 -12
- package/dist/features/blocks/client/markdown/markdownTransformer.d.ts.map +0 -1
- package/dist/features/blocks/client/markdown/markdownTransformer.js +0 -348
- package/dist/features/blocks/client/markdown/markdownTransformer.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component.d.ts +0 -53
- package/dist/features/blocks/premade/CodeBlock/Component.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component.js +0 -68
- package/dist/features/blocks/premade/CodeBlock/Component.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converter.d.ts +0 -7
- package/dist/features/blocks/premade/CodeBlock/converter.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converter.js +0 -46
- package/dist/features/blocks/premade/CodeBlock/converter.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converterClient.d.ts +0 -2
- package/dist/features/blocks/premade/CodeBlock/converterClient.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converterClient.js +0 -4
- package/dist/features/blocks/premade/CodeBlock/converterClient.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/index.d.ts +0 -7
- package/dist/features/blocks/premade/CodeBlock/index.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/index.js +0 -39
- package/dist/features/blocks/premade/CodeBlock/index.js.map +0 -1
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.d.ts +0 -6
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.js +0 -27
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.js.map +0 -1
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.d.ts +0 -7
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.js +0 -22
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.js.map +0 -1
- package/dist/features/blocks/server/markdown/linesFromMatchToContentAndPropsString.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/linesFromMatchToContentAndPropsString.js.map +0 -1
- package/dist/features/blocks/server/markdown/markdownTransformer.d.ts +0 -15
- package/dist/features/blocks/server/markdown/markdownTransformer.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/markdownTransformer.js.map +0 -1
- package/dist/features/upload/client/component/pending/index.d.ts +0 -3
- package/dist/features/upload/client/component/pending/index.d.ts.map +0 -1
- package/dist/features/upload/client/component/pending/index.js +0 -14
- package/dist/features/upload/client/component/pending/index.js.map +0 -1
- package/dist/features/upload/server/nodes/conversions.d.ts +0 -6
- package/dist/features/upload/server/nodes/conversions.d.ts.map +0 -1
- package/dist/features/upload/server/nodes/conversions.js +0 -53
- package/dist/features/upload/server/nodes/conversions.js.map +0 -1
- /package/dist/exports/client/{chunk-SEPPJCZ6.js.map → chunk-YCH4JNUH.js.map} +0 -0
- /package/dist/features/blocks/server/{markdown/linesFromMatchToContentAndPropsString.d.ts → linesFromMatchToContentAndPropsString.d.ts} +0 -0
- /package/dist/features/blocks/server/{markdown/linesFromMatchToContentAndPropsString.js → linesFromMatchToContentAndPropsString.js} +0 -0

@@ -1,39 +0,0 @@
-import { codeConverter } from './converter.js';
-/**
- * @experimental - this API may change in future, minor releases
- */
-export const CodeBlock = args => {
-  const languages = args?.languages || {
-    js: 'JavaScript',
-    plaintext: 'Plain Text',
-    ts: 'TypeScript'
-  };
-  return {
-    slug: args?.slug || 'Code',
-    admin: {
-      jsx: '@payloadcms/richtext-lexical/client#codeConverterClient'
-    },
-    fields: [{
-      name: 'language',
-      type: 'select',
-      defaultValue: args?.defaultLanguage || Object.keys(languages)[0],
-      options: Object.entries(languages).map(([key, value]) => ({
-        label: value,
-        value: key
-      }))
-    }, {
-      name: 'code',
-      type: 'code',
-      admin: {
-        components: {
-          Field: {
-            clientProps: args,
-            path: '@payloadcms/richtext-lexical/client#CodeComponent'
-          }
-        }
-      }
-    }],
-    jsx: codeConverter
-  };
-};
-//# sourceMappingURL=index.js.map
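
The hunk above removes the premade CodeBlock block factory. As a hedged sketch (not taken from this diff) of how such a factory was typically wired into a lexical editor config: it assumes CodeBlock was re-exported from the package root (consistent with the one-line removals in dist/index.js and dist/index.d.ts listed above) and uses the documented lexicalEditor/BlocksFeature APIs.

```ts
// Illustrative wiring only — the import path for CodeBlock is an assumption.
import { BlocksFeature, lexicalEditor } from '@payloadcms/richtext-lexical'
import { CodeBlock } from '@payloadcms/richtext-lexical' // removed in 3.57.0-internal.2988185

export const editor = lexicalEditor({
  features: ({ defaultFeatures }) => [
    ...defaultFeatures,
    BlocksFeature({
      // CodeBlock(args) returned a payload Block config with `jsx` markdown converters attached
      blocks: [CodeBlock({ defaultLanguage: 'ts' })],
    }),
  ],
})
```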

@@ -1 +0,0 @@
{"version":3,"file":"index.js","names":["codeConverter","CodeBlock","args","languages","js","plaintext","ts","slug","admin","jsx","fields","name","type","defaultValue","defaultLanguage","Object","keys","options","entries","map","key","value","label","components","Field","clientProps","path"],"sources":["../../../../../src/features/blocks/premade/CodeBlock/index.ts"],"sourcesContent":["import type { Block } from 'payload'\n\nimport type { AdditionalCodeComponentProps } from './Component.js'\n\nimport { codeConverter } from './converter.js'\n\n/**\n * @experimental - this API may change in future, minor releases\n */\nexport const CodeBlock: (args?: AdditionalCodeComponentProps) => Block = (args) => {\n const languages = args?.languages || {\n js: 'JavaScript',\n plaintext: 'Plain Text',\n ts: 'TypeScript',\n }\n\n return {\n slug: args?.slug || 'Code',\n admin: {\n jsx: '@payloadcms/richtext-lexical/client#codeConverterClient',\n },\n fields: [\n {\n name: 'language',\n type: 'select',\n defaultValue: args?.defaultLanguage || Object.keys(languages)[0],\n options: Object.entries(languages).map(([key, value]) => ({\n label: value,\n value: key,\n })),\n },\n {\n name: 'code',\n type: 'code',\n admin: {\n components: {\n Field: {\n clientProps: args,\n path: '@payloadcms/richtext-lexical/client#CodeComponent',\n },\n },\n },\n },\n ],\n jsx: codeConverter,\n }\n}\n"],"mappings":"AAIA,SAASA,aAAa,QAAQ;AAE9B;;;AAGA,OAAO,MAAMC,SAAA,GAA6DC,IAAA;EACxE,MAAMC,SAAA,GAAYD,IAAA,EAAMC,SAAA,IAAa;IACnCC,EAAA,EAAI;IACJC,SAAA,EAAW;IACXC,EAAA,EAAI;EACN;EAEA,OAAO;IACLC,IAAA,EAAML,IAAA,EAAMK,IAAA,IAAQ;IACpBC,KAAA,EAAO;MACLC,GAAA,EAAK;IACP;IACAC,MAAA,EAAQ,CACN;MACEC,IAAA,EAAM;MACNC,IAAA,EAAM;MACNC,YAAA,EAAcX,IAAA,EAAMY,eAAA,IAAmBC,MAAA,CAAOC,IAAI,CAACb,SAAA,CAAU,CAAC,EAAE;MAChEc,OAAA,EAASF,MAAA,CAAOG,OAAO,CAACf,SAAA,EAAWgB,GAAG,CAAC,CAAC,CAACC,GAAA,EAAKC,KAAA,CAAM,MAAM;QACxDC,KAAA,EAAOD,KAAA;QACPA,KAAA,EAAOD;MACT;IACF,GACA;MACET,IAAA,EAAM;MACNC,IAAA,EAAM;MACNJ,KAAA,EAAO;QACLe,UAAA,EAAY;UACVC,KAAA,EAAO;YACLC,WAAA,EAAavB,IAAA;YACbwB,IAAA,EAAM;UACR;QACF;MACF;IACF,EACD;IACDjB,GAAA,EAAKT;EACP;AACF","ignoreList":[]}

@@ -1,6 +0,0 @@
-import type { NodeWithHooks } from '../../../typesServer.js';
-import { type Transformer } from '../../../../packages/@lexical/markdown/index.js';
-export declare function getLexicalToMarkdown(allNodes: Array<NodeWithHooks>, allTransformers: Transformer[]): (args: {
-    editorState: Record<string, any>;
-}) => string;
-//# sourceMappingURL=getLexicalToMarkdown.d.ts.map

@@ -1 +0,0 @@
{"version":3,"file":"getLexicalToMarkdown.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/server/markdown/getLexicalToMarkdown.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAA;AAG5D,OAAO,EAEL,KAAK,WAAW,EACjB,MAAM,iDAAiD,CAAA;AACxD,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,KAAK,CAAC,aAAa,CAAC,EAC9B,eAAe,EAAE,WAAW,EAAE,GAC7B,CAAC,IAAI,EAAE;IAAE,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;CAAE,KAAK,MAAM,CAsBxD"}

@@ -1,27 +0,0 @@
-import { createHeadlessEditor } from '@lexical/headless';
-import { getEnabledNodesFromServerNodes } from '../../../../lexical/nodes/index.js';
-import { $convertToMarkdownString } from '../../../../packages/@lexical/markdown/index.js';
-export function getLexicalToMarkdown(allNodes, allTransformers) {
-  const lexicalToMarkdown = ({
-    editorState
-  }) => {
-    const headlessEditor = createHeadlessEditor({
-      nodes: getEnabledNodesFromServerNodes({
-        nodes: allNodes
-      })
-    });
-    try {
-      headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState)) // This should commit the editor state immediately
-      ;
-    } catch (e) {
-      console.error('getLexicalToMarkdown: ERROR parsing editor state', e);
-    }
-    let markdown = '';
-    headlessEditor.getEditorState().read(() => {
-      markdown = $convertToMarkdownString(allTransformers);
-    });
-    return markdown;
-  };
-  return lexicalToMarkdown;
-}
-//# sourceMappingURL=getLexicalToMarkdown.js.map
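
The file removed above built an editor-state → markdown converter on a headless Lexical editor. Below is a hedged, self-contained sketch of the same pattern using the public Lexical packages (this plugin bundles its own fork of @lexical/markdown, so its internal imports differ; editorStateToMarkdown and the nodes parameter are illustrative names):

```ts
import { createHeadlessEditor } from '@lexical/headless'
import { $convertToMarkdownString, TRANSFORMERS } from '@lexical/markdown'
import type { Klass, LexicalNode } from 'lexical'

export function editorStateToMarkdown(
  editorState: Record<string, any>,
  nodes: Array<Klass<LexicalNode>>,
): string {
  const editor = createHeadlessEditor({ nodes }) // no DOM required on the server
  // Parse and commit the serialized state before reading it back
  editor.setEditorState(editor.parseEditorState(editorState as any))
  let markdown = ''
  editor.getEditorState().read(() => {
    markdown = $convertToMarkdownString(TRANSFORMERS) // must run inside a read()
  })
  return markdown
}
```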

@@ -1 +0,0 @@
{"version":3,"file":"getLexicalToMarkdown.js","names":["createHeadlessEditor","getEnabledNodesFromServerNodes","$convertToMarkdownString","getLexicalToMarkdown","allNodes","allTransformers","lexicalToMarkdown","editorState","headlessEditor","nodes","setEditorState","parseEditorState","e","console","error","markdown","getEditorState","read"],"sources":["../../../../../src/features/blocks/server/markdown/getLexicalToMarkdown.ts"],"sourcesContent":["import { createHeadlessEditor } from '@lexical/headless'\n\nimport type { NodeWithHooks } from '../../../typesServer.js'\n\nimport { getEnabledNodesFromServerNodes } from '../../../../lexical/nodes/index.js'\nimport {\n $convertToMarkdownString,\n type Transformer,\n} from '../../../../packages/@lexical/markdown/index.js'\nexport function getLexicalToMarkdown(\n allNodes: Array<NodeWithHooks>,\n allTransformers: Transformer[],\n): (args: { editorState: Record<string, any> }) => string {\n const lexicalToMarkdown = ({ editorState }: { editorState: Record<string, any> }): string => {\n const headlessEditor = createHeadlessEditor({\n nodes: getEnabledNodesFromServerNodes({\n nodes: allNodes,\n }),\n })\n\n try {\n headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState as any)) // This should commit the editor state immediately\n } catch (e) {\n console.error('getLexicalToMarkdown: ERROR parsing editor state', e)\n }\n\n let markdown: string = ''\n headlessEditor.getEditorState().read(() => {\n markdown = $convertToMarkdownString(allTransformers)\n })\n\n return markdown\n }\n return lexicalToMarkdown\n}\n"],"mappings":"AAAA,SAASA,oBAAoB,QAAQ;AAIrC,SAASC,8BAA8B,QAAQ;AAC/C,SACEC,wBAAwB,QAEnB;AACP,OAAO,SAASC,qBACdC,QAA8B,EAC9BC,eAA8B;EAE9B,MAAMC,iBAAA,GAAoBA,CAAC;IAAEC;EAAW,CAAwC;IAC9E,MAAMC,cAAA,GAAiBR,oBAAA,CAAqB;MAC1CS,KAAA,EAAOR,8BAAA,CAA+B;QACpCQ,KAAA,EAAOL;MACT;IACF;IAEA,IAAI;MACFI,cAAA,CAAeE,cAAc,CAACF,cAAA,CAAeG,gBAAgB,CAACJ,WAAA,GAAqB;MAAA;IACrF,EAAE,OAAOK,CAAA,EAAG;MACVC,OAAA,CAAQC,KAAK,CAAC,oDAAoDF,CAAA;IACpE;IAEA,IAAIG,QAAA,GAAmB;IACvBP,cAAA,CAAeQ,cAAc,GAAGC,IAAI,CAAC;MACnCF,QAAA,GAAWb,wBAAA,CAAyBG,eAAA;IACtC;IAEA,OAAOU,QAAA;EACT;EACA,OAAOT,iBAAA;AACT","ignoreList":[]}

@@ -1,7 +0,0 @@
-import type { SerializedEditorState } from 'lexical';
-import type { NodeWithHooks } from '../../../typesServer.js';
-import { type Transformer } from '../../../../packages/@lexical/markdown/index.js';
-export declare function getMarkdownToLexical(allNodes: Array<NodeWithHooks>, allTransformers: Transformer[]): (args: {
-    markdown: string;
-}) => SerializedEditorState;
-//# sourceMappingURL=getMarkdownToLexical.d.ts.map

@@ -1 +0,0 @@
{"version":3,"file":"getMarkdownToLexical.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/server/markdown/getMarkdownToLexical.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,SAAS,CAAA;AAIpD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAA;AAG5D,OAAO,EAEL,KAAK,WAAW,EACjB,MAAM,iDAAiD,CAAA;AACxD,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,KAAK,CAAC,aAAa,CAAC,EAC9B,eAAe,EAAE,WAAW,EAAE,GAC7B,CAAC,IAAI,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,KAAK,qBAAqB,CAkBvD"}

@@ -1,22 +0,0 @@
-import { createHeadlessEditor } from '@lexical/headless';
-import { getEnabledNodesFromServerNodes } from '../../../../lexical/nodes/index.js';
-import { $convertFromMarkdownString } from '../../../../packages/@lexical/markdown/index.js';
-export function getMarkdownToLexical(allNodes, allTransformers) {
-  const markdownToLexical = ({
-    markdown
-  }) => {
-    const headlessEditor = createHeadlessEditor({
-      nodes: getEnabledNodesFromServerNodes({
-        nodes: allNodes
-      })
-    });
-    headlessEditor.update(() => {
-      $convertFromMarkdownString(markdown, allTransformers);
-    }, {
-      discrete: true
-    });
-    return headlessEditor.getEditorState().toJSON();
-  };
-  return markdownToLexical;
-}
-//# sourceMappingURL=getMarkdownToLexical.js.map
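
This removed helper is the inverse direction: markdown in, serialized editor state out. A hedged sketch of the same pattern with the public Lexical packages (markdownToEditorState and the nodes parameter are illustrative names):

```ts
import { createHeadlessEditor } from '@lexical/headless'
import { $convertFromMarkdownString, TRANSFORMERS } from '@lexical/markdown'
import type { Klass, LexicalNode, SerializedEditorState } from 'lexical'

export function markdownToEditorState(
  markdown: string,
  nodes: Array<Klass<LexicalNode>>,
): SerializedEditorState {
  const editor = createHeadlessEditor({ nodes })
  // discrete: true flushes the update synchronously so toJSON() sees the parsed result
  editor.update(() => $convertFromMarkdownString(markdown, TRANSFORMERS), { discrete: true })
  return editor.getEditorState().toJSON()
}
```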

@@ -1 +0,0 @@
{"version":3,"file":"getMarkdownToLexical.js","names":["createHeadlessEditor","getEnabledNodesFromServerNodes","$convertFromMarkdownString","getMarkdownToLexical","allNodes","allTransformers","markdownToLexical","markdown","headlessEditor","nodes","update","discrete","getEditorState","toJSON"],"sources":["../../../../../src/features/blocks/server/markdown/getMarkdownToLexical.ts"],"sourcesContent":["import type { SerializedEditorState } from 'lexical'\n\nimport { createHeadlessEditor } from '@lexical/headless'\n\nimport type { NodeWithHooks } from '../../../typesServer.js'\n\nimport { getEnabledNodesFromServerNodes } from '../../../../lexical/nodes/index.js'\nimport {\n $convertFromMarkdownString,\n type Transformer,\n} from '../../../../packages/@lexical/markdown/index.js'\nexport function getMarkdownToLexical(\n allNodes: Array<NodeWithHooks>,\n allTransformers: Transformer[],\n): (args: { markdown: string }) => SerializedEditorState {\n const markdownToLexical = ({ markdown }: { markdown: string }): SerializedEditorState => {\n const headlessEditor = createHeadlessEditor({\n nodes: getEnabledNodesFromServerNodes({\n nodes: allNodes,\n }),\n })\n\n headlessEditor.update(\n () => {\n $convertFromMarkdownString(markdown, allTransformers)\n },\n { discrete: true },\n )\n\n return headlessEditor.getEditorState().toJSON()\n }\n return markdownToLexical\n}\n"],"mappings":"AAEA,SAASA,oBAAoB,QAAQ;AAIrC,SAASC,8BAA8B,QAAQ;AAC/C,SACEC,0BAA0B,QAErB;AACP,OAAO,SAASC,qBACdC,QAA8B,EAC9BC,eAA8B;EAE9B,MAAMC,iBAAA,GAAoBA,CAAC;IAAEC;EAAQ,CAAwB;IAC3D,MAAMC,cAAA,GAAiBR,oBAAA,CAAqB;MAC1CS,KAAA,EAAOR,8BAAA,CAA+B;QACpCQ,KAAA,EAAOL;MACT;IACF;IAEAI,cAAA,CAAeE,MAAM,CACnB;MACER,0BAAA,CAA2BK,QAAA,EAAUF,eAAA;IACvC,GACA;MAAEM,QAAA,EAAU;IAAK;IAGnB,OAAOH,cAAA,CAAeI,cAAc,GAAGC,MAAM;EAC/C;EACA,OAAOP,iBAAA;AACT","ignoreList":[]}

@@ -1 +0,0 @@
{"version":3,"file":"linesFromMatchToContentAndPropsString.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/server/markdown/linesFromMatchToContentAndPropsString.ts"],"names":[],"mappings":"AAAA,wBAAgB,qCAAqC,CAAC,EACpD,aAAa,EACb,KAAK,EACL,cAAc,EACd,cAAc,EACd,UAAU,EACV,YAAY,GACb,EAAE;IACD,aAAa,CAAC,EAAE,OAAO,CAAA;IACvB,KAAK,EAAE,MAAM,EAAE,CAAA;IACf,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,cAAc,EAAE,MAAM,CAAA;IACtB,UAAU,EAAE,gBAAgB,CAAA;IAC5B,YAAY,CAAC,EAAE,OAAO,CAAA;CACvB,GAAG;IACF;;OAEG;IACH,YAAY,EAAE,MAAM,CAAA;IACpB;;OAEG;IACH,eAAe,EAAE,MAAM,CAAA;IACvB,OAAO,EAAE,MAAM,CAAA;IACf,YAAY,EAAE,MAAM,CAAA;IACpB,oBAAoB,EAAE,MAAM,CAAA;IAC5B,WAAW,EAAE,MAAM,CAAA;CACpB,CAqJA"}

@@ -1 +0,0 @@
{"version":3,"file":"linesFromMatchToContentAndPropsString.js","names":["linesFromStartToContentAndPropsString","isEndOptional","lines","regexpEndRegex","startLineIndex","startMatch","trimChildren","propsString","content","linesCopy","slice","isWithinContent","contentSubTagStartAmount","bracketCount","quoteChar","isSelfClosing","isWithinCodeBlockAmount","beforeStartLine","index","endlineLastCharIndex","endLineIndex","mainLoop","lineIndex","lineCopy","entries","line","trim","amountOfBeginningSpacesRemoved","i","length","charIndex","char","nextChar","match","undefined","Error","join","JSON","stringify","afterEndLine"],"sources":["../../../../../src/features/blocks/server/markdown/linesFromMatchToContentAndPropsString.ts"],"sourcesContent":["export function linesFromStartToContentAndPropsString({\n isEndOptional,\n lines,\n regexpEndRegex,\n startLineIndex,\n startMatch,\n trimChildren,\n}: {\n isEndOptional?: boolean\n lines: string[]\n regexpEndRegex?: RegExp\n startLineIndex: number\n startMatch: RegExpMatchArray\n trimChildren?: boolean\n}): {\n /**\n * The matched string after the end match, in the same line as the end match. Useful for inline matches.\n */\n afterEndLine: string\n /**\n * The matched string before the start match, in the same line as the start match. Useful for inline matches.\n */\n beforeStartLine: string\n content: string\n endLineIndex: number\n endlineLastCharIndex: number\n propsString: string\n} {\n let propsString = ''\n let content = ''\n const linesCopy = lines.slice(startLineIndex)\n\n let isWithinContent = false // If false => is within prop\n let contentSubTagStartAmount = 0\n\n let bracketCount = 0\n let quoteChar: null | string = null\n let isSelfClosing = false\n let isWithinCodeBlockAmount = 0\n\n const beforeStartLine = linesCopy[0]!.slice(0, startMatch.index)\n let endlineLastCharIndex = 0\n\n let endLineIndex = startLineIndex\n\n mainLoop: for (const [lineIndex, lineCopy] of linesCopy.entries()) {\n const line = trimChildren ? lineCopy.trim() : lineCopy\n let amountOfBeginningSpacesRemoved = 0\n if (trimChildren) {\n for (let i = 0; i < lineCopy.length; i++) {\n if (lineCopy[i] === ' ') {\n amountOfBeginningSpacesRemoved++\n } else {\n break\n }\n }\n }\n\n let charIndex = 0\n\n if (lineIndex === 0) {\n charIndex = (startMatch.index ?? 
0) + startMatch[0].length - amountOfBeginningSpacesRemoved // We need to also loop over the \">\" in something like \"<InlineCode>\" in order to later set isWithinContent to true\n }\n\n while (charIndex < line.length) {\n const char = line[charIndex]\n const nextChar = line[charIndex + 1]\n\n if (!isWithinContent) {\n if (char === '{' && !quoteChar) {\n bracketCount++\n } else if (char === '}' && !quoteChar) {\n bracketCount--\n } else if ((char === '\"' || char === \"'\") && !quoteChar) {\n quoteChar = char\n } else if (char === quoteChar) {\n quoteChar = null\n }\n\n if (char === '/' && nextChar === '>' && bracketCount === 0 && !quoteChar) {\n isSelfClosing = true\n endLineIndex = lineIndex\n endlineLastCharIndex = charIndex + 2\n\n break mainLoop\n } else if (char === '>' && bracketCount === 0 && !quoteChar) {\n isWithinContent = true\n charIndex++\n continue\n }\n\n propsString += char\n } else {\n if (char === '`') {\n isWithinCodeBlockAmount++\n }\n\n if (isWithinCodeBlockAmount % 2 === 0) {\n if (char === '<' && nextChar === '/') {\n contentSubTagStartAmount--\n\n if (contentSubTagStartAmount < 0) {\n if (content[content.length - 1] === '\\n') {\n content = content.slice(0, -1) // Remove the last newline\n }\n endLineIndex = lineIndex\n // Calculate endlineLastCharIndex by finding \">\" in line\n for (let i = charIndex; i < line.length; i++) {\n if (line[i] === '>') {\n endlineLastCharIndex = i + 1\n\n break\n }\n }\n break mainLoop\n }\n } else if (char === '/' && nextChar === '>') {\n contentSubTagStartAmount--\n\n if (contentSubTagStartAmount < 0) {\n if (content[content.length - 1] === '\\n') {\n content = content.slice(0, -1) // Remove the last newline\n }\n endLineIndex = lineIndex\n endlineLastCharIndex = charIndex + 2\n break mainLoop\n }\n } else if (char === '<' && nextChar !== '/') {\n contentSubTagStartAmount++\n }\n }\n\n content += char\n }\n\n charIndex++\n }\n\n if (isWithinContent) {\n if (content?.length > 0 && lineIndex > 0) {\n content += '\\n'\n }\n } else {\n propsString += '\\n'\n }\n\n if (regexpEndRegex && contentSubTagStartAmount < 0) {\n // If 0 and in same line where it got lowered to 0 then this is not the match we are looking for\n const match = line.match(regexpEndRegex)\n if (match?.index !== undefined) {\n endLineIndex = lineIndex\n endlineLastCharIndex = match.index + match[0].length - 1\n break\n }\n }\n\n if (lineIndex === linesCopy.length - 1 && !isEndOptional && !isSelfClosing) {\n throw new Error(\n 'End match not found for lines ' +\n lines.join('\\n') +\n '\\n\\n. 
Start match: ' +\n JSON.stringify(startMatch),\n )\n }\n }\n\n const afterEndLine = linesCopy[endLineIndex]!.trim().slice(endlineLastCharIndex)\n\n return {\n afterEndLine,\n beforeStartLine,\n content,\n endLineIndex: startLineIndex + endLineIndex,\n endlineLastCharIndex,\n propsString,\n }\n}\n"],"mappings":"AAAA,OAAO,SAASA,sCAAsC;EACpDC,aAAa;EACbC,KAAK;EACLC,cAAc;EACdC,cAAc;EACdC,UAAU;EACVC;AAAY,CAQb;EAcC,IAAIC,WAAA,GAAc;EAClB,IAAIC,OAAA,GAAU;EACd,MAAMC,SAAA,GAAYP,KAAA,CAAMQ,KAAK,CAACN,cAAA;EAE9B,IAAIO,eAAA,GAAkB,MAAM;EAAA;EAC5B,IAAIC,wBAAA,GAA2B;EAE/B,IAAIC,YAAA,GAAe;EACnB,IAAIC,SAAA,GAA2B;EAC/B,IAAIC,aAAA,GAAgB;EACpB,IAAIC,uBAAA,GAA0B;EAE9B,MAAMC,eAAA,GAAkBR,SAAS,CAAC,EAAE,CAAEC,KAAK,CAAC,GAAGL,UAAA,CAAWa,KAAK;EAC/D,IAAIC,oBAAA,GAAuB;EAE3B,IAAIC,YAAA,GAAehB,cAAA;EAEnBiB,QAAA,EAAU,KAAK,MAAM,CAACC,SAAA,EAAWC,QAAA,CAAS,IAAId,SAAA,CAAUe,OAAO,IAAI;IACjE,MAAMC,IAAA,GAAOnB,YAAA,GAAeiB,QAAA,CAASG,IAAI,KAAKH,QAAA;IAC9C,IAAII,8BAAA,GAAiC;IACrC,IAAIrB,YAAA,EAAc;MAChB,KAAK,IAAIsB,CAAA,GAAI,GAAGA,CAAA,GAAIL,QAAA,CAASM,MAAM,EAAED,CAAA,IAAK;QACxC,IAAIL,QAAQ,CAACK,CAAA,CAAE,KAAK,KAAK;UACvBD,8BAAA;QACF,OAAO;UACL;QACF;MACF;IACF;IAEA,IAAIG,SAAA,GAAY;IAEhB,IAAIR,SAAA,KAAc,GAAG;MACnBQ,SAAA,GAAY,CAACzB,UAAA,CAAWa,KAAK,IAAI,KAAKb,UAAU,CAAC,EAAE,CAACwB,MAAM,GAAGF,8BAAA,CAA+B;MAAA;IAC9F;IAEA,OAAOG,SAAA,GAAYL,IAAA,CAAKI,MAAM,EAAE;MAC9B,MAAME,IAAA,GAAON,IAAI,CAACK,SAAA,CAAU;MAC5B,MAAME,QAAA,GAAWP,IAAI,CAACK,SAAA,GAAY,EAAE;MAEpC,IAAI,CAACnB,eAAA,EAAiB;QACpB,IAAIoB,IAAA,KAAS,OAAO,CAACjB,SAAA,EAAW;UAC9BD,YAAA;QACF,OAAO,IAAIkB,IAAA,KAAS,OAAO,CAACjB,SAAA,EAAW;UACrCD,YAAA;QACF,OAAO,IAAI,CAACkB,IAAA,KAAS,OAAOA,IAAA,KAAS,GAAE,KAAM,CAACjB,SAAA,EAAW;UACvDA,SAAA,GAAYiB,IAAA;QACd,OAAO,IAAIA,IAAA,KAASjB,SAAA,EAAW;UAC7BA,SAAA,GAAY;QACd;QAEA,IAAIiB,IAAA,KAAS,OAAOC,QAAA,KAAa,OAAOnB,YAAA,KAAiB,KAAK,CAACC,SAAA,EAAW;UACxEC,aAAA,GAAgB;UAChBK,YAAA,GAAeE,SAAA;UACfH,oBAAA,GAAuBW,SAAA,GAAY;UAEnC,MAAMT,QAAA;QACR,OAAO,IAAIU,IAAA,KAAS,OAAOlB,YAAA,KAAiB,KAAK,CAACC,SAAA,EAAW;UAC3DH,eAAA,GAAkB;UAClBmB,SAAA;UACA;QACF;QAEAvB,WAAA,IAAewB,IAAA;MACjB,OAAO;QACL,IAAIA,IAAA,KAAS,KAAK;UAChBf,uBAAA;QACF;QAEA,IAAIA,uBAAA,GAA0B,MAAM,GAAG;UACrC,IAAIe,IAAA,KAAS,OAAOC,QAAA,KAAa,KAAK;YACpCpB,wBAAA;YAEA,IAAIA,wBAAA,GAA2B,GAAG;cAChC,IAAIJ,OAAO,CAACA,OAAA,CAAQqB,MAAM,GAAG,EAAE,KAAK,MAAM;gBACxCrB,OAAA,GAAUA,OAAA,CAAQE,KAAK,CAAC,GAAG,CAAC,GAAG;gBAAA;cACjC;cACAU,YAAA,GAAeE,SAAA;cACf;cACA,KAAK,IAAIM,CAAA,GAAIE,SAAA,EAAWF,CAAA,GAAIH,IAAA,CAAKI,MAAM,EAAED,CAAA,IAAK;gBAC5C,IAAIH,IAAI,CAACG,CAAA,CAAE,KAAK,KAAK;kBACnBT,oBAAA,GAAuBS,CAAA,GAAI;kBAE3B;gBACF;cACF;cACA,MAAMP,QAAA;YACR;UACF,OAAO,IAAIU,IAAA,KAAS,OAAOC,QAAA,KAAa,KAAK;YAC3CpB,wBAAA;YAEA,IAAIA,wBAAA,GAA2B,GAAG;cAChC,IAAIJ,OAAO,CAACA,OAAA,CAAQqB,MAAM,GAAG,EAAE,KAAK,MAAM;gBACxCrB,OAAA,GAAUA,OAAA,CAAQE,KAAK,CAAC,GAAG,CAAC,GAAG;gBAAA;cACjC;cACAU,YAAA,GAAeE,SAAA;cACfH,oBAAA,GAAuBW,SAAA,GAAY;cACnC,MAAMT,QAAA;YACR;UACF,OAAO,IAAIU,IAAA,KAAS,OAAOC,QAAA,KAAa,KAAK;YAC3CpB,wBAAA;UACF;QACF;QAEAJ,OAAA,IAAWuB,IAAA;MACb;MAEAD,SAAA;IACF;IAEA,IAAInB,eAAA,EAAiB;MACnB,IAAIH,OAAA,EAASqB,MAAA,GAAS,KAAKP,SAAA,GAAY,GAAG;QACxCd,OAAA,IAAW;MACb;IACF,OAAO;MACLD,WAAA,IAAe;IACjB;IAEA,IAAIJ,cAAA,IAAkBS,wBAAA,GAA2B,GAAG;MAClD;MACA,MAAMqB,KAAA,GAAQR,IAAA,CAAKQ,KAAK,CAAC9B,cAAA;MACzB,IAAI8B,KAAA,EAAOf,KAAA,KAAUgB,SAAA,EAAW;QAC9Bd,YAAA,GAAeE,SAAA;QACfH,oBAAA,GAAuBc,KAAA,CAAMf,KAAK,GAAGe,KAAK,CAAC,EAAE,CAACJ,MAAM,GAAG;QACvD;MACF;IACF;IAEA,IAAIP,SAAA,KAAcb,SAAA,CAAUoB,MAAM,GAAG,KAAK,CAAC5B,aAAA,IAAiB,CAACc,aAAA,EAAe;MAC1E,MAAM,IAAIoB,KAAA,CACR,mCACEjC,KAAA,CAAMkC,IAAI,CAAC,QACX,wBACAC,IAAA,CAAKC,SAAS,CAACjC,UAAA;IAErB;EACF;EAEA,MAAMkC,YAAA,GAAe9B,S
AAS,CAACW,YAAA,CAAa,CAAEM,IAAI,GAAGhB,KAAK,CAACS,oBAAA;EAE3D,OAAO;IACLoB,YAAA;IACAtB,eAAA;IACAT,OAAA;IACAY,YAAA,EAAchB,cAAA,GAAiBgB,YAAA;IAC/BD,oBAAA;IACAZ;EACF;AACF","ignoreList":[]}

@@ -1,15 +0,0 @@
-import type { Block } from 'payload';
-import type { NodeWithHooks } from '../../../typesServer.js';
-import { type MultilineElementTransformer, type TextMatchTransformer, type Transformer } from '../../../../packages/@lexical/markdown/index.js';
-export declare function createTagRegexes(tagName: string): {
-    regExpEnd: RegExp;
-    regExpStart: RegExp;
-};
-export declare const getBlockMarkdownTransformers: ({ blocks, inlineBlocks, }: {
-    blocks: Block[];
-    inlineBlocks: Block[];
-}) => ((props: {
-    allNodes: Array<NodeWithHooks>;
-    allTransformers: Transformer[];
-}) => MultilineElementTransformer | TextMatchTransformer)[];
-//# sourceMappingURL=markdownTransformer.d.ts.map
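
For reference, the TypeScript source behind the createTagRegexes declaration above, recovered verbatim from the sourcesContent of the deleted markdownTransformer.js.map below — it builds case-insensitive start/end regexes for a block's JSX-style tag:

```ts
export function createTagRegexes(tagName: string) {
  const escapedTagName = tagName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')

  // Regex components
  const openingTag = `<${escapedTagName}`
  const closingTag = `</${escapedTagName}`
  const optionalWhitespace = `\\s*`
  const mandatoryClosingBracket = `>`

  // Assembled regex patterns
  const startPattern = `${openingTag}(?=\\s|>|$)` // Only match the tag name
  const endPattern = `${closingTag}${optionalWhitespace}${mandatoryClosingBracket}`

  return {
    regExpEnd: new RegExp(endPattern, 'i'),
    regExpStart: new RegExp(startPattern, 'i'),
  }
}
```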

@@ -1 +0,0 @@
{"version":3,"file":"markdownTransformer.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/server/markdown/markdownTransformer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,SAAS,CAAA;AAIpC,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAA;AAE5D,OAAO,EACL,KAAK,2BAA2B,EAChC,KAAK,oBAAoB,EACzB,KAAK,WAAW,EACjB,MAAM,iDAAiD,CAAA;AAaxD,wBAAgB,gBAAgB,CAAC,OAAO,EAAE,MAAM;;;EAiB/C;AACD,eAAO,MAAM,4BAA4B,8BAGtC;IACD,MAAM,EAAE,KAAK,EAAE,CAAA;IACf,YAAY,EAAE,KAAK,EAAE,CAAA;CACtB,KAAG,CAAC,CAAC,KAAK,EAAE;IACX,QAAQ,EAAE,KAAK,CAAC,aAAa,CAAC,CAAA;IAC9B,eAAe,EAAE,WAAW,EAAE,CAAA;CAC/B,KAAK,2BAA2B,GAAG,oBAAoB,CAAC,EA+BxD,CAAA"}

@@ -1 +0,0 @@
{"version":3,"file":"markdownTransformer.js","names":["$parseSerializedNode","extractPropsFromJSXPropsString","propsToJSXString","$createServerBlockNode","$isServerBlockNode","ServerBlockNode","$createServerInlineBlockNode","$isServerInlineBlockNode","ServerInlineBlockNode","getLexicalToMarkdown","getMarkdownToLexical","linesFromStartToContentAndPropsString","createTagRegexes","tagName","escapedTagName","replace","openingTag","closingTag","optionalWhitespace","mandatoryClosingBracket","startPattern","endPattern","regExpEnd","RegExp","regExpStart","getBlockMarkdownTransformers","blocks","inlineBlocks","length","transformers","block","transformer","getMarkdownTransformerForBlock","concat","isInlineBlock","jsx","regex","slug","toReturn","push","allNodes","allTransformers","type","dependencies","export","node","getFields","blockType","toLowerCase","nodeFields","lexicalToMarkdown","exportResult","fields","hasProps","props","Object","keys","children","getEndIndex","match","endlineLastCharIndex","isEndOptional","lines","getTextContent","regexpEndRegex","startLineIndex","startMatch","trimChildren","importRegExp","customStartRegex","regExp","content","propsString","index","import","markdownToLexical","blockFields","closeMatch","htmlToLexical","openMatch","inlineBlockNode","sanitizedChildren","includes","child","split","sanitizedChild","doNotTrimChildren","handleImportAfterStartMatch","customEndRegex","undefined","rootNode","optional","afterEndLine","beforeStartLine","unsanitizedContent","endLineIndex","endsWith","slice","startsWith","prevNodes","nextNodes","markdown","root","firstPrevNode","append","lastChild","getChildren","linesInBetween","line","childrenString","join","trim"],"sources":["../../../../../src/features/blocks/server/markdown/markdownTransformer.ts"],"sourcesContent":["import type { ElementNode, SerializedLexicalNode } from 'lexical'\nimport type { Block } from 'payload'\n\nimport { $parseSerializedNode } from 'lexical'\n\nimport type { NodeWithHooks } from '../../../typesServer.js'\n\nimport {\n type MultilineElementTransformer,\n type TextMatchTransformer,\n type Transformer,\n} from '../../../../packages/@lexical/markdown/index.js'\nimport { extractPropsFromJSXPropsString } from '../../../../utilities/jsx/extractPropsFromJSXPropsString.js'\nimport { propsToJSXString } from '../../../../utilities/jsx/jsx.js'\nimport { $createServerBlockNode, $isServerBlockNode, ServerBlockNode } from '../nodes/BlocksNode.js'\nimport {\n $createServerInlineBlockNode,\n $isServerInlineBlockNode,\n ServerInlineBlockNode,\n} from '../nodes/InlineBlocksNode.js'\nimport { getLexicalToMarkdown } from './getLexicalToMarkdown.js'\nimport { getMarkdownToLexical } from './getMarkdownToLexical.js'\nimport { linesFromStartToContentAndPropsString } from './linesFromMatchToContentAndPropsString.js'\n\nexport function createTagRegexes(tagName: string) {\n const escapedTagName = tagName.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n\n // Regex components\n const openingTag = `<${escapedTagName}`\n const closingTag = `</${escapedTagName}`\n const optionalWhitespace = `\\\\s*`\n const mandatoryClosingBracket = `>`\n\n // Assembled regex patterns\n const startPattern = `${openingTag}(?=\\\\s|>|$)` // Only match the tag name\n const endPattern = `${closingTag}${optionalWhitespace}${mandatoryClosingBracket}`\n\n return {\n regExpEnd: new RegExp(endPattern, 'i'),\n regExpStart: new RegExp(startPattern, 'i'),\n }\n}\nexport const getBlockMarkdownTransformers = ({\n blocks,\n inlineBlocks,\n}: {\n blocks: Block[]\n 
inlineBlocks: Block[]\n}): ((props: {\n allNodes: Array<NodeWithHooks>\n allTransformers: Transformer[]\n}) => MultilineElementTransformer | TextMatchTransformer)[] => {\n if (!blocks?.length && !inlineBlocks?.length) {\n return []\n }\n\n let transformers: ((props: {\n allNodes: Array<NodeWithHooks>\n allTransformers: Transformer[]\n }) => MultilineElementTransformer | TextMatchTransformer)[] = []\n\n if (blocks?.length) {\n for (const block of blocks) {\n const transformer = getMarkdownTransformerForBlock(block, false)\n\n if (transformer) {\n transformers = transformers.concat(transformer)\n }\n }\n }\n\n if (inlineBlocks?.length) {\n for (const block of inlineBlocks) {\n const transformer = getMarkdownTransformerForBlock(block, true)\n\n if (transformer) {\n transformers = transformers.concat(transformer)\n }\n }\n }\n\n return transformers\n}\n\nfunction getMarkdownTransformerForBlock(\n block: Block,\n isInlineBlock: boolean,\n): Array<\n (props: {\n allNodes: Array<NodeWithHooks>\n allTransformers: Transformer[]\n }) => MultilineElementTransformer | TextMatchTransformer\n> | null {\n if (!block.jsx) {\n return null\n }\n const regex = createTagRegexes(block.slug)\n const toReturn: Array<\n (props: {\n allNodes: Array<NodeWithHooks>\n allTransformers: Transformer[]\n }) => MultilineElementTransformer | TextMatchTransformer\n > = []\n\n if (isInlineBlock) {\n toReturn.push(({ allNodes, allTransformers }) => ({\n type: 'text-match',\n dependencies: [ServerInlineBlockNode],\n export: (node) => {\n if (!$isServerInlineBlockNode(node)) {\n return null\n }\n\n if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) {\n return null\n }\n\n const nodeFields = node.getFields()\n const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers)\n\n const exportResult = block.jsx!.export({\n fields: nodeFields,\n lexicalToMarkdown,\n })\n if (exportResult === false) {\n return null\n }\n if (typeof exportResult === 'string') {\n return exportResult\n }\n\n const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0\n const props = exportResult.props ?? {}\n\n if (exportResult?.children?.length) {\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}>${exportResult.children}</${nodeFields.blockType}>`\n }\n\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}/>`\n },\n getEndIndex: (node, match) => {\n const { endlineLastCharIndex } = linesFromStartToContentAndPropsString({\n isEndOptional: false,\n lines: [node.getTextContent()],\n regexpEndRegex: regex.regExpEnd,\n startLineIndex: 0,\n startMatch: match,\n trimChildren: false,\n })\n\n return endlineLastCharIndex\n },\n importRegExp: block.jsx?.customStartRegex ?? regex.regExpStart,\n regExp: /___ignoreignoreignore___/g,\n replace(node, match) {\n const { content, propsString } = linesFromStartToContentAndPropsString({\n isEndOptional: false,\n lines: [node.getTextContent()],\n regexpEndRegex: regex.regExpEnd,\n startLineIndex: 0,\n startMatch: {\n ...match,\n index: 0,\n },\n trimChildren: false,\n })\n\n if (!block?.jsx?.import) {\n // No multiline transformer handled this line successfully\n return\n }\n\n const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers)\n\n const blockFields = block.jsx.import({\n children: content,\n closeMatch: null,\n htmlToLexical: null, // TODO\n markdownToLexical,\n openMatch: match,\n props: propsString\n ? 
extractPropsFromJSXPropsString({\n propsString,\n })\n : {},\n })\n if (blockFields === false) {\n return\n }\n\n const inlineBlockNode = $createServerInlineBlockNode({\n blockType: block.slug,\n ...(blockFields as any),\n })\n\n node.replace(inlineBlockNode)\n },\n }))\n\n return toReturn\n }\n\n toReturn.push(({ allNodes, allTransformers }) => ({\n dependencies: [ServerBlockNode],\n export: (node) => {\n if (!$isServerBlockNode(node)) {\n return null\n }\n\n if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) {\n return null\n }\n\n const nodeFields = node.getFields()\n const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers)\n\n const exportResult = block.jsx!.export({\n fields: nodeFields,\n lexicalToMarkdown,\n })\n if (exportResult === false) {\n return null\n }\n if (typeof exportResult === 'string') {\n return exportResult\n }\n\n const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0\n const props = exportResult.props ?? {}\n\n if (exportResult?.children?.length) {\n const children = exportResult.children\n let sanitizedChildren = ''\n\n // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child\n if (children.includes('\\n')) {\n for (const child of children.split('\\n')) {\n let sanitizedChild = ''\n if (!block?.jsx?.doNotTrimChildren && child !== '') {\n sanitizedChild = ' '\n }\n sanitizedChild += child + '\\n'\n\n sanitizedChildren += sanitizedChild\n }\n } else {\n sanitizedChildren = (block?.jsx?.doNotTrimChildren ? '' : ' ') + children + '\\n'\n }\n\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}>\\n${sanitizedChildren}</${nodeFields.blockType}>`\n }\n\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}/>`\n },\n handleImportAfterStartMatch: block.jsx?.customEndRegex\n ? undefined\n : ({ lines, rootNode, startLineIndex, startMatch, transformer }) => {\n const regexpEndRegex: RegExp | undefined =\n typeof transformer.regExpEnd === 'object' && 'regExp' in transformer.regExpEnd\n ? transformer.regExpEnd.regExp\n : transformer.regExpEnd\n\n const isEndOptional =\n transformer.regExpEnd &&\n typeof transformer.regExpEnd === 'object' &&\n 'optional' in transformer.regExpEnd\n ? transformer.regExpEnd.optional\n : !transformer.regExpEnd\n\n const {\n afterEndLine,\n beforeStartLine,\n content: unsanitizedContent,\n endLineIndex,\n propsString,\n } = linesFromStartToContentAndPropsString({\n isEndOptional,\n lines,\n regexpEndRegex,\n startLineIndex,\n startMatch,\n trimChildren: false,\n })\n\n let content = ''\n\n if (block?.jsx?.doNotTrimChildren) {\n content = unsanitizedContent.endsWith('\\n')\n ? unsanitizedContent.slice(0, -1)\n : unsanitizedContent\n } else {\n // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child\n if (unsanitizedContent.includes('\\n')) {\n const split = unsanitizedContent.split('\\n')\n let index = 0\n for (const child of split) {\n index++\n\n if (child.startsWith(' ')) {\n content += child.slice(2)\n } else {\n // If one child is misaligned, skip aligning completely, unless it's just empty\n if (child === '') {\n content += child\n } else {\n content = unsanitizedContent.endsWith('\\n')\n ? unsanitizedContent.slice(0, -1)\n : unsanitizedContent\n break\n }\n }\n\n content += index === split.length ? 
'' : '\\n'\n }\n } else {\n content =\n (!unsanitizedContent.startsWith(' ')\n ? unsanitizedContent\n : unsanitizedContent.slice(2)) + '\\n'\n }\n }\n\n if (!block?.jsx?.import) {\n // No multiline transformer handled this line successfully\n return [false, startLineIndex]\n }\n\n const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers)\n\n const blockFields = block.jsx.import({\n children: content,\n closeMatch: null,\n htmlToLexical: null, // TODO\n markdownToLexical,\n openMatch: startMatch,\n props: propsString\n ? extractPropsFromJSXPropsString({\n propsString,\n })\n : {},\n })\n if (blockFields === false) {\n return [false, startLineIndex]\n }\n\n const node = $createServerBlockNode({\n blockType: block.slug,\n ...blockFields,\n } as any)\n\n if (node) {\n // Now handle beforeStartLine and afterEndLine. If those are not empty, we need to add them as text nodes before and after the block node.\n // However, those themselves can contain other markdown matches, so we need to parse them as well.\n // Example where this is needed: \"Hello <InlineCode>inline code</InlineCode> test.\"\n let prevNodes: null | SerializedLexicalNode[] = null\n let nextNodes: null | SerializedLexicalNode[] = null\n // TODO: Might not need this prevNodes and nextNodes handling if inline nodes are handled by textmatch transformers\n\n if (beforeStartLine?.length) {\n prevNodes = markdownToLexical({ markdown: beforeStartLine })?.root?.children ?? []\n\n const firstPrevNode = prevNodes?.[0]\n if (firstPrevNode) {\n rootNode.append($parseSerializedNode(firstPrevNode))\n }\n }\n\n rootNode.append(node)\n\n if (afterEndLine?.length) {\n nextNodes = markdownToLexical({ markdown: afterEndLine })?.root?.children\n const lastChild = rootNode.getChildren()[rootNode.getChildren().length - 1]\n\n const children = ($parseSerializedNode(nextNodes[0]!) as ElementNode)?.getChildren()\n if (children?.length) {\n for (const child of children) {\n ;(lastChild as ElementNode).append(child)\n }\n }\n }\n }\n\n return [true, endLineIndex]\n },\n regExpEnd: block.jsx?.customEndRegex ?? regex.regExpEnd,\n regExpStart: block.jsx?.customStartRegex ?? regex.regExpStart,\n // This replace is ONLY run for ``` code blocks (so any blocks with custom start and end regexes). For others, we use the special JSX handling above:\n type: 'multiline-element',\n replace: (rootNode, children, openMatch, closeMatch, linesInBetween) => {\n if (block?.jsx?.import) {\n if (!linesInBetween) {\n // convert children to linesInBetween\n let line = ''\n if (children) {\n for (const child of children) {\n line += child.getTextContent()\n }\n }\n\n linesInBetween = [line]\n }\n\n let childrenString = ''\n if (block?.jsx?.doNotTrimChildren) {\n childrenString = linesInBetween.join('\\n')\n } else {\n childrenString = linesInBetween.join('\\n').trim()\n }\n\n const propsString = openMatch[1]?.trim()\n\n const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers)\n\n const blockFields = block.jsx.import({\n children: childrenString,\n closeMatch: closeMatch as RegExpMatchArray,\n htmlToLexical: null, // TODO\n markdownToLexical,\n openMatch: openMatch as RegExpMatchArray,\n props: propsString\n ? 
extractPropsFromJSXPropsString({\n propsString,\n })\n : {},\n })\n if (blockFields === false) {\n return false\n }\n\n const node = $createServerBlockNode({\n blockType: block.slug,\n ...blockFields,\n } as any)\n\n if (node) {\n rootNode.append(node)\n }\n\n return\n }\n return false // Run next transformer\n },\n }))\n\n return toReturn\n}\n"],"mappings":"AAGA,SAASA,oBAAoB,QAAQ;AASrC,SAASC,8BAA8B,QAAQ;AAC/C,SAASC,gBAAgB,QAAQ;AACjC,SAASC,sBAAsB,EAAEC,kBAAkB,EAAEC,eAAe,QAAQ;AAC5E,SACEC,4BAA4B,EAC5BC,wBAAwB,EACxBC,qBAAqB,QAChB;AACP,SAASC,oBAAoB,QAAQ;AACrC,SAASC,oBAAoB,QAAQ;AACrC,SAASC,qCAAqC,QAAQ;AAEtD,OAAO,SAASC,iBAAiBC,OAAe;EAC9C,MAAMC,cAAA,GAAiBD,OAAA,CAAQE,OAAO,CAAC,uBAAuB;EAE9D;EACA,MAAMC,UAAA,GAAa,IAAIF,cAAA,EAAgB;EACvC,MAAMG,UAAA,GAAa,KAAKH,cAAA,EAAgB;EACxC,MAAMI,kBAAA,GAAqB,MAAM;EACjC,MAAMC,uBAAA,GAA0B,GAAG;EAEnC;EACA,MAAMC,YAAA,GAAe,GAAGJ,UAAA,aAAuB,CAAC;EAAA;EAChD,MAAMK,UAAA,GAAa,GAAGJ,UAAA,GAAaC,kBAAA,GAAqBC,uBAAA,EAAyB;EAEjF,OAAO;IACLG,SAAA,EAAW,IAAIC,MAAA,CAAOF,UAAA,EAAY;IAClCG,WAAA,EAAa,IAAID,MAAA,CAAOH,YAAA,EAAc;EACxC;AACF;AACA,OAAO,MAAMK,4BAAA,GAA+BA,CAAC;EAC3CC,MAAM;EACNC;AAAY,CAIb;EAIC,IAAI,CAACD,MAAA,EAAQE,MAAA,IAAU,CAACD,YAAA,EAAcC,MAAA,EAAQ;IAC5C,OAAO,EAAE;EACX;EAEA,IAAIC,YAAA,GAG0D,EAAE;EAEhE,IAAIH,MAAA,EAAQE,MAAA,EAAQ;IAClB,KAAK,MAAME,KAAA,IAASJ,MAAA,EAAQ;MAC1B,MAAMK,WAAA,GAAcC,8BAAA,CAA+BF,KAAA,EAAO;MAE1D,IAAIC,WAAA,EAAa;QACfF,YAAA,GAAeA,YAAA,CAAaI,MAAM,CAACF,WAAA;MACrC;IACF;EACF;EAEA,IAAIJ,YAAA,EAAcC,MAAA,EAAQ;IACxB,KAAK,MAAME,KAAA,IAASH,YAAA,EAAc;MAChC,MAAMI,WAAA,GAAcC,8BAAA,CAA+BF,KAAA,EAAO;MAE1D,IAAIC,WAAA,EAAa;QACfF,YAAA,GAAeA,YAAA,CAAaI,MAAM,CAACF,WAAA;MACrC;IACF;EACF;EAEA,OAAOF,YAAA;AACT;AAEA,SAASG,+BACPF,KAAY,EACZI,aAAsB;EAOtB,IAAI,CAACJ,KAAA,CAAMK,GAAG,EAAE;IACd,OAAO;EACT;EACA,MAAMC,KAAA,GAAQxB,gBAAA,CAAiBkB,KAAA,CAAMO,IAAI;EACzC,MAAMC,QAAA,GAKF,EAAE;EAEN,IAAIJ,aAAA,EAAe;IACjBI,QAAA,CAASC,IAAI,CAAC,CAAC;MAAEC,QAAQ;MAAEC;IAAe,CAAE,MAAM;MAChDC,IAAA,EAAM;MACNC,YAAA,EAAc,CAACnC,qBAAA,CAAsB;MACrCoC,MAAA,EAASC,IAAA;QACP,IAAI,CAACtC,wBAAA,CAAyBsC,IAAA,GAAO;UACnC,OAAO;QACT;QAEA,IAAIA,IAAA,CAAKC,SAAS,IAAIC,SAAA,EAAWC,WAAA,OAAkBlB,KAAA,CAAMO,IAAI,CAACW,WAAW,IAAI;UAC3E,OAAO;QACT;QAEA,MAAMC,UAAA,GAAaJ,IAAA,CAAKC,SAAS;QACjC,MAAMI,iBAAA,GAAoBzC,oBAAA,CAAqB+B,QAAA,EAAUC,eAAA;QAEzD,MAAMU,YAAA,GAAerB,KAAA,CAAMK,GAAG,CAAES,MAAM,CAAC;UACrCQ,MAAA,EAAQH,UAAA;UACRC;QACF;QACA,IAAIC,YAAA,KAAiB,OAAO;UAC1B,OAAO;QACT;QACA,IAAI,OAAOA,YAAA,KAAiB,UAAU;UACpC,OAAOA,YAAA;QACT;QAEA,MAAME,QAAA,GAAWF,YAAA,CAAaG,KAAK,IAAIC,MAAA,CAAOC,IAAI,CAACL,YAAA,CAAaG,KAAK,GAAG1B,MAAA,GAAS;QACjF,MAAM0B,KAAA,GAAQH,YAAA,CAAaG,KAAK,IAAI,CAAC;QAErC,IAAIH,YAAA,EAAcM,QAAA,EAAU7B,MAAA,EAAQ;UAClC,OAAO,IAAIqB,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAMnD,gBAAA,CAAiB;YAAEoD;UAAM,KAAK,MAAMH,YAAA,CAAaM,QAAQ,KAAKR,UAAA,CAAWF,SAAS,GAAG;QAC1I;QAEA,OAAO,IAAIE,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAMnD,gBAAA,CAAiB;UAAEoD;QAAM,KAAK,MAAM;MACzF;MACAI,WAAA,EAAaA,CAACb,IAAA,EAAMc,KAAA;QAClB,MAAM;UAAEC;QAAoB,CAAE,GAAGjD,qCAAA,CAAsC;UACrEkD,aAAA,EAAe;UACfC,KAAA,EAAO,CAACjB,IAAA,CAAKkB,cAAc,GAAG;UAC9BC,cAAA,EAAgB5B,KAAA,CAAMd,SAAS;UAC/B2C,cAAA,EAAgB;UAChBC,UAAA,EAAYP,KAAA;UACZQ,YAAA,EAAc;QAChB;QAEA,OAAOP,oBAAA;MACT;MACAQ,YAAA,EAActC,KAAA,CAAMK,GAAG,EAAEkC,gBAAA,IAAoBjC,KAAA,CAAMZ,WAAW;MAC9D8C,MAAA,EAAQ;MACRvD,QAAQ8B,IAAI,EAAEc,KAAK;QACjB,MAAM;UAAEY,OAAO;UAAEC;QAAW,CAAE,GAAG7D,qCAAA,CAAsC;UACrEkD,aAAA,EAAe;UACfC,KAAA,EAAO,CAACjB,IAAA,CAAKkB,cAAc,GAAG;UAC9BC,cAAA,EAAgB5B,KAAA,CAAMd,SAAS;UAC/B2C,cAAA,EAAgB;UAChBC,UAAA,EAAY;YACV,GAAGP,KAAK;YACRc,KAAA,EAAO;UACT;UACAN,YAAA,EAAc;QAChB;QAEA,IAAI,CAACrC,KAAA,EAAOK,GAAA,EAAKuC,MAAA,EAAQ;UACvB;UACA;QACF
;QAEA,MAAMC,iBAAA,GAAoBjE,oBAAA,CAAqB8B,QAAA,EAAUC,eAAA;QAEzD,MAAMmC,WAAA,GAAc9C,KAAA,CAAMK,GAAG,CAACuC,MAAM,CAAC;UACnCjB,QAAA,EAAUc,OAAA;UACVM,UAAA,EAAY;UACZC,aAAA,EAAe;UACfH,iBAAA;UACAI,SAAA,EAAWpB,KAAA;UACXL,KAAA,EAAOkB,WAAA,GACHvE,8BAAA,CAA+B;YAC7BuE;UACF,KACA,CAAC;QACP;QACA,IAAII,WAAA,KAAgB,OAAO;UACzB;QACF;QAEA,MAAMI,eAAA,GAAkB1E,4BAAA,CAA6B;UACnDyC,SAAA,EAAWjB,KAAA,CAAMO,IAAI;UACrB,GAAIuC;QACN;QAEA/B,IAAA,CAAK9B,OAAO,CAACiE,eAAA;MACf;IACF;IAEA,OAAO1C,QAAA;EACT;EAEAA,QAAA,CAASC,IAAI,CAAC,CAAC;IAAEC,QAAQ;IAAEC;EAAe,CAAE,MAAM;IAChDE,YAAA,EAAc,CAACtC,eAAA,CAAgB;IAC/BuC,MAAA,EAASC,IAAA;MACP,IAAI,CAACzC,kBAAA,CAAmByC,IAAA,GAAO;QAC7B,OAAO;MACT;MAEA,IAAIA,IAAA,CAAKC,SAAS,IAAIC,SAAA,EAAWC,WAAA,OAAkBlB,KAAA,CAAMO,IAAI,CAACW,WAAW,IAAI;QAC3E,OAAO;MACT;MAEA,MAAMC,UAAA,GAAaJ,IAAA,CAAKC,SAAS;MACjC,MAAMI,iBAAA,GAAoBzC,oBAAA,CAAqB+B,QAAA,EAAUC,eAAA;MAEzD,MAAMU,YAAA,GAAerB,KAAA,CAAMK,GAAG,CAAES,MAAM,CAAC;QACrCQ,MAAA,EAAQH,UAAA;QACRC;MACF;MACA,IAAIC,YAAA,KAAiB,OAAO;QAC1B,OAAO;MACT;MACA,IAAI,OAAOA,YAAA,KAAiB,UAAU;QACpC,OAAOA,YAAA;MACT;MAEA,MAAME,QAAA,GAAWF,YAAA,CAAaG,KAAK,IAAIC,MAAA,CAAOC,IAAI,CAACL,YAAA,CAAaG,KAAK,GAAG1B,MAAA,GAAS;MACjF,MAAM0B,KAAA,GAAQH,YAAA,CAAaG,KAAK,IAAI,CAAC;MAErC,IAAIH,YAAA,EAAcM,QAAA,EAAU7B,MAAA,EAAQ;QAClC,MAAM6B,QAAA,GAAWN,YAAA,CAAaM,QAAQ;QACtC,IAAIwB,iBAAA,GAAoB;QAExB;QACA,IAAIxB,QAAA,CAASyB,QAAQ,CAAC,OAAO;UAC3B,KAAK,MAAMC,KAAA,IAAS1B,QAAA,CAAS2B,KAAK,CAAC,OAAO;YACxC,IAAIC,cAAA,GAAiB;YACrB,IAAI,CAACvD,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,IAAqBH,KAAA,KAAU,IAAI;cAClDE,cAAA,GAAiB;YACnB;YACAA,cAAA,IAAkBF,KAAA,GAAQ;YAE1BF,iBAAA,IAAqBI,cAAA;UACvB;QACF,OAAO;UACLJ,iBAAA,GAAoB,CAACnD,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,GAAoB,KAAK,IAAG,IAAK7B,QAAA,GAAW;QAC/E;QAEA,OAAO,IAAIR,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAMnD,gBAAA,CAAiB;UAAEoD;QAAM,KAAK,QAAQ2B,iBAAA,KAAsBhC,UAAA,CAAWF,SAAS,GAAG;MACxI;MAEA,OAAO,IAAIE,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAMnD,gBAAA,CAAiB;QAAEoD;MAAM,KAAK,MAAM;IACzF;IACAiC,2BAAA,EAA6BzD,KAAA,CAAMK,GAAG,EAAEqD,cAAA,GACpCC,SAAA,GACA,CAAC;MAAE3B,KAAK;MAAE4B,QAAQ;MAAEzB,cAAc;MAAEC,UAAU;MAAEnC;IAAW,CAAE;MAC3D,MAAMiC,cAAA,GACJ,OAAOjC,WAAA,CAAYT,SAAS,KAAK,YAAY,YAAYS,WAAA,CAAYT,SAAS,GAC1ES,WAAA,CAAYT,SAAS,CAACgD,MAAM,GAC5BvC,WAAA,CAAYT,SAAS;MAE3B,MAAMuC,aAAA,GACJ9B,WAAA,CAAYT,SAAS,IACrB,OAAOS,WAAA,CAAYT,SAAS,KAAK,YACjC,cAAcS,WAAA,CAAYT,SAAS,GAC/BS,WAAA,CAAYT,SAAS,CAACqE,QAAQ,GAC9B,CAAC5D,WAAA,CAAYT,SAAS;MAE5B,MAAM;QACJsE,YAAY;QACZC,eAAe;QACftB,OAAA,EAASuB,kBAAkB;QAC3BC,YAAY;QACZvB;MAAW,CACZ,GAAG7D,qCAAA,CAAsC;QACxCkD,aAAA;QACAC,KAAA;QACAE,cAAA;QACAC,cAAA;QACAC,UAAA;QACAC,YAAA,EAAc;MAChB;MAEA,IAAII,OAAA,GAAU;MAEd,IAAIzC,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,EAAmB;QACjCf,OAAA,GAAUuB,kBAAA,CAAmBE,QAAQ,CAAC,QAClCF,kBAAA,CAAmBG,KAAK,CAAC,GAAG,CAAC,KAC7BH,kBAAA;MACN,OAAO;QACL;QACA,IAAIA,kBAAA,CAAmBZ,QAAQ,CAAC,OAAO;UACrC,MAAME,KAAA,GAAQU,kBAAA,CAAmBV,KAAK,CAAC;UACvC,IAAIX,KAAA,GAAQ;UACZ,KAAK,MAAMU,KAAA,IAASC,KAAA,EAAO;YACzBX,KAAA;YAEA,IAAIU,KAAA,CAAMe,UAAU,CAAC,OAAO;cAC1B3B,OAAA,IAAWY,KAAA,CAAMc,KAAK,CAAC;YACzB,OAAO;cACL;cACA,IAAId,KAAA,KAAU,IAAI;gBAChBZ,OAAA,IAAWY,KAAA;cACb,OAAO;gBACLZ,OAAA,GAAUuB,kBAAA,CAAmBE,QAAQ,CAAC,QAClCF,kBAAA,CAAmBG,KAAK,CAAC,GAAG,CAAC,KAC7BH,kBAAA;gBACJ;cACF;YACF;YAEAvB,OAAA,IAAWE,KAAA,KAAUW,KAAA,CAAMxD,MAAM,GAAG,KAAK;UAC3C;QACF,OAAO;UACL2C,OAAA,GACE,CAAC,CAACuB,kBAAA,CAAmBI,UAAU,CAAC,QAC5BJ,kBAAA,GACAA,kBAAA,CAAmBG,KAAK,CAAC,EAAC,IAAK;QACvC;MACF;MAEA,IAAI,CAACnE,KAAA,EAAOK,GAAA,EAAKuC,MAAA,EAAQ;QACvB;QACA,OAAO,CAAC,OAAOT,cAAA,CAAe;MAChC;MAEA,MAAMU,iBAAA,GAAoBjE,oBAAA,CAAqB8B,QAAA,EAAUC,eAAA;MAEzD,MAAMmC,WAAA,GAAc9C,KAAA,CAAMK,GAAG,CAACuC,MAAM,CAAC;QACnCjB,QAAA,EAAUc,OAAA;QACVM,UAAA,EA
AY;QACZC,aAAA,EAAe;QACfH,iBAAA;QACAI,SAAA,EAAWb,UAAA;QACXZ,KAAA,EAAOkB,WAAA,GACHvE,8BAAA,CAA+B;UAC7BuE;QACF,KACA,CAAC;MACP;MACA,IAAII,WAAA,KAAgB,OAAO;QACzB,OAAO,CAAC,OAAOX,cAAA,CAAe;MAChC;MAEA,MAAMpB,IAAA,GAAO1C,sBAAA,CAAuB;QAClC4C,SAAA,EAAWjB,KAAA,CAAMO,IAAI;QACrB,GAAGuC;MACL;MAEA,IAAI/B,IAAA,EAAM;QACR;QACA;QACA;QACA,IAAIsD,SAAA,GAA4C;QAChD,IAAIC,SAAA,GAA4C;QAChD;QAEA,IAAIP,eAAA,EAAiBjE,MAAA,EAAQ;UAC3BuE,SAAA,GAAYxB,iBAAA,CAAkB;YAAE0B,QAAA,EAAUR;UAAgB,IAAIS,IAAA,EAAM7C,QAAA,IAAY,EAAE;UAElF,MAAM8C,aAAA,GAAgBJ,SAAA,GAAY,EAAE;UACpC,IAAII,aAAA,EAAe;YACjBb,QAAA,CAASc,MAAM,CAACxG,oBAAA,CAAqBuG,aAAA;UACvC;QACF;QAEAb,QAAA,CAASc,MAAM,CAAC3D,IAAA;QAEhB,IAAI+C,YAAA,EAAchE,MAAA,EAAQ;UACxBwE,SAAA,GAAYzB,iBAAA,CAAkB;YAAE0B,QAAA,EAAUT;UAAa,IAAIU,IAAA,EAAM7C,QAAA;UACjE,MAAMgD,SAAA,GAAYf,QAAA,CAASgB,WAAW,EAAE,CAAChB,QAAA,CAASgB,WAAW,GAAG9E,MAAM,GAAG,EAAE;UAE3E,MAAM6B,QAAA,GAAYzD,oBAAA,CAAqBoG,SAAS,CAAC,EAAE,GAAoBM,WAAA;UACvE,IAAIjD,QAAA,EAAU7B,MAAA,EAAQ;YACpB,KAAK,MAAMuD,KAAA,IAAS1B,QAAA,EAAU;cAC1BgD,SAAA,CAA0BD,MAAM,CAACrB,KAAA;YACrC;UACF;QACF;MACF;MAEA,OAAO,CAAC,MAAMY,YAAA,CAAa;IAC7B;IACJzE,SAAA,EAAWQ,KAAA,CAAMK,GAAG,EAAEqD,cAAA,IAAkBpD,KAAA,CAAMd,SAAS;IACvDE,WAAA,EAAaM,KAAA,CAAMK,GAAG,EAAEkC,gBAAA,IAAoBjC,KAAA,CAAMZ,WAAW;IAC7D;IACAkB,IAAA,EAAM;IACN3B,OAAA,EAASA,CAAC2E,QAAA,EAAUjC,QAAA,EAAUsB,SAAA,EAAWF,UAAA,EAAY8B,cAAA;MACnD,IAAI7E,KAAA,EAAOK,GAAA,EAAKuC,MAAA,EAAQ;QACtB,IAAI,CAACiC,cAAA,EAAgB;UACnB;UACA,IAAIC,IAAA,GAAO;UACX,IAAInD,QAAA,EAAU;YACZ,KAAK,MAAM0B,KAAA,IAAS1B,QAAA,EAAU;cAC5BmD,IAAA,IAAQzB,KAAA,CAAMpB,cAAc;YAC9B;UACF;UAEA4C,cAAA,GAAiB,CAACC,IAAA,CAAK;QACzB;QAEA,IAAIC,cAAA,GAAiB;QACrB,IAAI/E,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,EAAmB;UACjCuB,cAAA,GAAiBF,cAAA,CAAeG,IAAI,CAAC;QACvC,OAAO;UACLD,cAAA,GAAiBF,cAAA,CAAeG,IAAI,CAAC,MAAMC,IAAI;QACjD;QAEA,MAAMvC,WAAA,GAAcO,SAAS,CAAC,EAAE,EAAEgC,IAAA;QAElC,MAAMpC,iBAAA,GAAoBjE,oBAAA,CAAqB8B,QAAA,EAAUC,eAAA;QAEzD,MAAMmC,WAAA,GAAc9C,KAAA,CAAMK,GAAG,CAACuC,MAAM,CAAC;UACnCjB,QAAA,EAAUoD,cAAA;UACVhC,UAAA,EAAYA,UAAA;UACZC,aAAA,EAAe;UACfH,iBAAA;UACAI,SAAA,EAAWA,SAAA;UACXzB,KAAA,EAAOkB,WAAA,GACHvE,8BAAA,CAA+B;YAC7BuE;UACF,KACA,CAAC;QACP;QACA,IAAII,WAAA,KAAgB,OAAO;UACzB,OAAO;QACT;QAEA,MAAM/B,IAAA,GAAO1C,sBAAA,CAAuB;UAClC4C,SAAA,EAAWjB,KAAA,CAAMO,IAAI;UACrB,GAAGuC;QACL;QAEA,IAAI/B,IAAA,EAAM;UACR6C,QAAA,CAASc,MAAM,CAAC3D,IAAA;QAClB;QAEA;MACF;MACA,OAAO,MAAM;MAAA;IACf;EACF;EAEA,OAAOP,QAAA;AACT","ignoreList":[]}

@@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../../../src/features/upload/client/component/pending/index.tsx"],"names":[],"mappings":"AAIA,OAAO,eAAe,CAAA;AAEtB,eAAO,MAAM,sBAAsB,QAAO,KAAK,CAAC,SAM/C,CAAA"}

@@ -1,14 +0,0 @@
-'use client';
-
-import { jsx as _jsx } from "react/jsx-runtime";
-import { ShimmerEffect } from '@payloadcms/ui';
-export const PendingUploadComponent = () => {
-  return /*#__PURE__*/_jsx("div", {
-    className: 'lexical-upload',
-    children: /*#__PURE__*/_jsx(ShimmerEffect, {
-      height: '95px',
-      width: '203px'
-    })
-  });
-};
-//# sourceMappingURL=index.js.map
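
For readability, the original TSX behind this compiled output, recovered verbatim from the deleted index.js.map that follows — a shimmer placeholder shown while an upload is still pending:

```tsx
'use client'

import { ShimmerEffect } from '@payloadcms/ui'

import '../index.scss'

export const PendingUploadComponent = (): React.ReactNode => {
  return (
    <div className={'lexical-upload'}>
      <ShimmerEffect height={'95px'} width={'203px'} />
    </div>
  )
}
```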

@@ -1 +0,0 @@
{"version":3,"file":"index.js","names":["ShimmerEffect","PendingUploadComponent","_jsx","className","height","width"],"sources":["../../../../../../src/features/upload/client/component/pending/index.tsx"],"sourcesContent":["'use client'\n\nimport { ShimmerEffect } from '@payloadcms/ui'\n\nimport '../index.scss'\n\nexport const PendingUploadComponent = (): React.ReactNode => {\n return (\n <div className={'lexical-upload'}>\n <ShimmerEffect height={'95px'} width={'203px'} />\n </div>\n )\n}\n"],"mappings":"AAAA;;;AAEA,SAASA,aAAa,QAAQ;AAI9B,OAAO,MAAMC,sBAAA,GAAyBA,CAAA;EACpC,oBACEC,IAAA,CAAC;IAAIC,SAAA,EAAW;cACd,aAAAD,IAAA,CAACF,aAAA;MAAcI,MAAA,EAAQ;MAAQC,KAAA,EAAO;;;AAG5C","ignoreList":[]}
@@ -1,6 +0,0 @@
import type { DOMConversionOutput } from 'lexical';
import type { $createUploadNode } from '../../client/nodes/UploadNode.js';
import type { $createUploadServerNode } from './UploadNode.js';
export declare function isGoogleDocCheckboxImg(img: HTMLImageElement): boolean;
export declare function $convertUploadElement(domNode: HTMLImageElement, $createNode: typeof $createUploadNode | typeof $createUploadServerNode): DOMConversionOutput | null;
//# sourceMappingURL=conversions.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"conversions.d.ts","sourceRoot":"","sources":["../../../../../src/features/upload/server/nodes/conversions.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAA;AAIlD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,kCAAkC,CAAA;AACzE,OAAO,KAAK,EAAE,uBAAuB,EAAuB,MAAM,iBAAiB,CAAA;AAEnF,wBAAgB,sBAAsB,CAAC,GAAG,EAAE,gBAAgB,GAAG,OAAO,CAOrE;AAED,wBAAgB,qBAAqB,CACnC,OAAO,EAAE,gBAAgB,EACzB,WAAW,EAAE,OAAO,iBAAiB,GAAG,OAAO,uBAAuB,GACrE,mBAAmB,GAAG,IAAI,CA8C5B"}
@@ -1,53 +0,0 @@
// This file contains functions used to convert dom elements to upload or pending upload lexical nodes. It requires the actual node
// creation functions to be passed in to stay compatible with both client and server code.
import ObjectID from 'bson-objectid';
export function isGoogleDocCheckboxImg(img) {
    return img.parentElement != null && img.parentElement.tagName === 'LI' && img.previousSibling === null && img.getAttribute('aria-roledescription') === 'checkbox';
}
export function $convertUploadElement(domNode, $createNode) {
    if (domNode.hasAttribute('data-lexical-pending-upload-form-id')) {
        const formID = domNode.getAttribute('data-lexical-pending-upload-form-id');
        if (formID != null) {
            const node = $createNode({
                data: {
                    pending: {
                        formID,
                        src: domNode.getAttribute('src') || ''
                    }
                }
            });
            return {
                node
            };
        }
    }
    if (domNode.hasAttribute('data-lexical-upload-relation-to') && domNode.hasAttribute('data-lexical-upload-id')) {
        const id = domNode.getAttribute('data-lexical-upload-id');
        const relationTo = domNode.getAttribute('data-lexical-upload-relation-to');
        if (id != null && relationTo != null) {
            const node = $createNode({
                data: {
                    fields: {},
                    relationTo,
                    value: id
                }
            });
            return {
                node
            };
        }
    }
    // Create pending UploadNode. Auto-Upload functionality will then be handled by the node transform
    const node = $createNode({
        data: {
            pending: {
                formID: new ObjectID.default().toHexString(),
                src: domNode.getAttribute('src') || ''
            }
        }
    });
    return {
        node
    };
}
//# sourceMappingURL=conversions.js.map
@@ -1 +0,0 @@
{"version":3,"file":"conversions.js","names":["ObjectID","isGoogleDocCheckboxImg","img","parentElement","tagName","previousSibling","getAttribute","$convertUploadElement","domNode","$createNode","hasAttribute","formID","node","data","pending","src","id","relationTo","fields","value","default","toHexString"],"sources":["../../../../../src/features/upload/server/nodes/conversions.ts"],"sourcesContent":["// This file contains functions used to convert dom elements to upload or pending upload lexical nodes. It requires the actual node\n// creation functions to be passed in to stay compatible with both client and server code.\nimport type { DOMConversionOutput } from 'lexical'\n\nimport ObjectID from 'bson-objectid'\n\nimport type { $createUploadNode } from '../../client/nodes/UploadNode.js'\nimport type { $createUploadServerNode, Internal_UploadData } from './UploadNode.js'\n\nexport function isGoogleDocCheckboxImg(img: HTMLImageElement): boolean {\n return (\n img.parentElement != null &&\n img.parentElement.tagName === 'LI' &&\n img.previousSibling === null &&\n img.getAttribute('aria-roledescription') === 'checkbox'\n )\n}\n\nexport function $convertUploadElement(\n domNode: HTMLImageElement,\n $createNode: typeof $createUploadNode | typeof $createUploadServerNode,\n): DOMConversionOutput | null {\n if (domNode.hasAttribute('data-lexical-pending-upload-form-id')) {\n const formID = domNode.getAttribute('data-lexical-pending-upload-form-id')\n\n if (formID != null) {\n const node = $createNode({\n data: {\n pending: {\n formID,\n src: domNode.getAttribute('src') || '',\n },\n } as Internal_UploadData,\n })\n return { node }\n }\n }\n if (\n domNode.hasAttribute('data-lexical-upload-relation-to') &&\n domNode.hasAttribute('data-lexical-upload-id')\n ) {\n const id = domNode.getAttribute('data-lexical-upload-id')\n const relationTo = domNode.getAttribute('data-lexical-upload-relation-to')\n\n if (id != null && relationTo != null) {\n const node = $createNode({\n data: {\n fields: {},\n relationTo,\n value: id,\n },\n })\n return { node }\n }\n }\n\n // Create pending UploadNode. Auto-Upload functionality will then be handled by the node transform\n const node = $createNode({\n data: {\n pending: {\n formID: new ObjectID.default().toHexString(),\n src: domNode.getAttribute('src') || '',\n },\n } as Internal_UploadData,\n })\n\n return { node }\n}\n"],"mappings":"AAAA;AACA;AAGA,OAAOA,QAAA,MAAc;AAKrB,OAAO,SAASC,uBAAuBC,GAAqB;EAC1D,OACEA,GAAA,CAAIC,aAAa,IAAI,QACrBD,GAAA,CAAIC,aAAa,CAACC,OAAO,KAAK,QAC9BF,GAAA,CAAIG,eAAe,KAAK,QACxBH,GAAA,CAAII,YAAY,CAAC,4BAA4B;AAEjD;AAEA,OAAO,SAASC,sBACdC,OAAyB,EACzBC,WAAsE;EAEtE,IAAID,OAAA,CAAQE,YAAY,CAAC,wCAAwC;IAC/D,MAAMC,MAAA,GAASH,OAAA,CAAQF,YAAY,CAAC;IAEpC,IAAIK,MAAA,IAAU,MAAM;MAClB,MAAMC,IAAA,GAAOH,WAAA,CAAY;QACvBI,IAAA,EAAM;UACJC,OAAA,EAAS;YACPH,MAAA;YACAI,GAAA,EAAKP,OAAA,CAAQF,YAAY,CAAC,UAAU;UACtC;QACF;MACF;MACA,OAAO;QAAEM;MAAK;IAChB;EACF;EACA,IACEJ,OAAA,CAAQE,YAAY,CAAC,sCACrBF,OAAA,CAAQE,YAAY,CAAC,2BACrB;IACA,MAAMM,EAAA,GAAKR,OAAA,CAAQF,YAAY,CAAC;IAChC,MAAMW,UAAA,GAAaT,OAAA,CAAQF,YAAY,CAAC;IAExC,IAAIU,EAAA,IAAM,QAAQC,UAAA,IAAc,MAAM;MACpC,MAAML,IAAA,GAAOH,WAAA,CAAY;QACvBI,IAAA,EAAM;UACJK,MAAA,EAAQ,CAAC;UACTD,UAAA;UACAE,KAAA,EAAOH;QACT;MACF;MACA,OAAO;QAAEJ;MAAK;IAChB;EACF;EAEA;EACA,MAAMA,IAAA,GAAOH,WAAA,CAAY;IACvBI,IAAA,EAAM;MACJC,OAAA,EAAS;QACPH,MAAA,EAAQ,IAAIX,QAAA,CAASoB,OAAO,GAAGC,WAAW;QAC1CN,GAAA,EAAKP,OAAA,CAAQF,YAAY,CAAC,UAAU;MACtC;IACF;EACF;EAEA,OAAO;IAAEM;EAAK;AAChB","ignoreList":[]}
File without changes
File without changes