@payloadcms/richtext-lexical 3.59.0-internal.cb85185 → 3.59.0-internal.cf8cc72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/exports/client/Field-EHRBYNHO.js +2 -0
- package/dist/exports/client/Field-EHRBYNHO.js.map +7 -0
- package/dist/exports/client/RelationshipComponent-APF3CN47.js +2 -0
- package/dist/exports/client/RelationshipComponent-APF3CN47.js.map +7 -0
- package/dist/exports/client/bundled.css +1 -1
- package/dist/exports/client/{chunk-TLQLXR6Q.js → chunk-2Y72RT72.js} +2 -2
- package/dist/exports/client/chunk-3BY5IZJD.js +2 -0
- package/dist/exports/client/chunk-3BY5IZJD.js.map +7 -0
- package/dist/exports/client/chunk-CYLMY5ZJ.js +2 -0
- package/dist/exports/client/chunk-CYLMY5ZJ.js.map +7 -0
- package/dist/exports/client/component-VDJI45F2.js +2 -0
- package/dist/exports/client/component-VDJI45F2.js.map +7 -0
- package/dist/exports/client/componentInline-7TPI7ZBC.js +2 -0
- package/dist/exports/client/index.d.ts +0 -3
- package/dist/exports/client/index.d.ts.map +1 -1
- package/dist/exports/client/index.js +10 -35
- package/dist/exports/client/index.js.map +4 -4
- package/dist/features/align/server/i18n.d.ts.map +1 -1
- package/dist/features/align/server/i18n.js +6 -0
- package/dist/features/align/server/i18n.js.map +1 -1
- package/dist/features/blockquote/server/i18n.d.ts.map +1 -1
- package/dist/features/blockquote/server/i18n.js +3 -0
- package/dist/features/blockquote/server/i18n.js.map +1 -1
- package/dist/features/blocks/client/component/BlockContent.d.ts +27 -52
- package/dist/features/blocks/client/component/BlockContent.d.ts.map +1 -1
- package/dist/features/blocks/client/component/BlockContent.js +51 -48
- package/dist/features/blocks/client/component/BlockContent.js.map +1 -1
- package/dist/features/blocks/client/component/components/BlockCollapsible.d.ts +9 -2
- package/dist/features/blocks/client/component/components/BlockCollapsible.d.ts.map +1 -1
- package/dist/features/blocks/client/component/components/BlockCollapsible.js +25 -7
- package/dist/features/blocks/client/component/components/BlockCollapsible.js.map +1 -1
- package/dist/features/blocks/client/component/index.d.ts.map +1 -1
- package/dist/features/blocks/client/component/index.js +56 -71
- package/dist/features/blocks/client/component/index.js.map +1 -1
- package/dist/features/blocks/client/componentInline/index.d.ts.map +1 -1
- package/dist/features/blocks/client/componentInline/index.js +11 -13
- package/dist/features/blocks/client/componentInline/index.js.map +1 -1
- package/dist/features/blocks/client/index.d.ts.map +1 -1
- package/dist/features/blocks/client/index.js +0 -5
- package/dist/features/blocks/client/index.js.map +1 -1
- package/dist/features/blocks/client/markdownTransformer.d.ts +17 -0
- package/dist/features/blocks/client/markdownTransformer.d.ts.map +1 -0
- package/dist/features/blocks/client/markdownTransformer.js +144 -0
- package/dist/features/blocks/client/markdownTransformer.js.map +1 -0
- package/dist/features/blocks/server/i18n.d.ts.map +1 -1
- package/dist/features/blocks/server/i18n.js +47 -38
- package/dist/features/blocks/server/i18n.js.map +1 -1
- package/dist/features/blocks/server/index.js +1 -1
- package/dist/features/blocks/server/index.js.map +1 -1
- package/dist/features/blocks/server/linesFromMatchToContentAndPropsString.d.ts.map +1 -0
- package/dist/features/blocks/server/linesFromMatchToContentAndPropsString.js.map +1 -0
- package/dist/features/blocks/server/markdownTransformer.d.ts +22 -0
- package/dist/features/blocks/server/markdownTransformer.d.ts.map +1 -0
- package/dist/features/blocks/server/{markdown/markdownTransformer.js → markdownTransformer.js} +48 -6
- package/dist/features/blocks/server/markdownTransformer.js.map +1 -0
- package/dist/features/experimental_table/client/plugins/TableHoverActionsPlugin/index.d.ts.map +1 -1
- package/dist/features/experimental_table/client/plugins/TableHoverActionsPlugin/index.js +3 -5
- package/dist/features/experimental_table/client/plugins/TableHoverActionsPlugin/index.js.map +1 -1
- package/dist/features/heading/server/i18n.d.ts.map +1 -1
- package/dist/features/heading/server/i18n.js +3 -0
- package/dist/features/heading/server/i18n.js.map +1 -1
- package/dist/features/horizontalRule/server/i18n.d.ts.map +1 -1
- package/dist/features/horizontalRule/server/i18n.js +3 -0
- package/dist/features/horizontalRule/server/i18n.js.map +1 -1
- package/dist/features/indent/server/i18n.d.ts.map +1 -1
- package/dist/features/indent/server/i18n.js +4 -0
- package/dist/features/indent/server/i18n.js.map +1 -1
- package/dist/features/link/client/plugins/floatingLinkEditor/LinkEditor/index.d.ts.map +1 -1
- package/dist/features/link/client/plugins/floatingLinkEditor/LinkEditor/index.js +1 -3
- package/dist/features/link/client/plugins/floatingLinkEditor/LinkEditor/index.js.map +1 -1
- package/dist/features/link/server/i18n.d.ts.map +1 -1
- package/dist/features/link/server/i18n.js +4 -0
- package/dist/features/link/server/i18n.js.map +1 -1
- package/dist/features/lists/checklist/server/i18n.d.ts.map +1 -1
- package/dist/features/lists/checklist/server/i18n.js +3 -0
- package/dist/features/lists/checklist/server/i18n.js.map +1 -1
- package/dist/features/lists/orderedList/server/i18n.d.ts.map +1 -1
- package/dist/features/lists/orderedList/server/i18n.js +3 -0
- package/dist/features/lists/orderedList/server/i18n.js.map +1 -1
- package/dist/features/lists/unorderedList/server/i18n.d.ts.map +1 -1
- package/dist/features/lists/unorderedList/server/i18n.js +3 -0
- package/dist/features/lists/unorderedList/server/i18n.js.map +1 -1
- package/dist/features/paragraph/server/i18n.d.ts.map +1 -1
- package/dist/features/paragraph/server/i18n.js +4 -0
- package/dist/features/paragraph/server/i18n.js.map +1 -1
- package/dist/features/relationship/client/components/RelationshipComponent.d.ts.map +1 -1
- package/dist/features/relationship/client/components/RelationshipComponent.js +9 -5
- package/dist/features/relationship/client/components/RelationshipComponent.js.map +1 -1
- package/dist/features/relationship/server/i18n.d.ts.map +1 -1
- package/dist/features/relationship/server/i18n.js +3 -0
- package/dist/features/relationship/server/i18n.js.map +1 -1
- package/dist/features/textState/i18n.d.ts.map +1 -1
- package/dist/features/textState/i18n.js +3 -0
- package/dist/features/textState/i18n.js.map +1 -1
- package/dist/features/toolbars/fixed/client/Toolbar/index.d.ts.map +1 -1
- package/dist/features/toolbars/fixed/client/Toolbar/index.js +1 -7
- package/dist/features/toolbars/fixed/client/Toolbar/index.js.map +1 -1
- package/dist/features/toolbars/inline/client/Toolbar/index.d.ts.map +1 -1
- package/dist/features/toolbars/inline/client/Toolbar/index.js +1 -3
- package/dist/features/toolbars/inline/client/Toolbar/index.js.map +1 -1
- package/dist/features/upload/client/component/index.d.ts.map +1 -1
- package/dist/features/upload/client/component/index.js +5 -6
- package/dist/features/upload/client/component/index.js.map +1 -1
- package/dist/features/upload/server/i18n.d.ts.map +1 -1
- package/dist/features/upload/server/i18n.js +3 -0
- package/dist/features/upload/server/i18n.js.map +1 -1
- package/dist/field/bundled.css +1 -1
- package/dist/field/rscEntry.d.ts.map +1 -1
- package/dist/field/rscEntry.js +0 -2
- package/dist/field/rscEntry.js.map +1 -1
- package/dist/i18n.d.ts.map +1 -1
- package/dist/i18n.js +6 -0
- package/dist/i18n.js.map +1 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +0 -1
- package/dist/index.js.map +1 -1
- package/dist/lexical/LexicalEditor.d.ts.map +1 -1
- package/dist/lexical/LexicalEditor.js +8 -10
- package/dist/lexical/LexicalEditor.js.map +1 -1
- package/dist/lexical/ui/icons/CodeBlock/index.d.ts.map +1 -1
- package/dist/lexical/ui/icons/CodeBlock/index.js +15 -10
- package/dist/lexical/ui/icons/CodeBlock/index.js.map +1 -1
- package/dist/utilities/buildInitialState.d.ts +0 -1
- package/dist/utilities/buildInitialState.d.ts.map +1 -1
- package/dist/utilities/buildInitialState.js +0 -1
- package/dist/utilities/buildInitialState.js.map +1 -1
- package/dist/utilities/fieldsDrawer/DrawerContent.d.ts.map +1 -1
- package/dist/utilities/fieldsDrawer/DrawerContent.js +3 -7
- package/dist/utilities/fieldsDrawer/DrawerContent.js.map +1 -1
- package/dist/utilities/jsx/collectTopLevelJSXInLines.js +2 -2
- package/dist/utilities/jsx/collectTopLevelJSXInLines.js.map +1 -1
- package/package.json +6 -6
- package/dist/exports/client/Field-RQHCQRLV.js +0 -2
- package/dist/exports/client/Field-RQHCQRLV.js.map +0 -7
- package/dist/exports/client/RelationshipComponent-TSIENULZ.js +0 -2
- package/dist/exports/client/RelationshipComponent-TSIENULZ.js.map +0 -7
- package/dist/exports/client/chunk-BQCXN3B4.js +0 -2
- package/dist/exports/client/chunk-BQCXN3B4.js.map +0 -7
- package/dist/exports/client/chunk-XNERFY6G.js +0 -2
- package/dist/exports/client/chunk-XNERFY6G.js.map +0 -7
- package/dist/exports/client/component-CYFKY3IL.js +0 -2
- package/dist/exports/client/component-CYFKY3IL.js.map +0 -7
- package/dist/exports/client/componentInline-NGTRUSGB.js +0 -2
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.d.ts +0 -6
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.d.ts.map +0 -1
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.js +0 -24
- package/dist/features/blocks/client/markdown/getLexicalToMarkdown.js.map +0 -1
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.d.ts +0 -6
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.d.ts.map +0 -1
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.js +0 -20
- package/dist/features/blocks/client/markdown/getMarkdownToLexical.js.map +0 -1
- package/dist/features/blocks/client/markdown/markdownTransformer.d.ts +0 -12
- package/dist/features/blocks/client/markdown/markdownTransformer.d.ts.map +0 -1
- package/dist/features/blocks/client/markdown/markdownTransformer.js +0 -348
- package/dist/features/blocks/client/markdown/markdownTransformer.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/Block.d.ts +0 -5
- package/dist/features/blocks/premade/CodeBlock/Component/Block.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/Block.js +0 -156
- package/dist/features/blocks/premade/CodeBlock/Component/Block.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/Code.d.ts +0 -61
- package/dist/features/blocks/premade/CodeBlock/Component/Code.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/Code.js +0 -99
- package/dist/features/blocks/premade/CodeBlock/Component/Code.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/Collapse/index.d.ts +0 -4
- package/dist/features/blocks/premade/CodeBlock/Component/Collapse/index.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/Collapse/index.js +0 -17
- package/dist/features/blocks/premade/CodeBlock/Component/Collapse/index.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/FloatingCollapse/index.d.ts +0 -4
- package/dist/features/blocks/premade/CodeBlock/Component/FloatingCollapse/index.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/Component/FloatingCollapse/index.js +0 -26
- package/dist/features/blocks/premade/CodeBlock/Component/FloatingCollapse/index.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converter.d.ts +0 -7
- package/dist/features/blocks/premade/CodeBlock/converter.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converter.js +0 -46
- package/dist/features/blocks/premade/CodeBlock/converter.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converterClient.d.ts +0 -2
- package/dist/features/blocks/premade/CodeBlock/converterClient.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/converterClient.js +0 -4
- package/dist/features/blocks/premade/CodeBlock/converterClient.js.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/index.d.ts +0 -7
- package/dist/features/blocks/premade/CodeBlock/index.d.ts.map +0 -1
- package/dist/features/blocks/premade/CodeBlock/index.js +0 -52
- package/dist/features/blocks/premade/CodeBlock/index.js.map +0 -1
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.d.ts +0 -6
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.js +0 -27
- package/dist/features/blocks/server/markdown/getLexicalToMarkdown.js.map +0 -1
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.d.ts +0 -7
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.js +0 -22
- package/dist/features/blocks/server/markdown/getMarkdownToLexical.js.map +0 -1
- package/dist/features/blocks/server/markdown/linesFromMatchToContentAndPropsString.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/linesFromMatchToContentAndPropsString.js.map +0 -1
- package/dist/features/blocks/server/markdown/markdownTransformer.d.ts +0 -15
- package/dist/features/blocks/server/markdown/markdownTransformer.d.ts.map +0 -1
- package/dist/features/blocks/server/markdown/markdownTransformer.js.map +0 -1
- package/dist/lexical/ui/icons/Collapse/index.d.ts +0 -3
- package/dist/lexical/ui/icons/Collapse/index.d.ts.map +0 -1
- package/dist/lexical/ui/icons/Collapse/index.js +0 -18
- package/dist/lexical/ui/icons/Collapse/index.js.map +0 -1
- /package/dist/exports/client/{chunk-TLQLXR6Q.js.map → chunk-2Y72RT72.js.map} +0 -0
- /package/dist/exports/client/{componentInline-NGTRUSGB.js.map → componentInline-7TPI7ZBC.js.map} +0 -0
- /package/dist/features/blocks/server/{markdown/linesFromMatchToContentAndPropsString.d.ts → linesFromMatchToContentAndPropsString.d.ts} +0 -0
- /package/dist/features/blocks/server/{markdown/linesFromMatchToContentAndPropsString.js → linesFromMatchToContentAndPropsString.js} +0 -0
package/dist/exports/client/component-CYFKY3IL.js
@@ -1,2 +0,0 @@
"use client";import{a as S,b as y,c as R}from"./chunk-BQCXN3B4.js";import{a as x}from"./chunk-INBEEENE.js";import{a as N}from"./chunk-F26IQ5RE.js";import{b as p}from"./chunk-BZZVLW4U.js";import{jsx as t,jsxs as n}from"react/jsx-runtime";import{useLexicalComposerContext as Y}from"@lexical/react/LexicalComposerContext.js";import{useLexicalEditable as Z}from"@lexical/react/useLexicalEditable";import{getTranslation as E}from"@payloadcms/translations";import{Button as h,formatDrawerSlug as ee,Thumbnail as te,useConfig as oe,useEditDepth as ae,usePayloadAPI as le,useTranslation as ie}from"@payloadcms/ui";import{$getNodeByKey as L}from"lexical";import{isImage as re}from"payload/shared";import{useCallback as f,useId as se,useReducer as ne,useRef as de,useState as ce}from"react";var e="lexical-upload",P={depth:0},ue=d=>{let{data:{fields:T,relationTo:g,value:c},nodeKey:l}=d;if(typeof c=="object")throw new Error("Upload value should be a string or number. The Lexical Upload component should not receive the populated value object.");let{config:{routes:{api:U},serverURL:F},getEntityConfig:j}=oe(),I=de(null),{uuid:z}=p(),B=ae(),[i]=Y(),{editorConfig:k,fieldProps:{schemaPath:A}}=p(),u=Z(),{i18n:_,t:r}=ie(),[b,K]=ne(s=>s+1,0),[a]=ce(()=>j({collectionSlug:g})),M=se(),w=ee({slug:"lexical-upload-drawer-"+z+M,depth:B}),{toggleDrawer:O}=x(w,!0),{closeDocumentDrawer:D,DocumentDrawer:W,DocumentDrawerToggler:H}=N({id:c,collectionSlug:a.slug}),[{data:o},{setParams:v}]=le(`${F}${U}/${a.slug}/${c}`,{initialParams:P}),C=o?.thumbnailURL||o?.url,q=f(()=>{i.update(()=>{L(l)?.remove()})},[i,l]),G=f(s=>{v({...P,cacheBust:b}),K(),D()},[v,b,D]),$=k?.resolvedFeatureMap?.get("upload")?.sanitizedClientFeatureProps.collections?.[a.slug]?.hasExtraFields,J=f((s,V)=>{i.update(()=>{let m=L(l);if(m){let X={...m.getData(),fields:V};m.setData(X)}})},[i,l]),Q=C&&o?.width&&o?.height?o.width>o.height?"landscape":"portrait":"landscape";return n("div",{className:`${e} ${e}--${Q}`,"data-filename":o?.filename,ref:I,children:[n("div",{className:`${e}__card`,children:[n("div",{className:`${e}__media`,children:[t(te,{collectionSlug:g,fileSrc:re(o?.mimeType)?C:null,height:o?.height,size:"none",width:o?.width}),u&&t("div",{className:`${e}__overlay ${e}__floater`,children:n("div",{className:`${e}__actions`,role:"toolbar",children:[$?t(h,{buttonStyle:"icon-label",className:`${e}__upload-drawer-toggler`,disabled:!u,el:"button",icon:"edit",onClick:O,round:!0,size:"medium",tooltip:r("fields:editRelationship")}):null,t(h,{buttonStyle:"icon-label",className:`${e}__swap-drawer-toggler`,disabled:!u,el:"button",icon:"swap",onClick:()=>{i.dispatchCommand(R,{replace:{nodeKey:l}})},round:!0,size:"medium",tooltip:r("fields:swapUpload")}),t(h,{buttonStyle:"icon-label",className:`${e}__removeButton`,disabled:!u,icon:"x",onClick:s=>{s.preventDefault(),q()},round:!0,size:"medium",tooltip:r("fields:removeUpload")})]})})]}),n("div",{className:`${e}__metaOverlay ${e}__floater`,children:[t(H,{className:`${e}__doc-drawer-toggler`,children:t("strong",{className:`${e}__filename`,children:o?.filename||r("general:untitled")})}),t("div",{className:`${e}__collectionLabel`,children:E(a.labels.singular,_)})]})]}),c?t(W,{onSave:G}):null,$?t(S,{data:T,drawerSlug:w,drawerTitle:r("general:editLabel",{label:E(a.labels.singular,_)}),featureKey:"upload",handleDrawerSubmit:J,schemaPath:A,schemaPathSuffix:a.slug}):null]})},ye=d=>t(y,{...d,uploads:!0,children:t(ue,{...d})});export{ye as UploadComponent};
//# sourceMappingURL=component-CYFKY3IL.js.map
package/dist/exports/client/component-CYFKY3IL.js.map
@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../../src/features/upload/client/component/index.tsx"],
"sourcesContent": ["'use client'\nimport type { ClientCollectionConfig, Data, FormState, JsonObject } from 'payload'\n\nimport { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext.js'\nimport { useLexicalEditable } from '@lexical/react/useLexicalEditable'\nimport { getTranslation } from '@payloadcms/translations'\nimport {\n Button,\n formatDrawerSlug,\n Thumbnail,\n useConfig,\n useEditDepth,\n usePayloadAPI,\n useTranslation,\n} from '@payloadcms/ui'\nimport { $getNodeByKey, type ElementFormatType } from 'lexical'\nimport { isImage } from 'payload/shared'\nimport React, { useCallback, useId, useReducer, useRef, useState } from 'react'\n\nimport type { BaseClientFeatureProps } from '../../../typesClient.js'\nimport type { UploadData } from '../../server/nodes/UploadNode.js'\nimport type { UploadFeaturePropsClient } from '../index.js'\nimport type { UploadNode } from '../nodes/UploadNode.js'\n\nimport { useEditorConfigContext } from '../../../../lexical/config/client/EditorConfigProvider.js'\nimport { FieldsDrawer } from '../../../../utilities/fieldsDrawer/Drawer.js'\nimport { useLexicalDocumentDrawer } from '../../../../utilities/fieldsDrawer/useLexicalDocumentDrawer.js'\nimport { useLexicalDrawer } from '../../../../utilities/fieldsDrawer/useLexicalDrawer.js'\nimport { EnabledRelationshipsCondition } from '../../../relationship/client/utils/EnabledRelationshipsCondition.js'\nimport './index.scss'\nimport { INSERT_UPLOAD_WITH_DRAWER_COMMAND } from '../drawer/commands.js'\n\nconst baseClass = 'lexical-upload'\n\nconst initialParams = {\n depth: 0,\n}\n\nexport type ElementProps = {\n data: UploadData\n format?: ElementFormatType\n nodeKey: string\n}\n\nconst Component: React.FC<ElementProps> = (props) => {\n const {\n data: { fields, relationTo, value },\n nodeKey,\n } = props\n\n if (typeof value === 'object') {\n throw new Error(\n 'Upload value should be a string or number. 
The Lexical Upload component should not receive the populated value object.',\n )\n }\n\n const {\n config: {\n routes: { api },\n serverURL,\n },\n getEntityConfig,\n } = useConfig()\n const uploadRef = useRef<HTMLDivElement | null>(null)\n const { uuid } = useEditorConfigContext()\n const editDepth = useEditDepth()\n const [editor] = useLexicalComposerContext()\n\n const {\n editorConfig,\n fieldProps: { schemaPath },\n } = useEditorConfigContext()\n const isEditable = useLexicalEditable()\n const { i18n, t } = useTranslation()\n const [cacheBust, dispatchCacheBust] = useReducer((state) => state + 1, 0)\n const [relatedCollection] = useState<ClientCollectionConfig>(() =>\n getEntityConfig({ collectionSlug: relationTo }),\n )\n\n const componentID = useId()\n\n const extraFieldsDrawerSlug = formatDrawerSlug({\n slug: `lexical-upload-drawer-` + uuid + componentID, // There can be multiple upload components, each with their own drawer, in one single editor => separate them by componentID\n depth: editDepth,\n })\n\n // Need to use hook to initialize useEffect that restores cursor position\n const { toggleDrawer } = useLexicalDrawer(extraFieldsDrawerSlug, true)\n\n const { closeDocumentDrawer, DocumentDrawer, DocumentDrawerToggler } = useLexicalDocumentDrawer({\n id: value,\n collectionSlug: relatedCollection.slug,\n })\n\n // Get the referenced document\n const [{ data }, { setParams }] = usePayloadAPI(\n `${serverURL}${api}/${relatedCollection.slug}/${value}`,\n { initialParams },\n )\n\n const thumbnailSRC = data?.thumbnailURL || data?.url\n\n const removeUpload = useCallback(() => {\n editor.update(() => {\n $getNodeByKey(nodeKey)?.remove()\n })\n }, [editor, nodeKey])\n\n const updateUpload = useCallback(\n (data: Data) => {\n setParams({\n ...initialParams,\n cacheBust, // do this to get the usePayloadAPI to re-fetch the data even though the URL string hasn't changed\n })\n\n dispatchCacheBust()\n closeDocumentDrawer()\n },\n [setParams, cacheBust, closeDocumentDrawer],\n )\n\n const hasExtraFields = (\n editorConfig?.resolvedFeatureMap?.get('upload')\n ?.sanitizedClientFeatureProps as BaseClientFeatureProps<UploadFeaturePropsClient>\n ).collections?.[relatedCollection.slug]?.hasExtraFields\n\n const onExtraFieldsDrawerSubmit = useCallback(\n (_: FormState, data: JsonObject) => {\n // Update lexical node (with key nodeKey) with new data\n editor.update(() => {\n const uploadNode: null | UploadNode = $getNodeByKey(nodeKey)\n if (uploadNode) {\n const newData: UploadData = {\n ...uploadNode.getData(),\n fields: data,\n }\n uploadNode.setData(newData)\n }\n })\n },\n [editor, nodeKey],\n )\n\n const aspectRatio =\n thumbnailSRC && data?.width && data?.height\n ? data.width > data.height\n ? 'landscape'\n : 'portrait'\n : 'landscape'\n\n return (\n <div\n className={`${baseClass} ${baseClass}--${aspectRatio}`}\n data-filename={data?.filename}\n ref={uploadRef}\n >\n <div className={`${baseClass}__card`}>\n <div className={`${baseClass}__media`}>\n <Thumbnail\n collectionSlug={relationTo}\n fileSrc={isImage(data?.mimeType) ? thumbnailSRC : null}\n height={data?.height}\n size=\"none\"\n width={data?.width}\n />\n\n {isEditable && (\n <div className={`${baseClass}__overlay ${baseClass}__floater`}>\n <div className={`${baseClass}__actions`} role=\"toolbar\">\n {hasExtraFields ? 
(\n <Button\n buttonStyle=\"icon-label\"\n className={`${baseClass}__upload-drawer-toggler`}\n disabled={!isEditable}\n el=\"button\"\n icon=\"edit\"\n onClick={toggleDrawer}\n round\n size=\"medium\"\n tooltip={t('fields:editRelationship')}\n />\n ) : null}\n\n <Button\n buttonStyle=\"icon-label\"\n className={`${baseClass}__swap-drawer-toggler`}\n disabled={!isEditable}\n el=\"button\"\n icon=\"swap\"\n onClick={() => {\n editor.dispatchCommand(INSERT_UPLOAD_WITH_DRAWER_COMMAND, {\n replace: { nodeKey },\n })\n }}\n round\n size=\"medium\"\n tooltip={t('fields:swapUpload')}\n />\n\n <Button\n buttonStyle=\"icon-label\"\n className={`${baseClass}__removeButton`}\n disabled={!isEditable}\n icon=\"x\"\n onClick={(e) => {\n e.preventDefault()\n removeUpload()\n }}\n round\n size=\"medium\"\n tooltip={t('fields:removeUpload')}\n />\n </div>\n </div>\n )}\n </div>\n\n <div className={`${baseClass}__metaOverlay ${baseClass}__floater`}>\n <DocumentDrawerToggler className={`${baseClass}__doc-drawer-toggler`}>\n <strong className={`${baseClass}__filename`}>\n {data?.filename || t('general:untitled')}\n </strong>\n </DocumentDrawerToggler>\n <div className={`${baseClass}__collectionLabel`}>\n {getTranslation(relatedCollection.labels.singular, i18n)}\n </div>\n </div>\n </div>\n\n {value ? <DocumentDrawer onSave={updateUpload} /> : null}\n {hasExtraFields ? (\n <FieldsDrawer\n data={fields}\n drawerSlug={extraFieldsDrawerSlug}\n drawerTitle={t('general:editLabel', {\n label: getTranslation(relatedCollection.labels.singular, i18n),\n })}\n featureKey=\"upload\"\n handleDrawerSubmit={onExtraFieldsDrawerSubmit}\n schemaPath={schemaPath}\n schemaPathSuffix={relatedCollection.slug}\n />\n ) : null}\n </div>\n )\n}\n\nexport const UploadComponent = (props: ElementProps): React.ReactNode => {\n return (\n <EnabledRelationshipsCondition {...props} uploads>\n <Component {...props} />\n </EnabledRelationshipsCondition>\n )\n}\n"],
"mappings": "6OAGA,OAASA,6BAAAA,MAAiC,2CAC1C,OAASC,sBAAAA,MAA0B,oCACnC,OAASC,kBAAAA,MAAsB,2BAC/B,OACEC,UAAAA,EACAC,oBAAAA,GACAC,aAAAA,GACAC,aAAAA,GACAC,gBAAAA,GACAC,iBAAAA,GACAC,kBAAAA,OACK,iBACP,OAASC,iBAAAA,MAA6C,UACtD,OAASC,WAAAA,OAAe,iBACxB,OAAgBC,eAAAA,EAAaC,SAAAA,GAAOC,cAAAA,GAAYC,UAAAA,GAAQC,YAAAA,OAAgB,QAexE,IAAMC,EAAY,iBAEZC,EAAgB,CACpBC,MAAO,CACT,EAQMC,GAAqCC,GAAA,CACzC,GAAM,CACJC,KAAM,CAAEC,OAAAA,EAAQC,WAAAA,EAAYC,MAAAA,CAAK,EACjCC,QAAAA,CAAO,EACLL,EAEJ,GAAI,OAAOI,GAAU,SACnB,MAAM,IAAIE,MACR,wHAAA,EAIJ,GAAM,CACJC,OAAQ,CACNC,OAAQ,CAAEC,IAAAA,CAAG,EACbC,UAAAA,CAAS,EAEXC,gBAAAA,CAAe,EACbC,GAAA,EACEC,EAAYC,GAA8B,IAAA,EAC1C,CAAEC,KAAAA,CAAI,EAAKC,EAAA,EACXC,EAAYC,GAAA,EACZ,CAACC,CAAA,EAAUC,EAAA,EAEX,CACJC,aAAAA,EACAC,WAAY,CAAEC,WAAAA,CAAU,CAAE,EACxBP,EAAA,EACEQ,EAAaC,EAAA,EACb,CAAEC,KAAAA,EAAMC,EAAAA,CAAC,EAAKC,GAAA,EACd,CAACC,EAAWC,CAAA,EAAqBC,GAAYC,GAAUA,EAAQ,EAAG,CAAA,EAClE,CAACC,CAAA,EAAqBC,GAAiC,IAC3DvB,EAAgB,CAAEwB,eAAgBhC,CAAW,CAAA,CAAA,EAGzCiC,EAAcC,GAAA,EAEdC,EAAwBC,GAAiB,CAC7CC,KAAM,yBAA2BzB,EAAOqB,EACxCtC,MAAOmB,CACT,CAAA,EAGM,CAAEwB,aAAAA,CAAY,EAAKC,EAAiBJ,EAAuB,EAAA,EAE3D,CAAEK,oBAAAA,EAAqBC,eAAAA,EAAgBC,sBAAAA,CAAqB,EAAKC,EAAyB,CAC9FC,GAAI3C,EACJ+B,eAAgBF,EAAkBO,IACpC,CAAA,EAGM,CAAC,CAAEvC,KAAAA,CAAI,EAAI,CAAE+C,UAAAA,CAAS,CAAE,EAAIC,GAChC,GAAGvC,CAAA,GAAYD,CAAA,IAAOwB,EAAkBO,IAAI,IAAIpC,CAAA,GAChD,CAAEP,cAAAA,CAAc,CAAA,EAGZqD,EAAejD,GAAMkD,cAAgBlD,GAAMmD,IAE3CC,EAAeC,EAAY,IAAA,CAC/BnC,EAAOoC,OAAO,IAAA,CACZC,EAAcnD,CAAA,GAAUoD,OAAA,CAC1B,CAAA,CACF,EAAG,CAACtC,EAAQd,CAAA,CAAQ,EAEdqD,EAAeJ,EAClBrD,GAAA,CACC+C,EAAU,CACR,GAAGnD,EACHgC,UAAAA,CACF,CAAA,EAEAC,EAAA,EACAa,EAAA,CACF,EACA,CAACK,EAAWnB,EAAWc,CAAA,CAAoB,EAGvCgB,EAAiBtC,GACPuC,oBAAoBC,IAAI,QAAA,GAClCC,4BACJC,cAAc9B,EAAkBO,IAAI,GAAGmB,eAEnCK,EAA4BV,EAChC,CAACW,EAAchE,IAAA,CAEbkB,EAAOoC,OAAO,IAAA,CACZ,IAAMW,EAAgCV,EAAcnD,CAAA,EACpD,GAAI6D,EAAY,CACd,IAAMC,EAAsB,CAC1B,GAAGD,EAAWE,QAAO,EACrBlE,OAAQD,CACV,EACAiE,EAAWG,QAAQF,CAAA,CACrB,CACF,CAAA,CACF,EACA,CAAChD,EAAQd,CAAA,CAAQ,EAGbiE,EACJpB,GAAgBjD,GAAMsE,OAAStE,GAAMuE,OACjCvE,EAAKsE,MAAQtE,EAAKuE,OAChB,YACA,WACF,YAEN,OACEC,EAAC,MAAA,CACCC,UAAW,GAAG9E,CAAA,IAAaA,CAAA,KAAc0E,CAAA,GACzC,gBAAerE,GAAM0E,SACrBC,IAAK/D,YAEL4D,EAAC,MAAA,CAAIC,UAAW,GAAG9E,CAAA,mBACjB6E,EAAC,MAAA,CAAIC,UAAW,GAAG9E,CAAA,oBACjBiF,EAACC,GAAA,CACC3C,eAAgBhC,EAChB4E,QAASC,GAAQ/E,GAAMgF,QAAA,EAAY/B,EAAe,KAClDsB,OAAQvE,GAAMuE,OACdU,KAAK,OACLX,MAAOtE,GAAMsE,QAGd/C,GACCqD,EAAC,MAAA,CAAIH,UAAW,GAAG9E,CAAA,aAAsBA,CAAA,qBACvC6E,EAAC,MAAA,CAAIC,UAAW,GAAG9E,CAAA,YAAsBuF,KAAK,oBAC3CxB,EACCkB,EAACO,EAAA,CACCC,YAAY,aACZX,UAAW,GAAG9E,CAAA,0BACd0F,SAAU,CAAC9D,EACX+D,GAAG,SACHC,KAAK,OACLC,QAAShD,EACTiD,MAAK,GACLR,KAAK,SACLS,QAAShE,EAAE,yBAAA,IAEX,KAEJkD,EAACO,EAAA,CACCC,YAAY,aACZX,UAAW,GAAG9E,CAAA,wBACd0F,SAAU,CAAC9D,EACX+D,GAAG,SACHC,KAAK,OACLC,QAASA,IAAA,CACPtE,EAAOyE,gBAAgBC,EAAmC,CACxDC,QAAS,CAAEzF,QAAAA,CAAQ,CACrB,CAAA,CACF,EACAqF,MAAK,GACLR,KAAK,SACLS,QAAShE,EAAE,mBAAA,IAGbkD,EAACO,EAAA,CACCC,YAAY,aACZX,UAAW,GAAG9E,CAAA,iBACd0F,SAAU,CAAC9D,EACXgE,KAAK,IACLC,QAAUM,GAAA,CACRA,EAAEC,eAAc,EAChB3C,EAAA,CACF,EACAqC,MAAK,GACLR,KAAK,SACLS,QAAShE,EAAE,qBAAA,YAOrB8C,EAAC,MAAA,CAAIC,UAAW,GAAG9E,CAAA,iBAA0BA,CAAA,sBAC3CiF,EAAChC,EAAA,CAAsB6B,UAAW,GAAG9E,CAAA,gCACnCiF,EAAC,SAAA,CAAOH,UAAW,GAAG9E,CAAA,sBACnBK,GAAM0E,UAAYhD,EAAE,kBAAA,MAGzBkD,EAAC,MAAA,CAAIH,UAAW,GAAG9E,CAAA,6BAChBqG,EAAehE,EAAkBiE,OAAOC,SAAUzE,CAAA,UAKxDtB,EAAQyE,EAACjC,EAAA,CAAewD,OAAQ1C,IAAmB,KACnDC,EACCkB,EAACwB,EAAA,CACCpG,KAAMC,EACNoG,WAAYhE,EACZiE,YAAa5E,EAAE,oBAAqB,CAClC6E,MAAOP,EAAehE,EAAkBiE,OAAOC,SAAUzE,CAAA,CAC3D,CAAA,EACA+E,WAAW,SACXC,mBAAoB1C,EACpB
zC,WAAYA,EACZoF,iBAAkB1E,EAAkBO,OAEpC,IAAA,GAGV,EAEaoE,GAAmB5G,GAE5B6E,EAACgC,EAAA,CAA+B,GAAG7G,EAAO8G,QAAO,YAC/CjC,EAAC9E,GAAA,CAAW,GAAGC",
"names": ["useLexicalComposerContext", "useLexicalEditable", "getTranslation", "Button", "formatDrawerSlug", "Thumbnail", "useConfig", "useEditDepth", "usePayloadAPI", "useTranslation", "$getNodeByKey", "isImage", "useCallback", "useId", "useReducer", "useRef", "useState", "baseClass", "initialParams", "depth", "Component", "props", "data", "fields", "relationTo", "value", "nodeKey", "Error", "config", "routes", "api", "serverURL", "getEntityConfig", "useConfig", "uploadRef", "useRef", "uuid", "useEditorConfigContext", "editDepth", "useEditDepth", "editor", "useLexicalComposerContext", "editorConfig", "fieldProps", "schemaPath", "isEditable", "useLexicalEditable", "i18n", "t", "useTranslation", "cacheBust", "dispatchCacheBust", "useReducer", "state", "relatedCollection", "useState", "collectionSlug", "componentID", "useId", "extraFieldsDrawerSlug", "formatDrawerSlug", "slug", "toggleDrawer", "useLexicalDrawer", "closeDocumentDrawer", "DocumentDrawer", "DocumentDrawerToggler", "useLexicalDocumentDrawer", "id", "setParams", "usePayloadAPI", "thumbnailSRC", "thumbnailURL", "url", "removeUpload", "useCallback", "update", "$getNodeByKey", "remove", "updateUpload", "hasExtraFields", "resolvedFeatureMap", "get", "sanitizedClientFeatureProps", "collections", "onExtraFieldsDrawerSubmit", "_", "uploadNode", "newData", "getData", "setData", "aspectRatio", "width", "height", "_jsxs", "className", "filename", "ref", "_jsx", "Thumbnail", "fileSrc", "isImage", "mimeType", "size", "role", "Button", "buttonStyle", "disabled", "el", "icon", "onClick", "round", "tooltip", "dispatchCommand", "INSERT_UPLOAD_WITH_DRAWER_COMMAND", "replace", "e", "preventDefault", "getTranslation", "labels", "singular", "onSave", "FieldsDrawer", "drawerSlug", "drawerTitle", "label", "featureKey", "handleDrawerSubmit", "schemaPathSuffix", "UploadComponent", "EnabledRelationshipsCondition", "uploads"]
}
package/dist/features/blocks/client/markdown/getLexicalToMarkdown.d.ts
@@ -1,6 +0,0 @@
import type { Klass, LexicalNode, LexicalNodeReplacement } from 'lexical';
import { type Transformer } from '../../../../packages/@lexical/markdown/index.js';
export declare function getLexicalToMarkdown(allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>, allTransformers: Transformer[]): (args: {
    editorState: Record<string, any>;
}) => string;
//# sourceMappingURL=getLexicalToMarkdown.d.ts.map
package/dist/features/blocks/client/markdown/getLexicalToMarkdown.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"getLexicalToMarkdown.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/client/markdown/getLexicalToMarkdown.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,sBAAsB,EAAE,MAAM,SAAS,CAAA;AAIzE,OAAO,EAEL,KAAK,WAAW,EACjB,MAAM,iDAAiD,CAAA;AAExD,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,sBAAsB,CAAC,EAC5D,eAAe,EAAE,WAAW,EAAE,GAC7B,CAAC,IAAI,EAAE;IAAE,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAA;CAAE,KAAK,MAAM,CAoBxD"}
package/dist/features/blocks/client/markdown/getLexicalToMarkdown.js
@@ -1,24 +0,0 @@
import { createHeadlessEditor } from '@lexical/headless';
import { $convertToMarkdownString } from '../../../../packages/@lexical/markdown/index.js';
export function getLexicalToMarkdown(allNodes, allTransformers) {
  const lexicalToMarkdown = ({
    editorState
  }) => {
    const headlessEditor = createHeadlessEditor({
      nodes: allNodes
    });
    try {
      headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState)) // This should commit the editor state immediately
      ;
    } catch (e) {
      console.error('getLexicalToMarkdown: ERROR parsing editor state', e);
    }
    let markdown = '';
    headlessEditor.getEditorState().read(() => {
      markdown = $convertToMarkdownString(allTransformers);
    });
    return markdown;
  };
  return lexicalToMarkdown;
}
//# sourceMappingURL=getLexicalToMarkdown.js.map
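For reference, the removed helper above is a thin wrapper around a headless Lexical editor. A roughly equivalent standalone sketch, using the upstream `@lexical/markdown` package in place of the vendored copy the deleted file imported; the function name and parameter layout are illustrative, not the package's API:

```ts
import { createHeadlessEditor } from '@lexical/headless'
import { $convertToMarkdownString } from '@lexical/markdown'
import type { Transformer } from '@lexical/markdown'
import type { Klass, LexicalNode, LexicalNodeReplacement, SerializedEditorState } from 'lexical'

// Roughly what the removed getLexicalToMarkdown did: hydrate a throwaway headless editor
// with a stored editor state, then read it back out as a markdown string.
export function lexicalToMarkdown(
  nodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>,
  transformers: Transformer[],
  editorState: SerializedEditorState,
): string {
  const headlessEditor = createHeadlessEditor({ nodes, onError: (e) => console.error(e) })
  headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState))
  let markdown = ''
  headlessEditor.getEditorState().read(() => {
    markdown = $convertToMarkdownString(transformers)
  })
  return markdown
}
```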
package/dist/features/blocks/client/markdown/getLexicalToMarkdown.js.map
@@ -1 +0,0 @@
{"version":3,"file":"getLexicalToMarkdown.js","names":["createHeadlessEditor","$convertToMarkdownString","getLexicalToMarkdown","allNodes","allTransformers","lexicalToMarkdown","editorState","headlessEditor","nodes","setEditorState","parseEditorState","e","console","error","markdown","getEditorState","read"],"sources":["../../../../../src/features/blocks/client/markdown/getLexicalToMarkdown.ts"],"sourcesContent":["import type { Klass, LexicalNode, LexicalNodeReplacement } from 'lexical'\n\nimport { createHeadlessEditor } from '@lexical/headless'\n\nimport {\n $convertToMarkdownString,\n type Transformer,\n} from '../../../../packages/@lexical/markdown/index.js'\n\nexport function getLexicalToMarkdown(\n allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>,\n allTransformers: Transformer[],\n): (args: { editorState: Record<string, any> }) => string {\n const lexicalToMarkdown = ({ editorState }: { editorState: Record<string, any> }): string => {\n const headlessEditor = createHeadlessEditor({\n nodes: allNodes,\n })\n\n try {\n headlessEditor.setEditorState(headlessEditor.parseEditorState(editorState as any)) // This should commit the editor state immediately\n } catch (e) {\n console.error('getLexicalToMarkdown: ERROR parsing editor state', e)\n }\n\n let markdown: string = ''\n headlessEditor.getEditorState().read(() => {\n markdown = $convertToMarkdownString(allTransformers)\n })\n\n return markdown\n }\n return lexicalToMarkdown\n}\n"],"mappings":"AAEA,SAASA,oBAAoB,QAAQ;AAErC,SACEC,wBAAwB,QAEnB;AAEP,OAAO,SAASC,qBACdC,QAA4D,EAC5DC,eAA8B;EAE9B,MAAMC,iBAAA,GAAoBA,CAAC;IAAEC;EAAW,CAAwC;IAC9E,MAAMC,cAAA,GAAiBP,oBAAA,CAAqB;MAC1CQ,KAAA,EAAOL;IACT;IAEA,IAAI;MACFI,cAAA,CAAeE,cAAc,CAACF,cAAA,CAAeG,gBAAgB,CAACJ,WAAA,GAAqB;MAAA;IACrF,EAAE,OAAOK,CAAA,EAAG;MACVC,OAAA,CAAQC,KAAK,CAAC,oDAAoDF,CAAA;IACpE;IAEA,IAAIG,QAAA,GAAmB;IACvBP,cAAA,CAAeQ,cAAc,GAAGC,IAAI,CAAC;MACnCF,QAAA,GAAWb,wBAAA,CAAyBG,eAAA;IACtC;IAEA,OAAOU,QAAA;EACT;EACA,OAAOT,iBAAA;AACT","ignoreList":[]}
package/dist/features/blocks/client/markdown/getMarkdownToLexical.d.ts
@@ -1,6 +0,0 @@
import type { Klass, LexicalNode, LexicalNodeReplacement, SerializedEditorState } from 'lexical';
import { type Transformer } from '../../../../packages/@lexical/markdown/index.js';
export declare function getMarkdownToLexical(allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>, allTransformers: Transformer[]): (args: {
    markdown: string;
}) => SerializedEditorState;
//# sourceMappingURL=getMarkdownToLexical.d.ts.map
package/dist/features/blocks/client/markdown/getMarkdownToLexical.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"getMarkdownToLexical.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/client/markdown/getMarkdownToLexical.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,sBAAsB,EAAE,qBAAqB,EAAE,MAAM,SAAS,CAAA;AAIhG,OAAO,EAEL,KAAK,WAAW,EACjB,MAAM,iDAAiD,CAAA;AAExD,wBAAgB,oBAAoB,CAClC,QAAQ,EAAE,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,sBAAsB,CAAC,EAC5D,eAAe,EAAE,WAAW,EAAE,GAC7B,CAAC,IAAI,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,KAAK,qBAAqB,CAkBvD"}
package/dist/features/blocks/client/markdown/getMarkdownToLexical.js
@@ -1,20 +0,0 @@
import { createHeadlessEditor } from '@lexical/headless';
import { $convertFromMarkdownString } from '../../../../packages/@lexical/markdown/index.js';
export function getMarkdownToLexical(allNodes, allTransformers) {
  const markdownToLexical = ({
    markdown
  }) => {
    const headlessEditor = createHeadlessEditor({
      nodes: allNodes
    });
    headlessEditor.update(() => {
      $convertFromMarkdownString(markdown, allTransformers);
    }, {
      discrete: true
    });
    const editorJSON = headlessEditor.getEditorState().toJSON();
    return editorJSON;
  };
  return markdownToLexical;
}
//# sourceMappingURL=getMarkdownToLexical.js.map
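The counterpart helper above goes the other way. An equivalent standalone sketch under the same assumptions (upstream `@lexical/markdown` instead of the vendored copy, illustrative naming); `discrete: true` makes the update flush synchronously so the state can be serialized immediately:

```ts
import { createHeadlessEditor } from '@lexical/headless'
import { $convertFromMarkdownString } from '@lexical/markdown'
import type { Transformer } from '@lexical/markdown'
import type { Klass, LexicalNode, LexicalNodeReplacement, SerializedEditorState } from 'lexical'

// Roughly what the removed getMarkdownToLexical did: parse markdown inside a headless
// editor, then hand back the resulting serialized editor state.
export function markdownToLexical(
  nodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>,
  transformers: Transformer[],
  markdown: string,
): SerializedEditorState {
  const headlessEditor = createHeadlessEditor({ nodes, onError: (e) => console.error(e) })
  headlessEditor.update(
    () => {
      $convertFromMarkdownString(markdown, transformers)
    },
    { discrete: true },
  )
  return headlessEditor.getEditorState().toJSON()
}
```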
package/dist/features/blocks/client/markdown/getMarkdownToLexical.js.map
@@ -1 +0,0 @@
{"version":3,"file":"getMarkdownToLexical.js","names":["createHeadlessEditor","$convertFromMarkdownString","getMarkdownToLexical","allNodes","allTransformers","markdownToLexical","markdown","headlessEditor","nodes","update","discrete","editorJSON","getEditorState","toJSON"],"sources":["../../../../../src/features/blocks/client/markdown/getMarkdownToLexical.ts"],"sourcesContent":["import type { Klass, LexicalNode, LexicalNodeReplacement, SerializedEditorState } from 'lexical'\n\nimport { createHeadlessEditor } from '@lexical/headless'\n\nimport {\n $convertFromMarkdownString,\n type Transformer,\n} from '../../../../packages/@lexical/markdown/index.js'\n\nexport function getMarkdownToLexical(\n allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>,\n allTransformers: Transformer[],\n): (args: { markdown: string }) => SerializedEditorState {\n const markdownToLexical = ({ markdown }: { markdown: string }): SerializedEditorState => {\n const headlessEditor = createHeadlessEditor({\n nodes: allNodes,\n })\n\n headlessEditor.update(\n () => {\n $convertFromMarkdownString(markdown, allTransformers)\n },\n { discrete: true },\n )\n\n const editorJSON = headlessEditor.getEditorState().toJSON()\n\n return editorJSON\n }\n return markdownToLexical\n}\n"],"mappings":"AAEA,SAASA,oBAAoB,QAAQ;AAErC,SACEC,0BAA0B,QAErB;AAEP,OAAO,SAASC,qBACdC,QAA4D,EAC5DC,eAA8B;EAE9B,MAAMC,iBAAA,GAAoBA,CAAC;IAAEC;EAAQ,CAAwB;IAC3D,MAAMC,cAAA,GAAiBP,oBAAA,CAAqB;MAC1CQ,KAAA,EAAOL;IACT;IAEAI,cAAA,CAAeE,MAAM,CACnB;MACER,0BAAA,CAA2BK,QAAA,EAAUF,eAAA;IACvC,GACA;MAAEM,QAAA,EAAU;IAAK;IAGnB,MAAMC,UAAA,GAAaJ,cAAA,CAAeK,cAAc,GAAGC,MAAM;IAEzD,OAAOF,UAAA;EACT;EACA,OAAON,iBAAA;AACT","ignoreList":[]}
package/dist/features/blocks/client/markdown/markdownTransformer.d.ts
@@ -1,12 +0,0 @@
import type { ClientBlock } from 'payload';
import { type Klass, type LexicalNode, type LexicalNodeReplacement } from 'lexical';
import type { Transformer } from '../../../../packages/@lexical/markdown/index.js';
import type { MultilineElementTransformer, TextMatchTransformer } from '../../../../packages/@lexical/markdown/MarkdownTransformers.js';
export declare const getBlockMarkdownTransformers: ({ blocks, inlineBlocks, }: {
    blocks: ClientBlock[];
    inlineBlocks: ClientBlock[];
}) => ((props: {
    allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>;
    allTransformers: Transformer[];
}) => MultilineElementTransformer | TextMatchTransformer)[];
//# sourceMappingURL=markdownTransformer.d.ts.map
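The deleted declaration above captures the two-stage shape of the block transformer API: `getBlockMarkdownTransformers` returns one factory per block with a `jsx` converter, and each factory is resolved later with the editor's full node and transformer lists. A minimal wiring sketch; the `declare` statements stand in for values the editor supplies at runtime, and the simplified `Transformer` return type is an assumption:

```ts
import type { Transformer } from '@lexical/markdown'
import type { Klass, LexicalNode, LexicalNodeReplacement } from 'lexical'
import type { ClientBlock } from 'payload'

// Stand-ins for runtime values and for the deleted export, so the sketch type-checks on its own.
declare const blocks: ClientBlock[]
declare const inlineBlocks: ClientBlock[]
declare const allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>
declare const allTransformers: Transformer[]
declare const getBlockMarkdownTransformers: (args: {
  blocks: ClientBlock[]
  inlineBlocks: ClientBlock[]
}) => Array<(props: { allNodes: typeof allNodes; allTransformers: Transformer[] }) => Transformer>

// Stage 1: build the factories from the configured blocks.
const factories = getBlockMarkdownTransformers({ blocks, inlineBlocks })
// Stage 2: resolve each factory once the complete node and transformer lists are known.
const blockTransformers = factories.map((factory) => factory({ allNodes, allTransformers }))
```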
package/dist/features/blocks/client/markdown/markdownTransformer.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"markdownTransformer.d.ts","sourceRoot":"","sources":["../../../../../src/features/blocks/client/markdown/markdownTransformer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAE1C,OAAO,EAGL,KAAK,KAAK,EACV,KAAK,WAAW,EAChB,KAAK,sBAAsB,EAE5B,MAAM,SAAS,CAAA;AAEhB,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,iDAAiD,CAAA;AAClF,OAAO,KAAK,EACV,2BAA2B,EAC3B,oBAAoB,EACrB,MAAM,gEAAgE,CAAA;AAqBvE,eAAO,MAAM,4BAA4B,8BAGtC;IACD,MAAM,EAAE,WAAW,EAAE,CAAA;IACrB,YAAY,EAAE,WAAW,EAAE,CAAA;CAC5B,KAAG,CAAC,CAAC,KAAK,EAAE;IACX,QAAQ,EAAE,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,sBAAsB,CAAC,CAAA;IAC5D,eAAe,EAAE,WAAW,EAAE,CAAA;CAC/B,KAAK,2BAA2B,GAAG,oBAAoB,CAAC,EA8BxD,CAAA"}
package/dist/features/blocks/client/markdown/markdownTransformer.js
@@ -1,348 +0,0 @@
import { $parseSerializedNode } from 'lexical';
import { extractPropsFromJSXPropsString } from '../../../../utilities/jsx/extractPropsFromJSXPropsString.js';
import { propsToJSXString } from '../../../../utilities/jsx/jsx.js';
import { linesFromStartToContentAndPropsString } from '../../server/markdown/linesFromMatchToContentAndPropsString.js';
import { $createBlockNode, $isBlockNode, BlockNode } from '../nodes/BlocksNode.js';
import { $createInlineBlockNode, $isInlineBlockNode, InlineBlockNode } from '../nodes/InlineBlocksNode.js';
import { getLexicalToMarkdown } from './getLexicalToMarkdown.js';
import { getMarkdownToLexical } from './getMarkdownToLexical.js';
function createTagRegexes(tagName) {
  const escapedTagName = tagName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  return {
    regExpEnd: new RegExp(`</(${escapedTagName})\\s*>|<${escapedTagName}[^>]*?/>`, 'i'),
    regExpStart: new RegExp(`<(${escapedTagName})([^>]*?)\\s*(/?)>`, 'i')
  };
}
export const getBlockMarkdownTransformers = ({
  blocks,
  inlineBlocks
}) => {
  if (!blocks?.length && !inlineBlocks?.length) {
    return [];
  }
  let transformers = [];
  if (blocks?.length) {
    for (const block of blocks) {
      const transformer = getMarkdownTransformerForBlock(block, false);
      if (transformer) {
        transformers = transformers.concat(transformer);
      }
    }
  }
  if (inlineBlocks?.length) {
    for (const block of inlineBlocks) {
      const transformer = getMarkdownTransformerForBlock(block, true);
      if (transformer) {
        transformers = transformers.concat(transformer);
      }
    }
  }
  return transformers;
};
function getMarkdownTransformerForBlock(block, isInlineBlock) {
  if (!block.jsx) {
    return null;
  }
  const regex = createTagRegexes(block.slug);
  const toReturn = [];
  if (isInlineBlock) {
    toReturn.push(({
      allNodes,
      allTransformers
    }) => ({
      type: 'text-match',
      dependencies: [InlineBlockNode],
      export: node => {
        if (!$isInlineBlockNode(node)) {
          return null;
        }
        if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) {
          return null;
        }
        const nodeFields = node.getFields();
        const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers);
        const exportResult = block.jsx.export({
          fields: nodeFields,
          lexicalToMarkdown
        });
        if (exportResult === false) {
          return null;
        }
        if (typeof exportResult === 'string') {
          return exportResult;
        }
        const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0;
        const props = exportResult.props ?? {};
        if (exportResult?.children?.length) {
          return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({
            props
          }) : ''}>${exportResult.children}</${nodeFields.blockType}>`;
        }
        return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({
          props
        }) : ''}/>`;
      },
      getEndIndex: (node, match) => {
        const {
          endlineLastCharIndex
        } = linesFromStartToContentAndPropsString({
          isEndOptional: false,
          lines: [node.getTextContent()],
          regexpEndRegex: regex.regExpEnd,
          startLineIndex: 0,
          startMatch: match,
          trimChildren: false
        });
        return endlineLastCharIndex;
      },
      importRegExp: block.jsx?.customStartRegex ?? regex.regExpStart,
      regExp: /___ignoreignoreignore___/g,
      replace(node, match) {
        const {
          content,
          propsString
        } = linesFromStartToContentAndPropsString({
          isEndOptional: false,
          lines: [node.getTextContent()],
          regexpEndRegex: regex.regExpEnd,
          startLineIndex: 0,
          startMatch: {
            ...match,
            index: 0
          },
          trimChildren: false
        });
        if (!block?.jsx?.import) {
          // No multiline transformer handled this line successfully
          return;
        }
        const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers);
        const blockFields = block.jsx.import({
          children: content,
          closeMatch: null,
          htmlToLexical: null,
          markdownToLexical,
          openMatch: match,
          props: propsString ? extractPropsFromJSXPropsString({
            propsString
          }) : {}
        });
        if (blockFields === false) {
          return;
        }
        const inlineBlockNode = $createInlineBlockNode({
          blockType: block.slug,
          ...blockFields
        });
        node.replace(inlineBlockNode);
      }
    }));
    return toReturn;
  }
  toReturn.push(({
    allNodes,
    allTransformers
  }) => ({
    dependencies: [BlockNode],
    export: node => {
      if (!$isBlockNode(node)) {
        return null;
      }
      if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) {
        return null;
      }
      const nodeFields = node.getFields();
      const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers);
      const exportResult = block.jsx.export({
        fields: nodeFields,
        lexicalToMarkdown
      });
      if (exportResult === false) {
        return null;
      }
      if (typeof exportResult === 'string') {
        return exportResult;
      }
      const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0;
      const props = exportResult.props ?? {};
      if (exportResult?.children?.length) {
        const children = exportResult.children;
        let sanitizedChildren = '';
        // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child
        if (children.includes('\n')) {
          for (const child of children.split('\n')) {
            let sanitizedChild = '';
            if (!block?.jsx?.doNotTrimChildren && child !== '') {
              sanitizedChild = '  ';
            }
            sanitizedChild += child + '\n';
            sanitizedChildren += sanitizedChild;
          }
        } else {
          sanitizedChildren = (block?.jsx?.doNotTrimChildren ? '' : '  ') + children + '\n';
        }
        return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({
          props
        }) : ''}>\n${sanitizedChildren}</${nodeFields.blockType}>`;
      }
      return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({
        props
      }) : ''}/>`;
    },
    handleImportAfterStartMatch: block.jsx?.customEndRegex ? undefined : ({
      lines,
      rootNode,
      startLineIndex,
      startMatch,
      transformer
    }) => {
      const regexpEndRegex = typeof transformer.regExpEnd === 'object' && 'regExp' in transformer.regExpEnd ? transformer.regExpEnd.regExp : transformer.regExpEnd;
      const isEndOptional = transformer.regExpEnd && typeof transformer.regExpEnd === 'object' && 'optional' in transformer.regExpEnd ? transformer.regExpEnd.optional : !transformer.regExpEnd;
      const {
        afterEndLine,
        beforeStartLine,
        content: unsanitizedContent,
        endLineIndex,
        propsString
      } = linesFromStartToContentAndPropsString({
        isEndOptional,
        lines,
        regexpEndRegex,
        startLineIndex,
        startMatch,
        trimChildren: false
      });
      let content = '';
      if (block?.jsx?.doNotTrimChildren) {
        content = unsanitizedContent.endsWith('\n') ? unsanitizedContent.slice(0, -1) : unsanitizedContent;
      } else {
        // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child
        if (unsanitizedContent.includes('\n')) {
          const split = unsanitizedContent.split('\n');
          let index = 0;
          for (const child of split) {
            index++;
            if (child.startsWith('  ')) {
              content += child.slice(2);
            } else {
              // If one child is misaligned, skip aligning completely, unless it's just empty
              if (child === '') {
                content += child;
              } else {
                content = unsanitizedContent.endsWith('\n') ? unsanitizedContent.slice(0, -1) : unsanitizedContent;
                break;
              }
            }
            content += index === split.length ? '' : '\n';
          }
        } else {
          content = (!unsanitizedContent.startsWith('  ') ? unsanitizedContent : unsanitizedContent.slice(2)) + '\n';
        }
      }
      if (!block?.jsx?.import) {
        // No multiline transformer handled this line successfully
        return [false, startLineIndex];
      }
      const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers);
      const blockFields = block.jsx.import({
        children: content,
        closeMatch: null,
        htmlToLexical: null,
        markdownToLexical,
        openMatch: startMatch,
        props: propsString ? extractPropsFromJSXPropsString({
          propsString
        }) : {}
      });
      if (blockFields === false) {
        return [false, startLineIndex];
      }
      const node = $createBlockNode({
        blockType: block.slug,
        ...blockFields
      });
      if (node) {
        // Now handle beforeStartLine and afterEndLine. If those are not empty, we need to add them as text nodes before and after the block node.
        // However, those themselves can contain other markdown matches, so we need to parse them as well.
        // Example where this is needed: "Hello <InlineCode>inline code</InlineCode> test."
        let prevNodes = null;
        let nextNodes = null;
        // TODO: Might not need this prevNodes and nextNodes handling if inline nodes are handled by textmatch transformers
        if (beforeStartLine?.length) {
          prevNodes = markdownToLexical({
            markdown: beforeStartLine
          })?.root?.children ?? [];
          const firstPrevNode = prevNodes?.[0];
          if (firstPrevNode) {
            rootNode.append($parseSerializedNode(firstPrevNode));
          }
        }
        rootNode.append(node);
        if (afterEndLine?.length) {
          nextNodes = markdownToLexical({
            markdown: afterEndLine
          })?.root?.children;
          const lastChild = rootNode.getChildren()[rootNode.getChildren().length - 1];
          const children = $parseSerializedNode(nextNodes[0])?.getChildren();
          if (children?.length) {
            for (const child of children) {
              lastChild.append(child);
            }
          }
        }
      }
      return [true, endLineIndex];
    },
    regExpEnd: block.jsx?.customEndRegex ?? regex.regExpEnd,
    regExpStart: block.jsx?.customStartRegex ?? regex.regExpStart,
    // This replace is ONLY run for ``` code blocks (so any blocks with custom start and end regexes). For others, we use the special JSX handling above:
    type: 'multiline-element',
    replace: (rootNode, children, openMatch, closeMatch, linesInBetween) => {
      if (block?.jsx?.import) {
        if (!linesInBetween) {
          // convert children to linesInBetween
          let line = '';
          if (children) {
            for (const child of children) {
              line += child.getTextContent();
            }
          }
          linesInBetween = [line];
        }
        let childrenString = '';
        if (block?.jsx?.doNotTrimChildren) {
          childrenString = linesInBetween.join('\n');
        } else {
          childrenString = linesInBetween.join('\n').trim();
        }
        const propsString = openMatch[1]?.trim();
        const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers);
        const blockFields = block.jsx.import({
          children: childrenString,
          closeMatch: closeMatch,
          htmlToLexical: null,
          markdownToLexical,
          openMatch: openMatch,
          props: propsString ? extractPropsFromJSXPropsString({
            propsString
          }) : {}
        });
        if (blockFields === false) {
          return false;
        }
        const node = $createBlockNode({
          blockType: block.slug,
          ...blockFields
        });
        if (node) {
          rootNode.append(node);
        }
        return;
      }
      return false // Run next transformer
      ;
    }
  }));
  return toReturn;
}
//# sourceMappingURL=markdownTransformer.js.map
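The implementation above drives everything through each block's `jsx` converter: `export` turns a block node's fields into a JSX-like tag, and `import` turns a matched tag's props and children back into block fields. A minimal sketch of that converter shape, inferred from those call sites; the `Banner` slug and the `content`/`style` field names are invented for illustration:

```ts
// Inferred from the deleted code's calls to block.jsx.export({ fields, lexicalToMarkdown })
// and block.jsx.import({ children, props, markdownToLexical, openMatch, closeMatch, htmlToLexical }).
const bannerJSXConverter = {
  // Serialize block fields to markdown/JSX. Returning false skips the block;
  // returning a string emits it verbatim; returning { props, children } lets the
  // transformer build `<Banner ...props>children</Banner>` (or `<Banner ...props/>`).
  export: ({ fields }: { fields: Record<string, any> }) => ({
    props: { style: fields.style },
    children: String(fields.content ?? ''),
  }),
  // Parse a matched `<Banner ...>` tag back into block fields. Returning false
  // tells the transformer the tag was not handled.
  import: ({ children, props }: { children: string; props: Record<string, any> }) => ({
    content: children,
    style: props.style,
  }),
}
```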
package/dist/features/blocks/client/markdown/markdownTransformer.js.map
@@ -1 +0,0 @@
{"version":3,"file":"markdownTransformer.js","names":["$parseSerializedNode","extractPropsFromJSXPropsString","propsToJSXString","linesFromStartToContentAndPropsString","$createBlockNode","$isBlockNode","BlockNode","$createInlineBlockNode","$isInlineBlockNode","InlineBlockNode","getLexicalToMarkdown","getMarkdownToLexical","createTagRegexes","tagName","escapedTagName","replace","regExpEnd","RegExp","regExpStart","getBlockMarkdownTransformers","blocks","inlineBlocks","length","transformers","block","transformer","getMarkdownTransformerForBlock","concat","isInlineBlock","jsx","regex","slug","toReturn","push","allNodes","allTransformers","type","dependencies","export","node","getFields","blockType","toLowerCase","nodeFields","lexicalToMarkdown","exportResult","fields","hasProps","props","Object","keys","children","getEndIndex","match","endlineLastCharIndex","isEndOptional","lines","getTextContent","regexpEndRegex","startLineIndex","startMatch","trimChildren","importRegExp","customStartRegex","regExp","content","propsString","index","import","markdownToLexical","blockFields","closeMatch","htmlToLexical","openMatch","inlineBlockNode","sanitizedChildren","includes","child","split","sanitizedChild","doNotTrimChildren","handleImportAfterStartMatch","customEndRegex","undefined","rootNode","optional","afterEndLine","beforeStartLine","unsanitizedContent","endLineIndex","endsWith","slice","startsWith","prevNodes","nextNodes","markdown","root","firstPrevNode","append","lastChild","getChildren","linesInBetween","line","childrenString","join","trim"],"sources":["../../../../../src/features/blocks/client/markdown/markdownTransformer.ts"],"sourcesContent":["import type { ClientBlock } from 'payload'\n\nimport {\n $parseSerializedNode,\n type ElementNode,\n type Klass,\n type LexicalNode,\n type LexicalNodeReplacement,\n type SerializedLexicalNode,\n} from 'lexical'\n\nimport type { Transformer } from '../../../../packages/@lexical/markdown/index.js'\nimport type {\n MultilineElementTransformer,\n TextMatchTransformer,\n} from '../../../../packages/@lexical/markdown/MarkdownTransformers.js'\n\nimport { extractPropsFromJSXPropsString } from '../../../../utilities/jsx/extractPropsFromJSXPropsString.js'\nimport { propsToJSXString } from '../../../../utilities/jsx/jsx.js'\nimport { linesFromStartToContentAndPropsString } from '../../server/markdown/linesFromMatchToContentAndPropsString.js'\nimport { $createBlockNode, $isBlockNode, BlockNode } from '../nodes/BlocksNode.js'\nimport {\n $createInlineBlockNode,\n $isInlineBlockNode,\n InlineBlockNode,\n} from '../nodes/InlineBlocksNode.js'\nimport { getLexicalToMarkdown } from './getLexicalToMarkdown.js'\nimport { getMarkdownToLexical } from './getMarkdownToLexical.js'\n\nfunction createTagRegexes(tagName: string) {\n const escapedTagName = tagName.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&')\n return {\n regExpEnd: new RegExp(`</(${escapedTagName})\\\\s*>|<${escapedTagName}[^>]*?/>`, 'i'),\n regExpStart: new RegExp(`<(${escapedTagName})([^>]*?)\\\\s*(/?)>`, 'i'),\n }\n}\nexport const getBlockMarkdownTransformers = ({\n blocks,\n inlineBlocks,\n}: {\n blocks: ClientBlock[]\n inlineBlocks: ClientBlock[]\n}): ((props: {\n allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>\n allTransformers: Transformer[]\n}) => MultilineElementTransformer | TextMatchTransformer)[] => {\n if (!blocks?.length && !inlineBlocks?.length) {\n return []\n }\n let transformers: ((props: {\n allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>\n allTransformers: 
Transformer[]\n }) => MultilineElementTransformer | TextMatchTransformer)[] = []\n\n if (blocks?.length) {\n for (const block of blocks) {\n const transformer = getMarkdownTransformerForBlock(block, false)\n\n if (transformer) {\n transformers = transformers.concat(transformer)\n }\n }\n }\n\n if (inlineBlocks?.length) {\n for (const block of inlineBlocks) {\n const transformer = getMarkdownTransformerForBlock(block, true)\n\n if (transformer) {\n transformers = transformers.concat(transformer)\n }\n }\n }\n\n return transformers\n}\n\nfunction getMarkdownTransformerForBlock(\n block: ClientBlock,\n isInlineBlock: boolean,\n): Array<\n (props: {\n allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>\n allTransformers: Transformer[]\n }) => MultilineElementTransformer | TextMatchTransformer\n> | null {\n if (!block.jsx) {\n return null\n }\n const regex = createTagRegexes(block.slug)\n const toReturn: Array<\n (props: {\n allNodes: Array<Klass<LexicalNode> | LexicalNodeReplacement>\n allTransformers: Transformer[]\n }) => MultilineElementTransformer | TextMatchTransformer\n > = []\n\n if (isInlineBlock) {\n toReturn.push(({ allNodes, allTransformers }) => ({\n type: 'text-match',\n dependencies: [InlineBlockNode],\n export: (node) => {\n if (!$isInlineBlockNode(node)) {\n return null\n }\n\n if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) {\n return null\n }\n\n const nodeFields = node.getFields()\n const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers)\n\n const exportResult = block.jsx!.export({\n fields: nodeFields,\n lexicalToMarkdown,\n })\n if (exportResult === false) {\n return null\n }\n if (typeof exportResult === 'string') {\n return exportResult\n }\n\n const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0\n const props = exportResult.props ?? {}\n\n if (exportResult?.children?.length) {\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}>${exportResult.children}</${nodeFields.blockType}>`\n }\n\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}/>`\n },\n getEndIndex: (node, match) => {\n const { endlineLastCharIndex } = linesFromStartToContentAndPropsString({\n isEndOptional: false,\n lines: [node.getTextContent()],\n regexpEndRegex: regex.regExpEnd,\n startLineIndex: 0,\n startMatch: match,\n trimChildren: false,\n })\n\n return endlineLastCharIndex\n },\n importRegExp: block.jsx?.customStartRegex ?? regex.regExpStart,\n regExp: /___ignoreignoreignore___/g,\n replace(node, match) {\n const { content, propsString } = linesFromStartToContentAndPropsString({\n isEndOptional: false,\n lines: [node.getTextContent()],\n regexpEndRegex: regex.regExpEnd,\n startLineIndex: 0,\n startMatch: {\n ...match,\n index: 0,\n },\n trimChildren: false,\n })\n\n if (!block?.jsx?.import) {\n // No multiline transformer handled this line successfully\n return\n }\n\n const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers)\n\n const blockFields = block.jsx.import({\n children: content,\n closeMatch: null,\n htmlToLexical: null, // TODO\n markdownToLexical,\n openMatch: match,\n props: propsString\n ? 
extractPropsFromJSXPropsString({\n propsString,\n })\n : {},\n })\n if (blockFields === false) {\n return\n }\n\n const inlineBlockNode = $createInlineBlockNode({\n blockType: block.slug,\n ...(blockFields as any),\n })\n\n node.replace(inlineBlockNode)\n },\n }))\n\n return toReturn\n }\n\n toReturn.push(({ allNodes, allTransformers }) => ({\n dependencies: [BlockNode],\n export: (node) => {\n if (!$isBlockNode(node)) {\n return null\n }\n\n if (node.getFields()?.blockType?.toLowerCase() !== block.slug.toLowerCase()) {\n return null\n }\n\n const nodeFields = node.getFields()\n const lexicalToMarkdown = getLexicalToMarkdown(allNodes, allTransformers)\n\n const exportResult = block.jsx!.export({\n fields: nodeFields,\n lexicalToMarkdown,\n })\n if (exportResult === false) {\n return null\n }\n if (typeof exportResult === 'string') {\n return exportResult\n }\n\n const hasProps = exportResult.props && Object.keys(exportResult.props)?.length > 0\n const props = exportResult.props ?? {}\n\n if (exportResult?.children?.length) {\n const children = exportResult.children\n let sanitizedChildren = ''\n\n // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child\n if (children.includes('\\n')) {\n for (const child of children.split('\\n')) {\n let sanitizedChild = ''\n if (!block?.jsx?.doNotTrimChildren && child !== '') {\n sanitizedChild = ' '\n }\n sanitizedChild += child + '\\n'\n\n sanitizedChildren += sanitizedChild\n }\n } else {\n sanitizedChildren = (block?.jsx?.doNotTrimChildren ? '' : ' ') + children + '\\n'\n }\n\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}>\\n${sanitizedChildren}</${nodeFields.blockType}>`\n }\n\n return `<${nodeFields.blockType}${hasProps ? ' ' + propsToJSXString({ props }) : ''}/>`\n },\n handleImportAfterStartMatch: block.jsx?.customEndRegex\n ? undefined\n : ({ lines, rootNode, startLineIndex, startMatch, transformer }) => {\n const regexpEndRegex: RegExp | undefined =\n typeof transformer.regExpEnd === 'object' && 'regExp' in transformer.regExpEnd\n ? transformer.regExpEnd.regExp\n : transformer.regExpEnd\n\n const isEndOptional =\n transformer.regExpEnd &&\n typeof transformer.regExpEnd === 'object' &&\n 'optional' in transformer.regExpEnd\n ? transformer.regExpEnd.optional\n : !transformer.regExpEnd\n\n const {\n afterEndLine,\n beforeStartLine,\n content: unsanitizedContent,\n endLineIndex,\n propsString,\n } = linesFromStartToContentAndPropsString({\n isEndOptional,\n lines,\n regexpEndRegex,\n startLineIndex,\n startMatch,\n trimChildren: false,\n })\n\n let content = ''\n\n if (block?.jsx?.doNotTrimChildren) {\n content = unsanitizedContent.endsWith('\\n')\n ? unsanitizedContent.slice(0, -1)\n : unsanitizedContent\n } else {\n // Ensure it has a leftpad of at least 2 spaces. The data is saved without those spaces, so we can just blindly add it to every child\n if (unsanitizedContent.includes('\\n')) {\n const split = unsanitizedContent.split('\\n')\n let index = 0\n for (const child of split) {\n index++\n\n if (child.startsWith(' ')) {\n content += child.slice(2)\n } else {\n // If one child is misaligned, skip aligning completely, unless it's just empty\n if (child === '') {\n content += child\n } else {\n content = unsanitizedContent.endsWith('\\n')\n ? unsanitizedContent.slice(0, -1)\n : unsanitizedContent\n break\n }\n }\n\n content += index === split.length ? 
'' : '\\n'\n }\n } else {\n content =\n (!unsanitizedContent.startsWith(' ')\n ? unsanitizedContent\n : unsanitizedContent.slice(2)) + '\\n'\n }\n }\n\n if (!block?.jsx?.import) {\n // No multiline transformer handled this line successfully\n return [false, startLineIndex]\n }\n\n const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers)\n\n const blockFields = block.jsx.import({\n children: content,\n closeMatch: null,\n htmlToLexical: null, // TODO\n markdownToLexical,\n openMatch: startMatch,\n props: propsString\n ? extractPropsFromJSXPropsString({\n propsString,\n })\n : {},\n })\n if (blockFields === false) {\n return [false, startLineIndex]\n }\n\n const node = $createBlockNode({\n blockType: block.slug,\n ...blockFields,\n } as any)\n\n if (node) {\n // Now handle beforeStartLine and afterEndLine. If those are not empty, we need to add them as text nodes before and after the block node.\n // However, those themselves can contain other markdown matches, so we need to parse them as well.\n // Example where this is needed: \"Hello <InlineCode>inline code</InlineCode> test.\"\n let prevNodes: null | SerializedLexicalNode[] = null\n let nextNodes: null | SerializedLexicalNode[] = null\n // TODO: Might not need this prevNodes and nextNodes handling if inline nodes are handled by textmatch transformers\n\n if (beforeStartLine?.length) {\n prevNodes = markdownToLexical({ markdown: beforeStartLine })?.root?.children ?? []\n\n const firstPrevNode = prevNodes?.[0]\n if (firstPrevNode) {\n rootNode.append($parseSerializedNode(firstPrevNode))\n }\n }\n\n rootNode.append(node)\n\n if (afterEndLine?.length) {\n nextNodes = markdownToLexical({ markdown: afterEndLine })?.root?.children\n const lastChild = rootNode.getChildren()[rootNode.getChildren().length - 1]\n\n const children = ($parseSerializedNode(nextNodes[0]!) as ElementNode)?.getChildren()\n if (children?.length) {\n for (const child of children) {\n ;(lastChild as ElementNode).append(child)\n }\n }\n }\n }\n\n return [true, endLineIndex]\n },\n regExpEnd: block.jsx?.customEndRegex ?? regex.regExpEnd,\n regExpStart: block.jsx?.customStartRegex ?? regex.regExpStart,\n // This replace is ONLY run for ``` code blocks (so any blocks with custom start and end regexes). For others, we use the special JSX handling above:\n type: 'multiline-element',\n replace: (rootNode, children, openMatch, closeMatch, linesInBetween) => {\n if (block?.jsx?.import) {\n if (!linesInBetween) {\n // convert children to linesInBetween\n let line = ''\n if (children) {\n for (const child of children) {\n line += child.getTextContent()\n }\n }\n\n linesInBetween = [line]\n }\n\n let childrenString = ''\n if (block?.jsx?.doNotTrimChildren) {\n childrenString = linesInBetween.join('\\n')\n } else {\n childrenString = linesInBetween.join('\\n').trim()\n }\n\n const propsString = openMatch[1]?.trim()\n\n const markdownToLexical = getMarkdownToLexical(allNodes, allTransformers)\n\n const blockFields = block.jsx.import({\n children: childrenString,\n closeMatch: closeMatch as RegExpMatchArray,\n htmlToLexical: null, // TODO\n markdownToLexical,\n openMatch: openMatch as RegExpMatchArray,\n props: propsString\n ? 
extractPropsFromJSXPropsString({\n propsString,\n })\n : {},\n })\n if (blockFields === false) {\n return false\n }\n\n const node = $createBlockNode({\n blockType: block.slug,\n ...blockFields,\n } as any)\n\n if (node) {\n rootNode.append(node)\n }\n\n return\n }\n return false // Run next transformer\n },\n }))\n\n return toReturn\n}\n"],"mappings":"AAEA,SACEA,oBAAoB,QAMf;AAQP,SAASC,8BAA8B,QAAQ;AAC/C,SAASC,gBAAgB,QAAQ;AACjC,SAASC,qCAAqC,QAAQ;AACtD,SAASC,gBAAgB,EAAEC,YAAY,EAAEC,SAAS,QAAQ;AAC1D,SACEC,sBAAsB,EACtBC,kBAAkB,EAClBC,eAAe,QACV;AACP,SAASC,oBAAoB,QAAQ;AACrC,SAASC,oBAAoB,QAAQ;AAErC,SAASC,iBAAiBC,OAAe;EACvC,MAAMC,cAAA,GAAiBD,OAAA,CAAQE,OAAO,CAAC,uBAAuB;EAC9D,OAAO;IACLC,SAAA,EAAW,IAAIC,MAAA,CAAO,MAAMH,cAAA,WAAyBA,cAAA,UAAwB,EAAE;IAC/EI,WAAA,EAAa,IAAID,MAAA,CAAO,KAAKH,cAAA,oBAAkC,EAAE;EACnE;AACF;AACA,OAAO,MAAMK,4BAAA,GAA+BA,CAAC;EAC3CC,MAAM;EACNC;AAAY,CAIb;EAIC,IAAI,CAACD,MAAA,EAAQE,MAAA,IAAU,CAACD,YAAA,EAAcC,MAAA,EAAQ;IAC5C,OAAO,EAAE;EACX;EACA,IAAIC,YAAA,GAG0D,EAAE;EAEhE,IAAIH,MAAA,EAAQE,MAAA,EAAQ;IAClB,KAAK,MAAME,KAAA,IAASJ,MAAA,EAAQ;MAC1B,MAAMK,WAAA,GAAcC,8BAAA,CAA+BF,KAAA,EAAO;MAE1D,IAAIC,WAAA,EAAa;QACfF,YAAA,GAAeA,YAAA,CAAaI,MAAM,CAACF,WAAA;MACrC;IACF;EACF;EAEA,IAAIJ,YAAA,EAAcC,MAAA,EAAQ;IACxB,KAAK,MAAME,KAAA,IAASH,YAAA,EAAc;MAChC,MAAMI,WAAA,GAAcC,8BAAA,CAA+BF,KAAA,EAAO;MAE1D,IAAIC,WAAA,EAAa;QACfF,YAAA,GAAeA,YAAA,CAAaI,MAAM,CAACF,WAAA;MACrC;IACF;EACF;EAEA,OAAOF,YAAA;AACT;AAEA,SAASG,+BACPF,KAAkB,EAClBI,aAAsB;EAOtB,IAAI,CAACJ,KAAA,CAAMK,GAAG,EAAE;IACd,OAAO;EACT;EACA,MAAMC,KAAA,GAAQlB,gBAAA,CAAiBY,KAAA,CAAMO,IAAI;EACzC,MAAMC,QAAA,GAKF,EAAE;EAEN,IAAIJ,aAAA,EAAe;IACjBI,QAAA,CAASC,IAAI,CAAC,CAAC;MAAEC,QAAQ;MAAEC;IAAe,CAAE,MAAM;MAChDC,IAAA,EAAM;MACNC,YAAA,EAAc,CAAC5B,eAAA,CAAgB;MAC/B6B,MAAA,EAASC,IAAA;QACP,IAAI,CAAC/B,kBAAA,CAAmB+B,IAAA,GAAO;UAC7B,OAAO;QACT;QAEA,IAAIA,IAAA,CAAKC,SAAS,IAAIC,SAAA,EAAWC,WAAA,OAAkBlB,KAAA,CAAMO,IAAI,CAACW,WAAW,IAAI;UAC3E,OAAO;QACT;QAEA,MAAMC,UAAA,GAAaJ,IAAA,CAAKC,SAAS;QACjC,MAAMI,iBAAA,GAAoBlC,oBAAA,CAAqBwB,QAAA,EAAUC,eAAA;QAEzD,MAAMU,YAAA,GAAerB,KAAA,CAAMK,GAAG,CAAES,MAAM,CAAC;UACrCQ,MAAA,EAAQH,UAAA;UACRC;QACF;QACA,IAAIC,YAAA,KAAiB,OAAO;UAC1B,OAAO;QACT;QACA,IAAI,OAAOA,YAAA,KAAiB,UAAU;UACpC,OAAOA,YAAA;QACT;QAEA,MAAME,QAAA,GAAWF,YAAA,CAAaG,KAAK,IAAIC,MAAA,CAAOC,IAAI,CAACL,YAAA,CAAaG,KAAK,GAAG1B,MAAA,GAAS;QACjF,MAAM0B,KAAA,GAAQH,YAAA,CAAaG,KAAK,IAAI,CAAC;QAErC,IAAIH,YAAA,EAAcM,QAAA,EAAU7B,MAAA,EAAQ;UAClC,OAAO,IAAIqB,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAM7C,gBAAA,CAAiB;YAAE8C;UAAM,KAAK,MAAMH,YAAA,CAAaM,QAAQ,KAAKR,UAAA,CAAWF,SAAS,GAAG;QAC1I;QAEA,OAAO,IAAIE,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAM7C,gBAAA,CAAiB;UAAE8C;QAAM,KAAK,MAAM;MACzF;MACAI,WAAA,EAAaA,CAACb,IAAA,EAAMc,KAAA;QAClB,MAAM;UAAEC;QAAoB,CAAE,GAAGnD,qCAAA,CAAsC;UACrEoD,aAAA,EAAe;UACfC,KAAA,EAAO,CAACjB,IAAA,CAAKkB,cAAc,GAAG;UAC9BC,cAAA,EAAgB5B,KAAA,CAAMd,SAAS;UAC/B2C,cAAA,EAAgB;UAChBC,UAAA,EAAYP,KAAA;UACZQ,YAAA,EAAc;QAChB;QAEA,OAAOP,oBAAA;MACT;MACAQ,YAAA,EAActC,KAAA,CAAMK,GAAG,EAAEkC,gBAAA,IAAoBjC,KAAA,CAAMZ,WAAW;MAC9D8C,MAAA,EAAQ;MACRjD,QAAQwB,IAAI,EAAEc,KAAK;QACjB,MAAM;UAAEY,OAAO;UAAEC;QAAW,CAAE,GAAG/D,qCAAA,CAAsC;UACrEoD,aAAA,EAAe;UACfC,KAAA,EAAO,CAACjB,IAAA,CAAKkB,cAAc,GAAG;UAC9BC,cAAA,EAAgB5B,KAAA,CAAMd,SAAS;UAC/B2C,cAAA,EAAgB;UAChBC,UAAA,EAAY;YACV,GAAGP,KAAK;YACRc,KAAA,EAAO;UACT;UACAN,YAAA,EAAc;QAChB;QAEA,IAAI,CAACrC,KAAA,EAAOK,GAAA,EAAKuC,MAAA,EAAQ;UACvB;UACA;QACF;QAEA,MAAMC,iBAAA,GAAoB1D,oBAAA,CAAqBuB,QAAA,EAAUC,eAAA;QAEzD,MAAMmC,WAAA,GAAc9C,KAAA,CAAMK,GAAG,CAACuC,MAAM,CAAC;UACnCjB,QAAA,EAAUc,OAAA;UACVM,UAAA,EAAY;UACZC,aAAA,EAAe;UACfH,iBAAA;UACAI,SAAA,EAAWpB,KAAA;UACXL,KAAA,EAAOkB,WAAA,GACHjE,8BAAA,
CAA+B;YAC7BiE;UACF,KACA,CAAC;QACP;QACA,IAAII,WAAA,KAAgB,OAAO;UACzB;QACF;QAEA,MAAMI,eAAA,GAAkBnE,sBAAA,CAAuB;UAC7CkC,SAAA,EAAWjB,KAAA,CAAMO,IAAI;UACrB,GAAIuC;QACN;QAEA/B,IAAA,CAAKxB,OAAO,CAAC2D,eAAA;MACf;IACF;IAEA,OAAO1C,QAAA;EACT;EAEAA,QAAA,CAASC,IAAI,CAAC,CAAC;IAAEC,QAAQ;IAAEC;EAAe,CAAE,MAAM;IAChDE,YAAA,EAAc,CAAC/B,SAAA,CAAU;IACzBgC,MAAA,EAASC,IAAA;MACP,IAAI,CAAClC,YAAA,CAAakC,IAAA,GAAO;QACvB,OAAO;MACT;MAEA,IAAIA,IAAA,CAAKC,SAAS,IAAIC,SAAA,EAAWC,WAAA,OAAkBlB,KAAA,CAAMO,IAAI,CAACW,WAAW,IAAI;QAC3E,OAAO;MACT;MAEA,MAAMC,UAAA,GAAaJ,IAAA,CAAKC,SAAS;MACjC,MAAMI,iBAAA,GAAoBlC,oBAAA,CAAqBwB,QAAA,EAAUC,eAAA;MAEzD,MAAMU,YAAA,GAAerB,KAAA,CAAMK,GAAG,CAAES,MAAM,CAAC;QACrCQ,MAAA,EAAQH,UAAA;QACRC;MACF;MACA,IAAIC,YAAA,KAAiB,OAAO;QAC1B,OAAO;MACT;MACA,IAAI,OAAOA,YAAA,KAAiB,UAAU;QACpC,OAAOA,YAAA;MACT;MAEA,MAAME,QAAA,GAAWF,YAAA,CAAaG,KAAK,IAAIC,MAAA,CAAOC,IAAI,CAACL,YAAA,CAAaG,KAAK,GAAG1B,MAAA,GAAS;MACjF,MAAM0B,KAAA,GAAQH,YAAA,CAAaG,KAAK,IAAI,CAAC;MAErC,IAAIH,YAAA,EAAcM,QAAA,EAAU7B,MAAA,EAAQ;QAClC,MAAM6B,QAAA,GAAWN,YAAA,CAAaM,QAAQ;QACtC,IAAIwB,iBAAA,GAAoB;QAExB;QACA,IAAIxB,QAAA,CAASyB,QAAQ,CAAC,OAAO;UAC3B,KAAK,MAAMC,KAAA,IAAS1B,QAAA,CAAS2B,KAAK,CAAC,OAAO;YACxC,IAAIC,cAAA,GAAiB;YACrB,IAAI,CAACvD,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,IAAqBH,KAAA,KAAU,IAAI;cAClDE,cAAA,GAAiB;YACnB;YACAA,cAAA,IAAkBF,KAAA,GAAQ;YAE1BF,iBAAA,IAAqBI,cAAA;UACvB;QACF,OAAO;UACLJ,iBAAA,GAAoB,CAACnD,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,GAAoB,KAAK,IAAG,IAAK7B,QAAA,GAAW;QAC/E;QAEA,OAAO,IAAIR,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAM7C,gBAAA,CAAiB;UAAE8C;QAAM,KAAK,QAAQ2B,iBAAA,KAAsBhC,UAAA,CAAWF,SAAS,GAAG;MACxI;MAEA,OAAO,IAAIE,UAAA,CAAWF,SAAS,GAAGM,QAAA,GAAW,MAAM7C,gBAAA,CAAiB;QAAE8C;MAAM,KAAK,MAAM;IACzF;IACAiC,2BAAA,EAA6BzD,KAAA,CAAMK,GAAG,EAAEqD,cAAA,GACpCC,SAAA,GACA,CAAC;MAAE3B,KAAK;MAAE4B,QAAQ;MAAEzB,cAAc;MAAEC,UAAU;MAAEnC;IAAW,CAAE;MAC3D,MAAMiC,cAAA,GACJ,OAAOjC,WAAA,CAAYT,SAAS,KAAK,YAAY,YAAYS,WAAA,CAAYT,SAAS,GAC1ES,WAAA,CAAYT,SAAS,CAACgD,MAAM,GAC5BvC,WAAA,CAAYT,SAAS;MAE3B,MAAMuC,aAAA,GACJ9B,WAAA,CAAYT,SAAS,IACrB,OAAOS,WAAA,CAAYT,SAAS,KAAK,YACjC,cAAcS,WAAA,CAAYT,SAAS,GAC/BS,WAAA,CAAYT,SAAS,CAACqE,QAAQ,GAC9B,CAAC5D,WAAA,CAAYT,SAAS;MAE5B,MAAM;QACJsE,YAAY;QACZC,eAAe;QACftB,OAAA,EAASuB,kBAAkB;QAC3BC,YAAY;QACZvB;MAAW,CACZ,GAAG/D,qCAAA,CAAsC;QACxCoD,aAAA;QACAC,KAAA;QACAE,cAAA;QACAC,cAAA;QACAC,UAAA;QACAC,YAAA,EAAc;MAChB;MAEA,IAAII,OAAA,GAAU;MAEd,IAAIzC,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,EAAmB;QACjCf,OAAA,GAAUuB,kBAAA,CAAmBE,QAAQ,CAAC,QAClCF,kBAAA,CAAmBG,KAAK,CAAC,GAAG,CAAC,KAC7BH,kBAAA;MACN,OAAO;QACL;QACA,IAAIA,kBAAA,CAAmBZ,QAAQ,CAAC,OAAO;UACrC,MAAME,KAAA,GAAQU,kBAAA,CAAmBV,KAAK,CAAC;UACvC,IAAIX,KAAA,GAAQ;UACZ,KAAK,MAAMU,KAAA,IAASC,KAAA,EAAO;YACzBX,KAAA;YAEA,IAAIU,KAAA,CAAMe,UAAU,CAAC,OAAO;cAC1B3B,OAAA,IAAWY,KAAA,CAAMc,KAAK,CAAC;YACzB,OAAO;cACL;cACA,IAAId,KAAA,KAAU,IAAI;gBAChBZ,OAAA,IAAWY,KAAA;cACb,OAAO;gBACLZ,OAAA,GAAUuB,kBAAA,CAAmBE,QAAQ,CAAC,QAClCF,kBAAA,CAAmBG,KAAK,CAAC,GAAG,CAAC,KAC7BH,kBAAA;gBACJ;cACF;YACF;YAEAvB,OAAA,IAAWE,KAAA,KAAUW,KAAA,CAAMxD,MAAM,GAAG,KAAK;UAC3C;QACF,OAAO;UACL2C,OAAA,GACE,CAAC,CAACuB,kBAAA,CAAmBI,UAAU,CAAC,QAC5BJ,kBAAA,GACAA,kBAAA,CAAmBG,KAAK,CAAC,EAAC,IAAK;QACvC;MACF;MAEA,IAAI,CAACnE,KAAA,EAAOK,GAAA,EAAKuC,MAAA,EAAQ;QACvB;QACA,OAAO,CAAC,OAAOT,cAAA,CAAe;MAChC;MAEA,MAAMU,iBAAA,GAAoB1D,oBAAA,CAAqBuB,QAAA,EAAUC,eAAA;MAEzD,MAAMmC,WAAA,GAAc9C,KAAA,CAAMK,GAAG,CAACuC,MAAM,CAAC;QACnCjB,QAAA,EAAUc,OAAA;QACVM,UAAA,EAAY;QACZC,aAAA,EAAe;QACfH,iBAAA;QACAI,SAAA,EAAWb,UAAA;QACXZ,KAAA,EAAOkB,WAAA,GACHjE,8BAAA,CAA+B;UAC7BiE;QACF,KACA,CAAC;MACP;MACA,IAAII,WAAA,KAAgB,OAAO;QACzB,OAAO,CAAC,OAAOX,cAAA,CAAe;MAChC;MAEA,MAAMpB,IAAA,GAAOnC,gBAAA,CAAiB;QAC5BqC,SAAA,EAAWjB,
KAAA,CAAMO,IAAI;QACrB,GAAGuC;MACL;MAEA,IAAI/B,IAAA,EAAM;QACR;QACA;QACA;QACA,IAAIsD,SAAA,GAA4C;QAChD,IAAIC,SAAA,GAA4C;QAChD;QAEA,IAAIP,eAAA,EAAiBjE,MAAA,EAAQ;UAC3BuE,SAAA,GAAYxB,iBAAA,CAAkB;YAAE0B,QAAA,EAAUR;UAAgB,IAAIS,IAAA,EAAM7C,QAAA,IAAY,EAAE;UAElF,MAAM8C,aAAA,GAAgBJ,SAAA,GAAY,EAAE;UACpC,IAAII,aAAA,EAAe;YACjBb,QAAA,CAASc,MAAM,CAAClG,oBAAA,CAAqBiG,aAAA;UACvC;QACF;QAEAb,QAAA,CAASc,MAAM,CAAC3D,IAAA;QAEhB,IAAI+C,YAAA,EAAchE,MAAA,EAAQ;UACxBwE,SAAA,GAAYzB,iBAAA,CAAkB;YAAE0B,QAAA,EAAUT;UAAa,IAAIU,IAAA,EAAM7C,QAAA;UACjE,MAAMgD,SAAA,GAAYf,QAAA,CAASgB,WAAW,EAAE,CAAChB,QAAA,CAASgB,WAAW,GAAG9E,MAAM,GAAG,EAAE;UAE3E,MAAM6B,QAAA,GAAYnD,oBAAA,CAAqB8F,SAAS,CAAC,EAAE,GAAoBM,WAAA;UACvE,IAAIjD,QAAA,EAAU7B,MAAA,EAAQ;YACpB,KAAK,MAAMuD,KAAA,IAAS1B,QAAA,EAAU;cAC1BgD,SAAA,CAA0BD,MAAM,CAACrB,KAAA;YACrC;UACF;QACF;MACF;MAEA,OAAO,CAAC,MAAMY,YAAA,CAAa;IAC7B;IACJzE,SAAA,EAAWQ,KAAA,CAAMK,GAAG,EAAEqD,cAAA,IAAkBpD,KAAA,CAAMd,SAAS;IACvDE,WAAA,EAAaM,KAAA,CAAMK,GAAG,EAAEkC,gBAAA,IAAoBjC,KAAA,CAAMZ,WAAW;IAC7D;IACAkB,IAAA,EAAM;IACNrB,OAAA,EAASA,CAACqE,QAAA,EAAUjC,QAAA,EAAUsB,SAAA,EAAWF,UAAA,EAAY8B,cAAA;MACnD,IAAI7E,KAAA,EAAOK,GAAA,EAAKuC,MAAA,EAAQ;QACtB,IAAI,CAACiC,cAAA,EAAgB;UACnB;UACA,IAAIC,IAAA,GAAO;UACX,IAAInD,QAAA,EAAU;YACZ,KAAK,MAAM0B,KAAA,IAAS1B,QAAA,EAAU;cAC5BmD,IAAA,IAAQzB,KAAA,CAAMpB,cAAc;YAC9B;UACF;UAEA4C,cAAA,GAAiB,CAACC,IAAA,CAAK;QACzB;QAEA,IAAIC,cAAA,GAAiB;QACrB,IAAI/E,KAAA,EAAOK,GAAA,EAAKmD,iBAAA,EAAmB;UACjCuB,cAAA,GAAiBF,cAAA,CAAeG,IAAI,CAAC;QACvC,OAAO;UACLD,cAAA,GAAiBF,cAAA,CAAeG,IAAI,CAAC,MAAMC,IAAI;QACjD;QAEA,MAAMvC,WAAA,GAAcO,SAAS,CAAC,EAAE,EAAEgC,IAAA;QAElC,MAAMpC,iBAAA,GAAoB1D,oBAAA,CAAqBuB,QAAA,EAAUC,eAAA;QAEzD,MAAMmC,WAAA,GAAc9C,KAAA,CAAMK,GAAG,CAACuC,MAAM,CAAC;UACnCjB,QAAA,EAAUoD,cAAA;UACVhC,UAAA,EAAYA,UAAA;UACZC,aAAA,EAAe;UACfH,iBAAA;UACAI,SAAA,EAAWA,SAAA;UACXzB,KAAA,EAAOkB,WAAA,GACHjE,8BAAA,CAA+B;YAC7BiE;UACF,KACA,CAAC;QACP;QACA,IAAII,WAAA,KAAgB,OAAO;UACzB,OAAO;QACT;QAEA,MAAM/B,IAAA,GAAOnC,gBAAA,CAAiB;UAC5BqC,SAAA,EAAWjB,KAAA,CAAMO,IAAI;UACrB,GAAGuC;QACL;QAEA,IAAI/B,IAAA,EAAM;UACR6C,QAAA,CAASc,MAAM,CAAC3D,IAAA;QAClB;QAEA;MACF;MACA,OAAO,MAAM;MAAA;IACf;EACF;EAEA,OAAOP,QAAA;AACT","ignoreList":[]}
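The removed source map above embeds the original `markdownTransformer.ts` in its `sourcesContent`, whose `createTagRegexes` helper builds the start/end patterns used to recognize JSX-style block tags in markdown. Below is a minimal sketch of that matching, reproduced from the embedded source; the `Banner` slug and the sample strings are hypothetical, chosen only to illustrate what the capture groups yield.

```ts
// A minimal sketch based on the createTagRegexes helper embedded in the removed source map.
// The `Banner` slug and sample strings below are hypothetical.
function createTagRegexes(tagName: string) {
  const escapedTagName = tagName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
  return {
    // Matches a closing tag (`</Banner>`) or a self-closing tag (`<Banner ... />`)
    regExpEnd: new RegExp(`</(${escapedTagName})\\s*>|<${escapedTagName}[^>]*?/>`, 'i'),
    // Matches an opening tag, capturing its raw props string and an optional self-closing slash
    regExpStart: new RegExp(`<(${escapedTagName})([^>]*?)\\s*(/?)>`, 'i'),
  }
}

const { regExpEnd, regExpStart } = createTagRegexes('Banner')

const openMatch = '<Banner type="info">'.match(regExpStart)
console.log(openMatch?.[2]) // ' type="info"' – the raw props string later parsed into block fields
console.log(regExpEnd.test('</Banner>')) // true – explicit closing tag ends the block
console.log(regExpEnd.test('<Banner type="info" />')) // true – the self-closing form also terminates it
```

In the transformer these regexes feed `linesFromStartToContentAndPropsString`, which splits the matched span into the children markdown and the props string handed to `extractPropsFromJSXPropsString`.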
@@ -1 +0,0 @@
-
{"version":3,"file":"Block.d.ts","sourceRoot":"","sources":["../../../../../../src/features/blocks/premade/CodeBlock/Component/Block.tsx"],"names":[],"mappings":"AAKA,OAAO,cAAc,CAAA;AAWrB,OAAO,KAAK,MAAM,OAAO,CAAA;AAEzB,OAAO,KAAK,EAAE,4BAA4B,EAAE,MAAM,WAAW,CAAA;AAQ7D,eAAO,MAAM,uBAAuB,EAAE,KAAK,CAAC,EAAE,CAC5C,QAAQ,CAAC,IAAI,CAAC,4BAA4B,EAAE,WAAW,CAAC,CAAC,CAmG1D,CAAA"}