@webiny/lexical-converter 5.38.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) Webiny
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,106 @@
1
+ # @webiny/lexical-converter
2
+
3
+ [![](https://img.shields.io/npm/dw/@webiny/lexical-converter.svg)](https://www.npmjs.com/package/@webiny/lexical-converter)
4
+ [![](https://img.shields.io/npm/v/@webiny/lexical-converter.svg)](https://www.npmjs.com/package/@webiny/lexical-converter)
5
+ [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier)
6
+ [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com)
7
+
8
+ ## About
9
+
10
+ This package provides features that will enable you to parse your HTML pages into Lexical editor state object.
11
+
12
+ Further, this Lexical state object can be imported into Webiny apps like the Page Builder and Headless CMS through the [Webiny GraphQL API](https://www.webiny.com/docs/headless-cms/basics/graphql-api).
13
+
14
+ > Webiny uses the Lexical editor as the primary rich text editor across the platform.
15
+
16
+ Note: This module is built to be used in the `node.js` environment.
17
+
18
+ ## Usage
19
+
20
+ To parse HTML into a Lexical editor state object, you need to import the `createHtmlToLexicalParser` factory function,
21
+ create the parser function (with the default or a custom configuration), and provide a DOM `Document` containing your HTML as the parameter
22
+ (in Node.js, such a document can be created with `jsdom`). The parser will return a Lexical editor state object.
23
+
24
+ > The parser uses the default configuration with the Webiny's Lexical nodes. DOM elements like headings and
25
+ > paragraph, for example, will be converted to our custom Webiny Lexical nodes.
26
+
27
+ ```tsx
28
+ import { JSDOM } from "jsdom";
+ import { createHtmlToLexicalParser } from "@webiny/lexical-converter";
29
+
30
+ const htmlString = "<p>My paragraph</p>";
+ const domDocument = new JSDOM(htmlString).window.document;
31
+
32
+ // Create a parser function.
33
+ const myParser = createHtmlToLexicalParser();
34
+
35
+ // Parse the DOM document to a Lexical editor state object.
36
+ const lexicalEditorState = myParser(domDocument);
37
+ ```
38
+
39
+ Here is the result in JSON format. This object structure is a valid Lexical editor state.
40
+
41
+ ```json
42
+ {
43
+ "root": {
44
+ "children": [
45
+ {
46
+ "children": [
47
+ {
48
+ "detail": 0,
49
+ "format": 0,
50
+ "mode": "normal",
51
+ "style": "",
52
+ "text": "My paragraph",
53
+ "type": "text",
54
+ "version": 1
55
+ }
56
+ ],
57
+ "direction": null,
58
+ "format": "",
59
+ "indent": 0,
60
+ "styles": [],
61
+ "type": "paragraph-element",
62
+ "version": 1
63
+ }
64
+ ],
65
+ "direction": null,
66
+ "format": "",
67
+ "indent": 0,
68
+ "type": "root",
69
+ "version": 1
70
+ }
71
+ }
72
+ ```
73
+
74
+ ## Configuration
75
+
76
+ To configure the parser, you can pass an optional configuration object to the parser factory.
77
+
78
+ ```ts
79
+ import { createHtmlToLexicalParser } from "@webiny/lexical-converter";
80
+ import { myCustomTheme } from "./theme/myCustomTheme";
81
+ import { MyCustomLexicalNode } from "./lexical/nodes/MyCustomLexicalNode";
82
+
83
+ const addCustomThemeStyleToHeadings = (node: LexicalNode): LexicalNode => {
84
+ if (node.getType() === "heading-element") {
85
+ return (node as HeadingNode).setThemeStyles([{ styleId: "my-default-id", type: "typography" }]);
86
+ }
87
+ return node;
88
+ };
89
+
90
+ // Create your parser with custom configuration
91
+ const myParser = createHtmlToLexicalParser({
92
+ // Lexical editor configuration
93
+ editorConfig: {
94
+ // Add custom nodes for parsing
95
+ nodes: [MyCustomLexicalNode],
96
+ // Add your custom theme
97
+ theme: myCustomTheme
98
+ },
99
+ nodeMapper: addCustomThemeStyleToHeadings,
100
+ normalizeTextNodes: false // Default: true
101
+ });
102
+
103
+ const lexicalEditorState = myParser(htmlString);
104
+ ```
105
+
106
+ To learn more about how to create custom Lexical nodes, please visit [Lexical's documentation web page](https://lexical.dev/docs/intro).
@@ -0,0 +1,8 @@
1
+ /// <reference types="react" />
2
+ /// <reference types="web" />
3
+ /// <reference types="aos" />
4
+ import { ParserConfigurationOptions } from "./types";
5
+ /**
6
+ * Parse html string to lexical JSON object.
7
+ */
8
+ export declare const createHtmlToLexicalParser: (config?: ParserConfigurationOptions) => (domDocument: Document) => Record<string, any> | null;
@@ -0,0 +1,74 @@
1
+ "use strict";
2
+
3
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
4
+ Object.defineProperty(exports, "__esModule", {
5
+ value: true
6
+ });
7
+ exports.createHtmlToLexicalParser = void 0;
8
+ var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
9
+ var _headless = require("@lexical/headless");
10
+ var _html = require("@lexical/html");
11
+ var _lexical = require("lexical");
12
+ var _lexicalNodes = require("@webiny/lexical-nodes");
13
+ /**
14
+ * By itself, "text" node without a parent node (like "paragraph"), is not a valid node. Lexical will simply ignore these elements.
15
+ * To fix this issue, we wrap the text node with a paragraph node.
16
+ *
17
+ * EXAMPLE:
18
+ * When we parse DOM, sometimes, 'span' html tag doesn't have parent elements that match the
19
+ * lexical node elements (there's no Node class that can handle that HTML element), like paragraph or headings.
20
+ * In this case, Lexical will parse the 'span' tag as a text node, but without a parent element.
21
+ */
22
+ const textNodeParentNormalizer = node => {
23
+ if (node.getType() === "text" && node.getParent() === null) {
24
+ return (0, _lexicalNodes.$createParagraphNode)().append(node);
25
+ }
26
+ return node;
27
+ };
28
+ const passthroughMapper = node => node;
29
+
30
+ /**
31
+ * Parse html string to lexical JSON object.
32
+ */
33
+ const createHtmlToLexicalParser = (config = {}) => {
34
+ return domDocument => {
35
+ var _config$editorConfig;
36
+ const normalizeTextNodes = config.normalizeTextNodes ?? true;
37
+ const textNodeNormalizer = normalizeTextNodes ? textNodeParentNormalizer : passthroughMapper;
38
+ const customNodeMapper = config.nodeMapper ?? passthroughMapper;
39
+ const editor = (0, _headless.createHeadlessEditor)((0, _objectSpread2.default)((0, _objectSpread2.default)({}, config.editorConfig), {}, {
40
+ nodes: [..._lexicalNodes.allNodes, ...(((_config$editorConfig = config.editorConfig) === null || _config$editorConfig === void 0 ? void 0 : _config$editorConfig.nodes) || [])]
41
+ }));
42
+ let parsingError;
43
+ editor.update(() => {
44
+ // Convert to lexical node objects that can be stored in db.
45
+ const lexicalNodes = (0, _html.$generateNodesFromDOM)(editor, domDocument).map(textNodeNormalizer).map(customNodeMapper);
46
+
47
+ // Select the root
48
+ (0, _lexical.$getRoot)().select();
49
+
50
+ // Insert the nodes at a selection.
51
+ const selection = (0, _lexical.$getSelection)();
52
+ if (selection) {
53
+ try {
54
+ selection.insertNodes(lexicalNodes);
55
+ } catch (err) {
56
+ parsingError = err;
57
+ }
58
+ }
59
+ },
60
+ /**
61
+ * Prevents this update from being batched, forcing it to run synchronously.
62
+ */
63
+ {
64
+ discrete: true
65
+ });
66
+ if (parsingError) {
67
+ throw parsingError;
68
+ }
69
+ return editor.getEditorState().toJSON();
70
+ };
71
+ };
72
+ exports.createHtmlToLexicalParser = createHtmlToLexicalParser;
73
+
74
+ //# sourceMappingURL=createHtmlToLexicalParser.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"names":["_headless","require","_html","_lexical","_lexicalNodes","textNodeParentNormalizer","node","getType","getParent","$createParagraphNode","append","passthroughMapper","createHtmlToLexicalParser","config","domDocument","_config$editorConfig","normalizeTextNodes","textNodeNormalizer","customNodeMapper","nodeMapper","editor","createHeadlessEditor","_objectSpread2","default","editorConfig","nodes","allNodes","parsingError","update","lexicalNodes","$generateNodesFromDOM","map","$getRoot","select","selection","$getSelection","insertNodes","err","discrete","getEditorState","toJSON","exports"],"sources":["createHtmlToLexicalParser.ts"],"sourcesContent":["import { createHeadlessEditor } from \"@lexical/headless\";\nimport { $generateNodesFromDOM } from \"@lexical/html\";\nimport { $getRoot, $getSelection } from \"lexical\";\nimport { allNodes, $createParagraphNode } from \"@webiny/lexical-nodes\";\nimport { NodeMapper, ParserConfigurationOptions } from \"~/types\";\n\n/**\n * By itself, \"text\" node without a parent node (like \"paragraph\"), is not a valid node. 
Lexical will simply ignore these elements.\n * To fix this issue, we wrap the text node with a paragraph node.\n *\n * EXAMPLE:\n * When we parse DOM, sometimes, 'span' html tag doesn't have parent elements that match the\n * lexical node elements (there's no Node class that can handle that HTML element), like paragraph or headings.\n * In this case, Lexical will parse the 'span' tag as a text node, but without a parent element.\n */\nconst textNodeParentNormalizer: NodeMapper = node => {\n if (node.getType() === \"text\" && node.getParent() === null) {\n return $createParagraphNode().append(node);\n }\n return node;\n};\n\nconst passthroughMapper: NodeMapper = node => node;\n\n/**\n * Parse html string to lexical JSON object.\n */\nexport const createHtmlToLexicalParser = (config: ParserConfigurationOptions = {}) => {\n return (domDocument: Document): Record<string, any> | null => {\n const normalizeTextNodes = config.normalizeTextNodes ?? true;\n const textNodeNormalizer = normalizeTextNodes\n ? textNodeParentNormalizer\n : passthroughMapper;\n\n const customNodeMapper: NodeMapper = config.nodeMapper ?? 
passthroughMapper;\n\n const editor = createHeadlessEditor({\n ...config.editorConfig,\n nodes: [...allNodes, ...(config.editorConfig?.nodes || [])]\n });\n\n let parsingError;\n\n editor.update(\n () => {\n // Convert to lexical node objects that can be stored in db.\n const lexicalNodes = $generateNodesFromDOM(editor, domDocument)\n .map(textNodeNormalizer)\n .map(customNodeMapper);\n\n // Select the root\n $getRoot().select();\n\n // Insert the nodes at a selection.\n const selection = $getSelection();\n if (selection) {\n try {\n selection.insertNodes(lexicalNodes);\n } catch (err) {\n parsingError = err;\n }\n }\n },\n /**\n * Prevents this update from being batched, forcing it to run synchronously.\n */\n { discrete: true }\n );\n\n if (parsingError) {\n throw parsingError;\n }\n\n return editor.getEditorState().toJSON();\n };\n};\n"],"mappings":";;;;;;;;AAAA,IAAAA,SAAA,GAAAC,OAAA;AACA,IAAAC,KAAA,GAAAD,OAAA;AACA,IAAAE,QAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAMI,wBAAoC,GAAGC,IAAI,IAAI;EACjD,IAAIA,IAAI,CAACC,OAAO,CAAC,CAAC,KAAK,MAAM,IAAID,IAAI,CAACE,SAAS,CAAC,CAAC,KAAK,IAAI,EAAE;IACxD,OAAO,IAAAC,kCAAoB,EAAC,CAAC,CAACC,MAAM,CAACJ,IAAI,CAAC;EAC9C;EACA,OAAOA,IAAI;AACf,CAAC;AAED,MAAMK,iBAA6B,GAAGL,IAAI,IAAIA,IAAI;;AAElD;AACA;AACA;AACO,MAAMM,yBAAyB,GAAGA,CAACC,MAAkC,GAAG,CAAC,CAAC,KAAK;EAClF,OAAQC,WAAqB,IAAiC;IAAA,IAAAC,oBAAA;IAC1D,MAAMC,kBAAkB,GAAGH,MAAM,CAACG,kBAAkB,IAAI,IAAI;IAC5D,MAAMC,kBAAkB,GAAGD,kBAAkB,GACvCX,wBAAwB,GACxBM,iBAAiB;IAEvB,MAAMO,gBAA4B,GAAGL,MAAM,CAACM,UAAU,IAAIR,iBAAiB;IAE3E,MAAMS,MAAM,GAAG,IAAAC,8BAAoB,MAAAC,cAAA,CAAAC,OAAA,MAAAD,cAAA,CAAAC,OAAA,MAC5BV,MAAM,CAACW,YAAY;MACtBC,KAAK,EAAE,CAAC,GAAGC,sBAAQ,EAAE,IAAI,EAAAX,oBAAA,GAAAF,MAAM,CAACW,YAAY,cAAAT,oBAAA,uBAAnBA,oBAAA,CAAqBU,KAAK,KAAI,EAAE,CAAC;IAAC,EAC9D,CAAC;IAEF,IAAIE,YAAY;IAEhBP,MAAM,CAACQ,MAAM,CACT,MAAM;MACF;MACA,MAAMC,YAAY,GAAG,IAAAC,2BAAqB,EAACV,MAAM,EAAEN,WAAW,CAAC,CAC1DiB,GAAG,CAACd,kBAAkB,CAAC,CACvBc,GAAG,CAACb,gBAAgB,CAAC;;MAE1B
;MACA,IAAAc,iBAAQ,EAAC,CAAC,CAACC,MAAM,CAAC,CAAC;;MAEnB;MACA,MAAMC,SAAS,GAAG,IAAAC,sBAAa,EAAC,CAAC;MACjC,IAAID,SAAS,EAAE;QACX,IAAI;UACAA,SAAS,CAACE,WAAW,CAACP,YAAY,CAAC;QACvC,CAAC,CAAC,OAAOQ,GAAG,EAAE;UACVV,YAAY,GAAGU,GAAG;QACtB;MACJ;IACJ,CAAC;IACD;AACZ;AACA;IACY;MAAEC,QAAQ,EAAE;IAAK,CACrB,CAAC;IAED,IAAIX,YAAY,EAAE;MACd,MAAMA,YAAY;IACtB;IAEA,OAAOP,MAAM,CAACmB,cAAc,CAAC,CAAC,CAACC,MAAM,CAAC,CAAC;EAC3C,CAAC;AACL,CAAC;AAACC,OAAA,CAAA7B,yBAAA,GAAAA,yBAAA"}
@@ -0,0 +1,17 @@
1
+ import { CreateEditorArgs, SerializedEditorState, LexicalNode } from "lexical";
2
+ interface LexicalStateTransformerConfig {
3
+ editorConfig?: Pick<CreateEditorArgs, "nodes" | "theme">;
4
+ }
5
+ export declare type FlatStateWithHTML = Array<{
6
+ node: LexicalNode;
7
+ html: string;
8
+ }>;
9
+ declare class LexicalStateTransformer {
10
+ private readonly editor;
11
+ constructor(config?: LexicalStateTransformerConfig);
12
+ flatten(state: string | SerializedEditorState): FlatStateWithHTML;
13
+ toHtml(state: string | SerializedEditorState): string;
14
+ private getNodeDescendants;
15
+ }
16
+ export declare const createLexicalStateTransformer: (config?: LexicalStateTransformerConfig) => LexicalStateTransformer;
17
+ export {};
@@ -0,0 +1,65 @@
1
+ "use strict";
2
+
3
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
4
+ Object.defineProperty(exports, "__esModule", {
5
+ value: true
6
+ });
7
+ exports.createLexicalStateTransformer = void 0;
8
+ var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
9
+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
10
+ var _lexical = require("lexical");
11
+ var _html = require("@lexical/html");
12
+ var _headless = require("@lexical/headless");
13
+ var _lexicalNodes = require("@webiny/lexical-nodes");
14
+ class LexicalStateTransformer {
15
+ constructor(config = {}) {
16
+ var _config$editorConfig;
17
+ (0, _defineProperty2.default)(this, "editor", void 0);
18
+ this.editor = (0, _headless.createHeadlessEditor)((0, _objectSpread2.default)((0, _objectSpread2.default)({}, config.editorConfig), {}, {
19
+ nodes: [..._lexicalNodes.allNodes, ...(((_config$editorConfig = config.editorConfig) === null || _config$editorConfig === void 0 ? void 0 : _config$editorConfig.nodes) || [])]
20
+ }));
21
+ }
22
+ flatten(state) {
23
+ const editorState = this.editor.parseEditorState(state);
24
+ this.editor.setEditorState(editorState);
25
+ let flattenedNodes = [];
26
+ this.editor.update(() => {
27
+ const children = (0, _lexical.$getRoot)().getChildren();
28
+ flattenedNodes = children.map(childNode => {
29
+ const selection = (0, _lexical.$createNodeSelection)();
30
+ selection.add(childNode.getKey());
31
+ this.getNodeDescendants(childNode).forEach(node => {
32
+ selection.add(node.getKey());
33
+ });
34
+ const html = (0, _html.$generateHtmlFromNodes)(this.editor, selection);
35
+ return {
36
+ node: childNode,
37
+ html
38
+ };
39
+ });
40
+ });
41
+ return flattenedNodes;
42
+ }
43
+ toHtml(state) {
44
+ const editorState = this.editor.parseEditorState(state);
45
+ this.editor.setEditorState(editorState);
46
+ let html = "";
47
+ this.editor.update(() => {
48
+ html = (0, _html.$generateHtmlFromNodes)(this.editor);
49
+ });
50
+ return html;
51
+ }
52
+ getNodeDescendants(node) {
53
+ if (!(0, _lexical.$isElementNode)(node)) {
54
+ return [];
55
+ }
56
+ const children = node.getChildren();
57
+ return [...children, ...children.map(child => this.getNodeDescendants(child)).flat()];
58
+ }
59
+ }
60
+ const createLexicalStateTransformer = config => {
61
+ return new LexicalStateTransformer(config);
62
+ };
63
+ exports.createLexicalStateTransformer = createLexicalStateTransformer;
64
+
65
+ //# sourceMappingURL=createLexicalStateTransformer.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"names":["_lexical","require","_html","_headless","_lexicalNodes","LexicalStateTransformer","constructor","config","_config$editorConfig","_defineProperty2","default","editor","createHeadlessEditor","_objectSpread2","editorConfig","nodes","allNodes","flatten","state","editorState","parseEditorState","setEditorState","flattenedNodes","update","children","$getRoot","getChildren","map","childNode","selection","$createNodeSelection","add","getKey","getNodeDescendants","forEach","node","html","$generateHtmlFromNodes","toHtml","$isElementNode","child","flat","createLexicalStateTransformer","exports"],"sources":["createLexicalStateTransformer.ts"],"sourcesContent":["import {\n CreateEditorArgs,\n LexicalEditor,\n SerializedEditorState,\n $getRoot,\n $createNodeSelection,\n $isElementNode,\n LexicalNode\n} from \"lexical\";\nimport { $generateHtmlFromNodes } from \"@lexical/html\";\nimport { createHeadlessEditor } from \"@lexical/headless\";\nimport { allNodes } from \"@webiny/lexical-nodes\";\n\ninterface LexicalStateTransformerConfig {\n editorConfig?: Pick<CreateEditorArgs, \"nodes\" | \"theme\">;\n}\n\nexport type FlatStateWithHTML = Array<{ node: LexicalNode; html: string }>;\n\nclass LexicalStateTransformer {\n private readonly editor: LexicalEditor;\n\n constructor(config: LexicalStateTransformerConfig = {}) {\n this.editor = createHeadlessEditor({\n ...config.editorConfig,\n nodes: [...allNodes, ...(config.editorConfig?.nodes || [])]\n });\n }\n\n public flatten(state: string | SerializedEditorState) {\n const editorState = this.editor.parseEditorState(state);\n this.editor.setEditorState(editorState);\n\n let flattenedNodes: FlatStateWithHTML = [];\n\n this.editor.update(() => {\n const children = $getRoot().getChildren();\n\n flattenedNodes = children.map(childNode => {\n const selection = $createNodeSelection();\n selection.add(childNode.getKey());\n\n this.getNodeDescendants(childNode).forEach(node => {\n selection.add(node.getKey());\n });\n\n 
const html = $generateHtmlFromNodes(this.editor, selection);\n\n return {\n node: childNode,\n html\n };\n });\n });\n\n return flattenedNodes;\n }\n\n public toHtml(state: string | SerializedEditorState) {\n const editorState = this.editor.parseEditorState(state);\n this.editor.setEditorState(editorState);\n\n let html = \"\";\n\n this.editor.update(() => {\n html = $generateHtmlFromNodes(this.editor);\n });\n\n return html;\n }\n\n private getNodeDescendants(node: LexicalNode): LexicalNode[] {\n if (!$isElementNode(node)) {\n return [];\n }\n const children = node.getChildren();\n return [...children, ...children.map(child => this.getNodeDescendants(child)).flat()];\n }\n}\n\nexport const createLexicalStateTransformer = (config?: LexicalStateTransformerConfig) => {\n return new LexicalStateTransformer(config);\n};\n"],"mappings":";;;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AASA,IAAAC,KAAA,GAAAD,OAAA;AACA,IAAAE,SAAA,GAAAF,OAAA;AACA,IAAAG,aAAA,GAAAH,OAAA;AAQA,MAAMI,uBAAuB,CAAC;EAG1BC,WAAWA,CAACC,MAAqC,GAAG,CAAC,CAAC,EAAE;IAAA,IAAAC,oBAAA;IAAA,IAAAC,gBAAA,CAAAC,OAAA;IACpD,IAAI,CAACC,MAAM,GAAG,IAAAC,8BAAoB,MAAAC,cAAA,CAAAH,OAAA,MAAAG,cAAA,CAAAH,OAAA,MAC3BH,MAAM,CAACO,YAAY;MACtBC,KAAK,EAAE,CAAC,GAAGC,sBAAQ,EAAE,IAAI,EAAAR,oBAAA,GAAAD,MAAM,CAACO,YAAY,cAAAN,oBAAA,uBAAnBA,oBAAA,CAAqBO,KAAK,KAAI,EAAE,CAAC;IAAC,EAC9D,CAAC;EACN;EAEOE,OAAOA,CAACC,KAAqC,EAAE;IAClD,MAAMC,WAAW,GAAG,IAAI,CAACR,MAAM,CAACS,gBAAgB,CAACF,KAAK,CAAC;IACvD,IAAI,CAACP,MAAM,CAACU,cAAc,CAACF,WAAW,CAAC;IAEvC,IAAIG,cAAiC,GAAG,EAAE;IAE1C,IAAI,CAACX,MAAM,CAACY,MAAM,CAAC,MAAM;MACrB,MAAMC,QAAQ,GAAG,IAAAC,iBAAQ,EAAC,CAAC,CAACC,WAAW,CAAC,CAAC;MAEzCJ,cAAc,GAAGE,QAAQ,CAACG,GAAG,CAACC,SAAS,IAAI;QACvC,MAAMC,SAAS,GAAG,IAAAC,6BAAoB,EAAC,CAAC;QACxCD,SAAS,CAACE,GAAG,CAACH,SAAS,CAACI,MAAM,CAAC,CAAC,CAAC;QAEjC,IAAI,CAACC,kBAAkB,CAACL,SAAS,CAAC,CAACM,OAAO,CAACC,IAAI,IAAI;UAC/CN,SAAS,CAACE,GAAG,CAACI,IAAI,CAACH,MAAM,CAAC,CAAC,CAAC;QAChC,CAAC,CAAC;QAEF,MAAMI,IAAI,GAAG,IAAAC,4BAAsB,EAAC,IAAI,CAAC1B,MAAM,EAAEkB,SAAS,CAAC;QAE3D,OAAO;UACH
M,IAAI,EAAEP,SAAS;UACfQ;QACJ,CAAC;MACL,CAAC,CAAC;IACN,CAAC,CAAC;IAEF,OAAOd,cAAc;EACzB;EAEOgB,MAAMA,CAACpB,KAAqC,EAAE;IACjD,MAAMC,WAAW,GAAG,IAAI,CAACR,MAAM,CAACS,gBAAgB,CAACF,KAAK,CAAC;IACvD,IAAI,CAACP,MAAM,CAACU,cAAc,CAACF,WAAW,CAAC;IAEvC,IAAIiB,IAAI,GAAG,EAAE;IAEb,IAAI,CAACzB,MAAM,CAACY,MAAM,CAAC,MAAM;MACrBa,IAAI,GAAG,IAAAC,4BAAsB,EAAC,IAAI,CAAC1B,MAAM,CAAC;IAC9C,CAAC,CAAC;IAEF,OAAOyB,IAAI;EACf;EAEQH,kBAAkBA,CAACE,IAAiB,EAAiB;IACzD,IAAI,CAAC,IAAAI,uBAAc,EAACJ,IAAI,CAAC,EAAE;MACvB,OAAO,EAAE;IACb;IACA,MAAMX,QAAQ,GAAGW,IAAI,CAACT,WAAW,CAAC,CAAC;IACnC,OAAO,CAAC,GAAGF,QAAQ,EAAE,GAAGA,QAAQ,CAACG,GAAG,CAACa,KAAK,IAAI,IAAI,CAACP,kBAAkB,CAACO,KAAK,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC;EACzF;AACJ;AAEO,MAAMC,6BAA6B,GAAInC,MAAsC,IAAK;EACrF,OAAO,IAAIF,uBAAuB,CAACE,MAAM,CAAC;AAC9C,CAAC;AAACoC,OAAA,CAAAD,6BAAA,GAAAA,6BAAA"}
package/index.d.ts ADDED
@@ -0,0 +1,2 @@
1
+ export * from "./createHtmlToLexicalParser";
2
+ export * from "./createLexicalStateTransformer";
package/index.js ADDED
@@ -0,0 +1,29 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ var _createHtmlToLexicalParser = require("./createHtmlToLexicalParser");
7
+ Object.keys(_createHtmlToLexicalParser).forEach(function (key) {
8
+ if (key === "default" || key === "__esModule") return;
9
+ if (key in exports && exports[key] === _createHtmlToLexicalParser[key]) return;
10
+ Object.defineProperty(exports, key, {
11
+ enumerable: true,
12
+ get: function () {
13
+ return _createHtmlToLexicalParser[key];
14
+ }
15
+ });
16
+ });
17
+ var _createLexicalStateTransformer = require("./createLexicalStateTransformer");
18
+ Object.keys(_createLexicalStateTransformer).forEach(function (key) {
19
+ if (key === "default" || key === "__esModule") return;
20
+ if (key in exports && exports[key] === _createLexicalStateTransformer[key]) return;
21
+ Object.defineProperty(exports, key, {
22
+ enumerable: true,
23
+ get: function () {
24
+ return _createLexicalStateTransformer[key];
25
+ }
26
+ });
27
+ });
28
+
29
+ //# sourceMappingURL=index.js.map
package/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"names":["_createHtmlToLexicalParser","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_createLexicalStateTransformer"],"sources":["index.ts"],"sourcesContent":["export * from \"./createHtmlToLexicalParser\";\nexport * from \"./createLexicalStateTransformer\";\n"],"mappings":";;;;;AAAA,IAAAA,0BAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,0BAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,0BAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,0BAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,8BAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,8BAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,8BAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,8BAAA,CAAAL,GAAA;IAAA;EAAA;AAAA"}
package/package.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "name": "@webiny/lexical-converter",
3
+ "version": "5.38.0-beta.0",
4
+ "dependencies": {
5
+ "@lexical/headless": "0.12.2",
6
+ "@lexical/html": "0.12.2",
7
+ "@webiny/lexical-nodes": "5.38.0-beta.0",
8
+ "lexical": "0.12.2"
9
+ },
10
+ "devDependencies": {
11
+ "@types/jsdom": "21.1.3",
12
+ "@webiny/cli": "5.38.0-beta.0",
13
+ "@webiny/project-utils": "5.38.0-beta.0",
14
+ "jsdom": "21.1.2"
15
+ },
16
+ "publishConfig": {
17
+ "access": "public",
18
+ "directory": "dist"
19
+ },
20
+ "scripts": {
21
+ "build": "yarn webiny run build",
22
+ "watch": "yarn webiny run watch",
23
+ "test": "jest --verbose --runInBand --detectOpenHandles --forceExit"
24
+ },
25
+ "gitHead": "5746389d4988626b50e2523ccfa0a9a5e398e0ed"
26
+ }
package/types.d.ts ADDED
@@ -0,0 +1,7 @@
1
+ import { CreateEditorArgs, LexicalNode } from "lexical";
2
+ export declare type NodeMapper = (node: LexicalNode) => LexicalNode;
3
+ export interface ParserConfigurationOptions {
4
+ editorConfig?: Pick<CreateEditorArgs, "nodes" | "theme">;
5
+ nodeMapper?: NodeMapper;
6
+ normalizeTextNodes?: boolean;
7
+ }
package/types.js ADDED
@@ -0,0 +1,7 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+
7
+ //# sourceMappingURL=types.js.map
package/types.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"names":[],"sources":["types.ts"],"sourcesContent":["import { CreateEditorArgs, LexicalNode } from \"lexical\";\n\nexport type NodeMapper = (node: LexicalNode) => LexicalNode;\n\nexport interface ParserConfigurationOptions {\n editorConfig?: Pick<CreateEditorArgs, \"nodes\" | \"theme\">;\n nodeMapper?: NodeMapper;\n normalizeTextNodes?: boolean;\n}\n"],"mappings":""}