@lexical/text 0.8.1 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LexicalText.dev.js +59 -4
- package/LexicalText.prod.js +6 -6
- package/package.json +2 -2
package/LexicalText.dev.js
CHANGED
@@ -12,49 +12,64 @@ var lexical = require('lexical');
 function $findTextIntersectionFromCharacters(root, targetCharacters) {
   let node = root.getFirstChild();
   let currentCharacters = 0;
+
   mainLoop: while (node !== null) {
     if (lexical.$isElementNode(node)) {
       const child = node.getFirstChild();
+
       if (child !== null) {
         node = child;
         continue;
       }
     } else if (lexical.$isTextNode(node)) {
       const characters = node.getTextContentSize();
+
       if (currentCharacters + characters > targetCharacters) {
         return {
           node,
           offset: targetCharacters - currentCharacters
         };
       }
+
       currentCharacters += characters;
     }
+
     const sibling = node.getNextSibling();
+
     if (sibling !== null) {
       node = sibling;
       continue;
     }
+
     let parent = node.getParent();
+
     while (parent !== null) {
       const parentSibling = parent.getNextSibling();
+
       if (parentSibling !== null) {
         node = parentSibling;
         continue mainLoop;
       }
+
       parent = parent.getParent();
     }
+
     break;
   }
+
   return null;
 }
 function $isRootTextContentEmpty(isEditorComposing, trim = true) {
   if (isEditorComposing) {
     return false;
   }
+
   let text = $rootTextContent();
+
   if (trim) {
     text = text.trim();
   }
+
   return text === '';
 }
 function $isRootTextContentEmptyCurry(isEditorComposing, trim) {
@@ -68,31 +83,44 @@ function $canShowPlaceholder(isComposing) {
   if (!$isRootTextContentEmpty(isComposing, false)) {
     return false;
   }
+
   const root = lexical.$getRoot();
   const children = root.getChildren();
   const childrenLength = children.length;
+
   if (childrenLength > 1) {
     return false;
   }
+
   for (let i = 0; i < childrenLength; i++) {
     const topBlock = children[i];
+
+    if (lexical.$isDecoratorNode(topBlock)) {
+      return false;
+    }
+
     if (lexical.$isElementNode(topBlock)) {
       if (!lexical.$isParagraphNode(topBlock)) {
         return false;
       }
+
       if (topBlock.__indent !== 0) {
         return false;
       }
+
       const topBlockChildren = topBlock.getChildren();
       const topBlockChildrenLength = topBlockChildren.length;
+
       for (let s = 0; s < topBlockChildrenLength; s++) {
         const child = topBlockChildren[i];
+
         if (!lexical.$isTextNode(child)) {
           return false;
         }
       }
     }
   }
+
   return true;
 }
 function $canShowPlaceholderCurry(isEditorComposing) {
@@ -102,67 +130,80 @@ function registerLexicalTextEntity(editor, getMatch, targetNode, createNode) {
   const isTargetNode = node => {
     return node instanceof targetNode;
   };
+
   const replaceWithSimpleText = node => {
     const textNode = lexical.$createTextNode(node.getTextContent());
     textNode.setFormat(node.getFormat());
     node.replace(textNode);
   };
+
   const getMode = node => {
     return node.getLatest().__mode;
   };
+
   const textNodeTransform = node => {
     if (!node.isSimpleText()) {
       return;
     }
+
     const prevSibling = node.getPreviousSibling();
     let text = node.getTextContent();
     let currentNode = node;
     let match;
+
     if (lexical.$isTextNode(prevSibling)) {
       const previousText = prevSibling.getTextContent();
       const combinedText = previousText + text;
       const prevMatch = getMatch(combinedText);
+
       if (isTargetNode(prevSibling)) {
         if (prevMatch === null || getMode(prevSibling) !== 0) {
           replaceWithSimpleText(prevSibling);
           return;
         } else {
           const diff = prevMatch.end - previousText.length;
+
           if (diff > 0) {
             const concatText = text.slice(0, diff);
             const newTextContent = previousText + concatText;
             prevSibling.select();
             prevSibling.setTextContent(newTextContent);
+
             if (diff === text.length) {
               node.remove();
             } else {
               const remainingText = text.slice(diff);
               node.setTextContent(remainingText);
             }
+
             return;
           }
         }
       } else if (prevMatch === null || prevMatch.start < previousText.length) {
         return;
       }
-    }
+    } // eslint-disable-next-line no-constant-condition
+

-    // eslint-disable-next-line no-constant-condition
     while (true) {
       match = getMatch(text);
       let nextText = match === null ? '' : text.slice(match.end);
       text = nextText;
+
       if (nextText === '') {
         const nextSibling = currentNode.getNextSibling();
+
         if (lexical.$isTextNode(nextSibling)) {
           nextText = currentNode.getTextContent() + nextSibling.getTextContent();
           const nextMatch = getMatch(nextText);
+
           if (nextMatch === null) {
             if (isTargetNode(nextSibling)) {
               replaceWithSimpleText(nextSibling);
             } else {
               nextSibling.markDirty();
             }
+
             return;
           } else if (nextMatch.start !== 0) {
             return;
@@ -170,56 +211,70 @@ function registerLexicalTextEntity(editor, getMatch, targetNode, createNode) {
         }
       } else {
         const nextMatch = getMatch(nextText);
+
         if (nextMatch !== null && nextMatch.start === 0) {
           return;
         }
       }
+
       if (match === null) {
         return;
       }
+
       if (match.start === 0 && lexical.$isTextNode(prevSibling) && prevSibling.isTextEntity()) {
         continue;
       }
+
       let nodeToReplace;
+
       if (match.start === 0) {
         [nodeToReplace, currentNode] = currentNode.splitText(match.end);
       } else {
         [, nodeToReplace, currentNode] = currentNode.splitText(match.start, match.end);
       }
+
       const replacementNode = createNode(nodeToReplace);
       nodeToReplace.replace(replacementNode);
+
       if (currentNode == null) {
         return;
       }
     }
   };
+
   const reverseNodeTransform = node => {
     const text = node.getTextContent();
     const match = getMatch(text);
+
     if (match === null || match.start !== 0) {
       replaceWithSimpleText(node);
       return;
     }
+
     if (text.length > match.end) {
       // This will split out the rest of the text as simple text
       node.splitText(match.end);
       return;
     }
+
     const prevSibling = node.getPreviousSibling();
+
     if (lexical.$isTextNode(prevSibling) && prevSibling.isTextEntity()) {
       replaceWithSimpleText(prevSibling);
       replaceWithSimpleText(node);
     }
+
     const nextSibling = node.getNextSibling();
+
     if (lexical.$isTextNode(nextSibling) && nextSibling.isTextEntity()) {
-      replaceWithSimpleText(nextSibling);
+      replaceWithSimpleText(nextSibling); // This may have already been converted in the previous block

-      // This may have already been converted in the previous block
       if (isTargetNode(node)) {
         replaceWithSimpleText(node);
       }
     }
   };
+
   const removePlainTextTransform = editor.registerNodeTransform(lexical.TextNode, textNodeTransform);
   const removeReverseNodeTransform = editor.registerNodeTransform(targetNode, reverseNodeTransform);
   return [removePlainTextTransform, removeReverseNodeTransform];
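For orientation alongside the dev diff above, a minimal usage sketch of registerLexicalTextEntity, one of the helpers it touches. The sketch is not part of the package contents; editor, MentionNode, and $createMentionNode are hypothetical placeholders for an existing LexicalEditor instance, a custom TextNode subclass, and its factory.

// Hypothetical example (not shipped with @lexical/text): register the two
// entity transforms and keep their teardown functions.
const MENTION_REGEX = /@[a-z0-9_]+/i;

// getMatch must return null or {start, end} offsets into the given text.
const getMatch = (text) => {
  const match = MENTION_REGEX.exec(text);
  if (match === null) {
    return null;
  }
  const start = match.index;
  return {end: start + match[0].length, start};
};

// createNode receives the TextNode that was split out around the match.
const createNode = (textNode) => $createMentionNode(textNode.getTextContent());

const [removePlainTextTransform, removeMentionTransform] =
  registerLexicalTextEntity(editor, getMatch, MentionNode, createNode);

// Calling the returned functions unregisters both node transforms.
removePlainTextTransform();
removeMentionTransform();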
package/LexicalText.prod.js
CHANGED
@@ -4,9 +4,9 @@
  * This source code is licensed under the MIT license found in the
  * LICENSE file in the root directory of this source tree.
  */
-'use strict';var
-exports.$canShowPlaceholderCurry=function(c){return()=>u(c)};exports.$findTextIntersectionFromCharacters=function(c,
-exports.$isRootTextContentEmpty=r;exports.$isRootTextContentEmptyCurry=function(c,
-exports.registerLexicalTextEntity=function(c,
-
-if(null==m)break}}});c=c.registerNodeTransform(b
+'use strict';var g=require("lexical");function r(c,h=!0){if(c)return!1;c=t();h&&(c=c.trim());return""===c}function t(){return g.$getRoot().getTextContent()}function u(c){if(!r(c,!1))return!1;c=g.$getRoot().getChildren();let h=c.length;if(1<h)return!1;for(let e=0;e<h;e++){var a=c[e];if(g.$isDecoratorNode(a))return!1;if(g.$isElementNode(a)){if(!g.$isParagraphNode(a)||0!==a.__indent)return!1;a=a.getChildren();let n=a.length;for(let p=0;p<n;p++)if(!g.$isTextNode(a[e]))return!1}}return!0}
+exports.$canShowPlaceholder=u;exports.$canShowPlaceholderCurry=function(c){return()=>u(c)};exports.$findTextIntersectionFromCharacters=function(c,h){var a=c.getFirstChild();c=0;a:for(;null!==a;){if(g.$isElementNode(a)){var e=a.getFirstChild();if(null!==e){a=e;continue}}else if(g.$isTextNode(a)){e=a.getTextContentSize();if(c+e>h)return{node:a,offset:h-c};c+=e}e=a.getNextSibling();if(null!==e)a=e;else{for(a=a.getParent();null!==a;){e=a.getNextSibling();if(null!==e){a=e;continue a}a=a.getParent()}break}}return null};
+exports.$isRootTextContentEmpty=r;exports.$isRootTextContentEmptyCurry=function(c,h){return()=>r(c,h)};exports.$rootTextContent=t;
+exports.registerLexicalTextEntity=function(c,h,a,e){let n=b=>{const d=g.$createTextNode(b.getTextContent());d.setFormat(b.getFormat());b.replace(d)},p=c.registerNodeTransform(g.TextNode,b=>{if(b.isSimpleText()){var d=b.getPreviousSibling(),l=b.getTextContent(),m=b;if(g.$isTextNode(d)){var k=d.getTextContent(),f=h(k+l);if(d instanceof a){if(null===f||0!==d.getLatest().__mode){n(d);return}f=f.end-k.length;if(0<f){m=l.slice(0,f);m=k+m;d.select();d.setTextContent(m);f===l.length?b.remove():(d=l.slice(f),
+b.setTextContent(d));return}}else if(null===f||f.start<k.length)return}for(;;){b=h(l);l=f=null===b?"":l.slice(b.end);if(""===f){if(k=m.getNextSibling(),g.$isTextNode(k))if(f=m.getTextContent()+k.getTextContent(),f=h(f),null===f){k instanceof a?n(k):k.markDirty();break}else if(0!==f.start)break}else if(k=h(f),null!==k&&0===k.start)break;if(null===b)break;if(0===b.start&&g.$isTextNode(d)&&d.isTextEntity())continue;let q;0===b.start?[q,m]=m.splitText(b.end):[,q,m]=m.splitText(b.start,b.end);b=e(q);q.replace(b);
+if(null==m)break}}});c=c.registerNodeTransform(a,b=>{var d=b.getTextContent();const l=h(d);null===l||0!==l.start?n(b):d.length>l.end?b.splitText(l.end):(d=b.getPreviousSibling(),g.$isTextNode(d)&&d.isTextEntity()&&(n(d),n(b)),d=b.getNextSibling(),g.$isTextNode(d)&&d.isTextEntity()&&(n(d),b instanceof a&&n(b)))});return[p,c]}
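For reference, the helpers visible in the prod bundle above ($canShowPlaceholder, $isRootTextContentEmpty, $rootTextContent) are read-only queries that expect an active editor state. A minimal sketch of typical usage, assumed rather than taken from this diff, with editor standing in for an existing LexicalEditor instance:

// Hypothetical usage (not part of the package diff): run the queries inside
// an editor state read so $getRoot() can resolve.
import {$canShowPlaceholder, $isRootTextContentEmpty} from '@lexical/text';

editor.getEditorState().read(() => {
  // True when the root is effectively empty (at most one unindented, text-only paragraph).
  const showPlaceholder = $canShowPlaceholder(editor.isComposing());
  // True when the trimmed root text content is an empty string.
  const isEmpty = $isRootTextContentEmpty(editor.isComposing(), true);
  console.log({isEmpty, showPlaceholder});
});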
package/package.json
CHANGED