@lexical/clipboard 0.1.12
- package/LICENSE +21 -0
- package/LexicalClipboard.dev.js +420 -0
- package/LexicalClipboard.js +9 -0
- package/LexicalClipboard.prod.js +15 -0
- package/README.md +3 -0
- package/package.json +27 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Dominic Gannaway
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/LexicalClipboard.dev.js
ADDED
@@ -0,0 +1,420 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+'use strict';
+
+var lexical = require('lexical');
+
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ *
+ *
+ */
+
+function $cloneWithProperties(node) {
+  const latest = node.getLatest();
+  const constructor = latest.constructor;
+  const clone = constructor.clone(latest);
+  clone.__parent = latest.__parent;
+
+  if (lexical.$isElementNode(latest) && lexical.$isElementNode(clone)) {
+    clone.__children = Array.from(latest.__children);
+    clone.__format = latest.__format;
+    clone.__indent = latest.__indent;
+    clone.__dir = latest.__dir;
+  } else if (lexical.$isTextNode(latest) && lexical.$isTextNode(clone)) {
+    clone.__format = latest.__format;
+    clone.__style = latest.__style;
+    clone.__mode = latest.__mode;
+    clone.__detail = latest.__detail;
+  } else if (lexical.$isDecoratorNode(latest) && lexical.$isDecoratorNode(clone)) {
+    clone.__state = latest.__state;
+  } // $FlowFixMe
+
+
+  return clone;
+}
+
+function $getIndexFromPossibleClone(node, parent, nodeMap) {
+  const parentClone = nodeMap.get(parent.getKey());
+
+  if (lexical.$isElementNode(parentClone)) {
+    return parentClone.__children.indexOf(node.getKey());
+  }
+
+  return node.getIndexWithinParent();
+}
+
+function $getParentAvoidingExcludedElements(node) {
+  let parent = node.getParent();
+
+  while (parent !== null && parent.excludeFromCopy()) {
+    parent = parent.getParent();
+  }
+
+  return parent;
+}
+
+function $copyLeafNodeBranchToRoot(leaf, startingOffset, isLeftSide, range, nodeMap) {
+  let node = leaf;
+  let offset = startingOffset;
+
+  while (node !== null) {
+    const parent = $getParentAvoidingExcludedElements(node);
+
+    if (parent === null) {
+      break;
+    }
+
+    if (!lexical.$isElementNode(node) || !node.excludeFromCopy()) {
+      const key = node.getKey();
+      let clone = nodeMap.get(key);
+      const needsClone = clone === undefined;
+
+      if (needsClone) {
+        clone = $cloneWithProperties(node);
+        nodeMap.set(key, clone);
+      }
+
+      if (lexical.$isTextNode(clone) && !clone.isSegmented() && !clone.isToken()) {
+        clone.__text = clone.__text.slice(isLeftSide ? offset : 0, isLeftSide ? undefined : offset);
+      } else if (lexical.$isElementNode(clone)) {
+        clone.__children = clone.__children.slice(isLeftSide ? offset : 0, isLeftSide ? undefined : offset + 1);
+      }
+
+      if (lexical.$isRootNode(parent)) {
+        if (needsClone) {
+          // We only want to collect a range of top level nodes.
+          // So if the parent is the root, we know this is a top level.
+          range.push(key);
+        }
+
+        break;
+      }
+    }
+
+    offset = $getIndexFromPossibleClone(node, parent, nodeMap);
+    node = parent;
+  }
+}
+
+function $cloneContents(selection) {
+  if (!lexical.$isRangeSelection(selection)) {
+    {
+      throw Error(`TODO`);
+    }
+  }
+
+  const anchor = selection.anchor;
+  const focus = selection.focus;
+  const anchorOffset = anchor.getCharacterOffset();
+  const focusOffset = focus.getCharacterOffset();
+  const anchorNode = anchor.getNode();
+  const focusNode = focus.getNode();
+  const anchorNodeParent = anchorNode.getParentOrThrow(); // Handle a single text node extraction
+
+  if (anchorNode === focusNode && lexical.$isTextNode(anchorNode) && (anchorNodeParent.canBeEmpty() || anchorNodeParent.getChildrenSize() > 1)) {
+    const clonedFirstNode = $cloneWithProperties(anchorNode);
+    const isBefore = focusOffset > anchorOffset;
+    const startOffset = isBefore ? anchorOffset : focusOffset;
+    const endOffset = isBefore ? focusOffset : anchorOffset;
+    clonedFirstNode.__text = clonedFirstNode.__text.slice(startOffset, endOffset);
+    const key = clonedFirstNode.getKey();
+    return {
+      nodeMap: [[key, clonedFirstNode]],
+      range: [key]
+    };
+  }
+
+  const nodes = selection.getNodes();
+
+  if (nodes.length === 0) {
+    return {
+      nodeMap: [],
+      range: []
+    };
+  } // Check if we can use the parent of the nodes, if the
+  // parent can't be empty, then it's important that we
+  // also copy that element node along with its children.
+
+
+  let nodesLength = nodes.length;
+  const firstNode = nodes[0];
+  const firstNodeParent = firstNode.getParent();
+
+  if (firstNodeParent !== null && (!firstNodeParent.canBeEmpty() || lexical.$isRootNode(firstNodeParent))) {
+    const parentChildren = firstNodeParent.__children;
+    const parentChildrenLength = parentChildren.length;
+
+    if (parentChildrenLength === nodesLength) {
+      let areTheSame = true;
+
+      for (let i = 0; i < parentChildren.length; i++) {
+        if (parentChildren[i] !== nodes[i].__key) {
+          areTheSame = false;
+          break;
+        }
+      }
+
+      if (areTheSame) {
+        nodesLength++;
+        nodes.push(firstNodeParent);
+      }
+    }
+  }
+
+  const lastNode = nodes[nodesLength - 1];
+  const isBefore = anchor.isBefore(focus);
+  const nodeMap = new Map();
+  const range = []; // Do first node to root
+
+  $copyLeafNodeBranchToRoot(firstNode, isBefore ? anchorOffset : focusOffset, true, range, nodeMap); // Copy all nodes between
+
+  for (let i = 0; i < nodesLength; i++) {
+    const node = nodes[i];
+    const key = node.getKey();
+
+    if (!nodeMap.has(key) && (!lexical.$isElementNode(node) || !node.excludeFromCopy())) {
+      const clone = $cloneWithProperties(node);
+
+      if (lexical.$isRootNode(node.getParent())) {
+        range.push(node.getKey());
+      }
+
+      nodeMap.set(key, clone);
+    }
+  } // Do last node to root
+
+
+  $copyLeafNodeBranchToRoot(lastNode, isBefore ? focusOffset : anchorOffset, false, range, nodeMap);
+  return {
+    nodeMap: Array.from(nodeMap.entries()),
+    range
+  };
+}
+
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ *
+ *
+ */
+function getHtmlContent(editor) {
+  const domSelection = window.getSelection(); // If we haven't selected a range, then don't copy anything
+
+  if (domSelection.isCollapsed) {
+    return null;
+  }
+
+  const range = domSelection.getRangeAt(0);
+
+  if (range) {
+    const container = document.createElement('div');
+    const frag = range.cloneContents();
+    container.appendChild(frag);
+    return container.innerHTML;
+  }
+
+  return null;
+}
+function $getLexicalContent(editor) {
+  const selection = lexical.$getSelection();
+
+  if (selection !== null) {
+    const namespace = editor._config.namespace;
+    return JSON.stringify({
+      namespace,
+      state: $cloneContents(selection)
+    });
+  }
+
+  return null;
+}
+function $insertDataTransferForPlainText(dataTransfer, selection) {
+  const text = dataTransfer.getData('text/plain');
+
+  if (text != null) {
+    selection.insertRawText(text);
+  }
+}
+function $insertDataTransferForRichText(dataTransfer, selection, editor) {
+  const lexicalNodesString = dataTransfer.getData('application/x-lexical-editor');
+
+  if (lexicalNodesString) {
+    const namespace = editor._config.namespace;
+
+    try {
+      const lexicalClipboardData = JSON.parse(lexicalNodesString);
+
+      if (lexicalClipboardData.namespace === namespace) {
+        const nodeRange = lexicalClipboardData.state;
+        const nodes = $generateNodes(nodeRange);
+        selection.insertNodes(nodes);
+        return;
+      }
+    } catch (e) {// Malformed, missing nodes..
+    }
+  }
+
+  const textHtmlMimeType = 'text/html';
+  const htmlString = dataTransfer.getData(textHtmlMimeType);
+
+  if (htmlString) {
+    const parser = new DOMParser();
+    const dom = parser.parseFromString(htmlString, textHtmlMimeType);
+    const nodes = $generateNodesFromDOM(dom, editor); // Wrap text and inline nodes in paragraph nodes so we have all blocks at the top-level
+
+    const topLevelBlocks = [];
+    let currentBlock = null;
+
+    for (let i = 0; i < nodes.length; i++) {
+      const node = nodes[i];
+
+      if (!lexical.$isElementNode(node) || node.isInline()) {
+        if (currentBlock === null) {
+          currentBlock = lexical.$createParagraphNode();
+          topLevelBlocks.push(currentBlock);
+        }
+
+        if (currentBlock !== null) {
+          currentBlock.append(node);
+        }
+      } else {
+        topLevelBlocks.push(node);
+        currentBlock = null;
+      }
+    }
+
+    selection.insertNodes(topLevelBlocks);
+    return;
+  }
+
+  $insertDataTransferForPlainText(dataTransfer, selection);
+}
+
+function $generateNodes(nodeRange) {
+  const {
+    range,
+    nodeMap
+  } = nodeRange;
+  const parsedNodeMap = new Map(nodeMap);
+  const nodes = [];
+
+  for (let i = 0; i < range.length; i++) {
+    const key = range[i];
+    const parsedNode = parsedNodeMap.get(key);
+
+    if (parsedNode !== undefined) {
+      const node = lexical.$createNodeFromParse(parsedNode, parsedNodeMap);
+      nodes.push(node);
+    }
+  }
+
+  return nodes;
+}
+
+function getConversionFunction(domNode, editor) {
+  const {
+    nodeName
+  } = domNode;
+
+  const cachedConversions = editor._htmlConversions.get(nodeName.toLowerCase());
+
+  let currentConversion = null;
+
+  if (cachedConversions !== undefined) {
+    cachedConversions.forEach(cachedConversion => {
+      const domConversion = cachedConversion(domNode);
+
+      if (domConversion !== null) {
+        if (currentConversion === null || currentConversion.priority < domConversion.priority) {
+          currentConversion = domConversion;
+        }
+      }
+    });
+  }
+
+  return currentConversion !== null ? currentConversion.conversion : null;
+}
+
+function $createNodesFromDOM(node, editor, forChildMap = new Map()) {
+  let lexicalNodes = [];
+  let currentLexicalNode = null;
+  const transformFunction = getConversionFunction(node, editor);
+  const transformOutput = transformFunction ? transformFunction(node) : null;
+  let postTransform = null;
+
+  if (transformOutput !== null) {
+    postTransform = transformOutput.after;
+    currentLexicalNode = transformOutput.node;
+
+    if (currentLexicalNode !== null) {
+      lexicalNodes.push(currentLexicalNode);
+      const forChildFunctions = Array.from(forChildMap.values());
+
+      for (let i = 0; i < forChildFunctions.length; i++) {
+        forChildFunctions[i](currentLexicalNode);
+      }
+    }
+
+    if (transformOutput.forChild != null) {
+      forChildMap.set(node.nodeName, transformOutput.forChild);
+    }
+  } // If the DOM node doesn't have a transformer, we don't know what
+  // to do with it but we still need to process any childNodes.
+
+
+  const children = node.childNodes;
+  let childLexicalNodes = [];
+
+  for (let i = 0; i < children.length; i++) {
+    childLexicalNodes.push(...$createNodesFromDOM(children[i], editor, forChildMap));
+  }
+
+  if (postTransform != null) {
+    childLexicalNodes = postTransform(childLexicalNodes);
+  }
+
+  if (currentLexicalNode == null) {
+    // If it hasn't been converted to a LexicalNode, we hoist its children
+    // up to the same level as it.
+    lexicalNodes = lexicalNodes.concat(childLexicalNodes);
+  } else {
+    if (lexical.$isElementNode(currentLexicalNode)) {
+      // If the current node is a ElementNode after conversion,
+      // we can append all the children to it.
+      currentLexicalNode.append(...childLexicalNodes);
+    }
+  }
+
+  return lexicalNodes;
+}
+
+function $generateNodesFromDOM(dom, editor) {
+  let lexicalNodes = [];
+  const elements = dom.body ? Array.from(dom.body.childNodes) : [];
+  const elementsLength = elements.length;
+
+  for (let i = 0; i < elementsLength; i++) {
+    const lexicalNode = $createNodesFromDOM(elements[i], editor);
+
+    if (lexicalNode !== null) {
+      lexicalNodes = lexicalNodes.concat(lexicalNode);
+    }
+  }
+
+  return lexicalNodes;
+}
+
+exports.$getLexicalContent = $getLexicalContent;
+exports.$insertDataTransferForPlainText = $insertDataTransferForPlainText;
+exports.$insertDataTransferForRichText = $insertDataTransferForRichText;
+exports.getHtmlContent = getHtmlContent;
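Note (not part of the published diff): the dev build above exports four clipboard helpers, and the sketch below shows one way they could be wired to native copy/paste events. It is a minimal, hypothetical example: registerClipboard is not an API of this package, and it assumes the editor.getRootElement(), editor.update(), $getSelection() and $isRangeSelection() APIs of the lexical core that this version peers with.

// Hypothetical consumer code, not shipped in @lexical/clipboard.
const {
  getHtmlContent,
  $getLexicalContent,
  $insertDataTransferForRichText,
} = require('@lexical/clipboard');
const {$getSelection, $isRangeSelection} = require('lexical');

function registerClipboard(editor) {
  // Assumption: the editor exposes its contentEditable root element.
  const rootElement = editor.getRootElement();

  const onCopy = (event) => {
    event.preventDefault();
    editor.update(() => {
      const clipboardData = event.clipboardData;
      const htmlString = getHtmlContent(editor);
      const lexicalString = $getLexicalContent(editor);
      if (htmlString !== null) {
        clipboardData.setData('text/html', htmlString);
      }
      if (lexicalString !== null) {
        // Same MIME type the paste path reads back.
        clipboardData.setData('application/x-lexical-editor', lexicalString);
      }
      clipboardData.setData('text/plain', window.getSelection().toString());
    });
  };

  const onPaste = (event) => {
    event.preventDefault();
    editor.update(() => {
      const selection = $getSelection();
      if ($isRangeSelection(selection)) {
        // Prefers the lexical payload, then text/html, then text/plain.
        $insertDataTransferForRichText(event.clipboardData, selection, editor);
      }
    });
  };

  rootElement.addEventListener('copy', onCopy);
  rootElement.addEventListener('paste', onPaste);
  return () => {
    rootElement.removeEventListener('copy', onCopy);
    rootElement.removeEventListener('paste', onPaste);
  };
}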
package/LexicalClipboard.js
ADDED
@@ -0,0 +1,9 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+'use strict'
+const LexicalClipboard = process.env.NODE_ENV === 'development' ? require('./LexicalClipboard.dev.js') : require('./LexicalClipboard.prod.js')
+module.exports = LexicalClipboard;
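Note (not part of the published diff): the entry file above picks the dev or prod bundle when it is first required, based on process.env.NODE_ENV. A small hypothetical consumer snippet, assuming a Node-style environment where NODE_ENV is set (bundlers typically inline this variable at build time):

// Hypothetical consumer code: NODE_ENV decides which bundle backs the exports.
process.env.NODE_ENV = process.env.NODE_ENV || 'development';

const clipboard = require('@lexical/clipboard');

// Both bundles expose the same four functions, e.g.
// ['$getLexicalContent', '$insertDataTransferForPlainText',
//  '$insertDataTransferForRichText', 'getHtmlContent']
console.log(Object.keys(clipboard));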
package/LexicalClipboard.prod.js
ADDED
@@ -0,0 +1,15 @@
+/**
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+var h=require("lexical");function r(a){a=a.getLatest();const b=a.constructor.clone(a);b.__parent=a.__parent;h.$isElementNode(a)&&h.$isElementNode(b)?(b.__children=Array.from(a.__children),b.__format=a.__format,b.__indent=a.__indent,b.__dir=a.__dir):h.$isTextNode(a)&&h.$isTextNode(b)?(b.__format=a.__format,b.__style=a.__style,b.__mode=a.__mode,b.__detail=a.__detail):h.$isDecoratorNode(a)&&h.$isDecoratorNode(b)&&(b.__state=a.__state);return b}
+function t(a,b,l,g,f){for(var c=b;null!==a;){for(b=a.getParent();null!==b&&b.excludeFromCopy();)b=b.getParent();if(null===b)break;if(!h.$isElementNode(a)||!a.excludeFromCopy()){const e=a.getKey();let d=f.get(e);const k=void 0===d;k&&(d=r(a),f.set(e,d));!h.$isTextNode(d)||d.isSegmented()||d.isToken()?h.$isElementNode(d)&&(d.__children=d.__children.slice(l?c:0,l?void 0:c+1)):d.__text=d.__text.slice(l?c:0,l?void 0:c);if(h.$isRootNode(b)){k&&g.push(e);break}}c=f.get(b.getKey());c=h.$isElementNode(c)?
+c.__children.indexOf(a.getKey()):a.getIndexWithinParent();a=b}}function u(a,b){a=a.getData("text/plain");null!=a&&b.insertRawText(a)}function v(a,b){const {nodeName:l}=a;b=b._htmlConversions.get(l.toLowerCase());let g=null;void 0!==b&&b.forEach(f=>{f=f(a);null!==f&&(null===g||g.priority<f.priority)&&(g=f)});return null!==g?g.conversion:null}
+function w(a,b,l=new Map){let g=[],f=null;var c=v(a,b),e=c?c(a):null;c=null;if(null!==e){c=e.after;f=e.node;if(null!==f){g.push(f);var d=Array.from(l.values());for(let k=0;k<d.length;k++)d[k](f)}null!=e.forChild&&l.set(a.nodeName,e.forChild)}a=a.childNodes;e=[];for(d=0;d<a.length;d++)e.push(...w(a[d],b,l));null!=c&&(e=c(e));null==f?g=g.concat(e):h.$isElementNode(f)&&f.append(...e);return g}
+exports.$getLexicalContent=function(a){var b=h.$getSelection();if(null!==b){const x=a._config.namespace;a=JSON;var l=a.stringify;{if(!h.$isRangeSelection(b))throw Error("Minified Lexical error #68; see codes.json for the full message or use the non-minified dev environment for full errors and additional helpful warnings.");var g=b.anchor,f=b.focus;var c=g.getCharacterOffset();const q=f.getCharacterOffset();var e=g.getNode(),d=f.getNode(),k=e.getParentOrThrow();if(e===d&&h.$isTextNode(e)&&(k.canBeEmpty()||
+1<k.getChildrenSize()))b=r(e),e=q>c,b.__text=b.__text.slice(e?c:q,e?q:c),c=b.getKey(),c={nodeMap:[[c,b]],range:[c]};else if(b=b.getNodes(),0===b.length)c={nodeMap:[],range:[]};else{e=b.length;d=b[0];k=d.getParent();if(null!==k&&(!k.canBeEmpty()||h.$isRootNode(k))){var m=k.__children;if(m.length===e){var n=!0;for(var p=0;p<m.length;p++)if(m[p]!==b[p].__key){n=!1;break}n&&(e++,b.push(k))}}k=b[e-1];g=g.isBefore(f);f=new Map;m=[];t(d,g?c:q,!0,m,f);for(d=0;d<e;d++)if(n=b[d],p=n.getKey(),!(f.has(p)||h.$isElementNode(n)&&
+n.excludeFromCopy())){const y=r(n);h.$isRootNode(n.getParent())&&m.push(n.getKey());f.set(p,y)}t(k,g?q:c,!1,m,f);c={nodeMap:Array.from(f.entries()),range:m}}}return l.call(a,{namespace:x,state:c})}return null};exports.$insertDataTransferForPlainText=u;
+exports.$insertDataTransferForRichText=function(a,b,l){var g=a.getData("application/x-lexical-editor");if(g){var f=l._config.namespace;try{const k=JSON.parse(g);if(k.namespace===f){const {range:m,nodeMap:n}=k.state;var c=new Map(n);g=[];for(f=0;f<m.length;f++){var e=c.get(m[f]);if(void 0!==e){var d=h.$createNodeFromParse(e,c);g.push(d)}}b.insertNodes(g);return}}catch(k){}}if(c=a.getData("text/html")){c=(new DOMParser).parseFromString(c,"text/html");a=[];c=c.body?Array.from(c.body.childNodes):[];e=
+c.length;for(d=0;d<e;d++)g=w(c[d],l),null!==g&&(a=a.concat(g));l=a;a=[];c=null;for(e=0;e<l.length;e++)d=l[e],!h.$isElementNode(d)||d.isInline()?(null===c&&(c=h.$createParagraphNode(),a.push(c)),null!==c&&c.append(d)):(a.push(d),c=null);b.insertNodes(a)}else u(a,b)};exports.getHtmlContent=function(){var a=window.getSelection();if(a.isCollapsed)return null;var b=a.getRangeAt(0);return b?(a=document.createElement("div"),b=b.cloneContents(),a.appendChild(b),a.innerHTML):null};
package/README.md
ADDED
package/package.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "name": "@lexical/clipboard",
+  "author": {
+    "name": "Dominic Gannaway",
+    "email": "dg@domgan.com"
+  },
+  "description": "This package provides the copy/paste functionality for Lexical.",
+  "keywords": [
+    "lexical",
+    "editor",
+    "rich-text",
+    "copy",
+    "paste"
+  ],
+  "license": "MIT",
+  "version": "0.1.12",
+  "main": "LexicalClipboard.js",
+  "peerDependencies": {
+    "lexical": "0.1.12",
+    "@lexical/helpers": "0.1.12"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/facebook/lexical",
+    "directory": "packages/lexical-clipboard"
+  }
+}