@etrepum/lexical-builder-markdown 0.0.26-nightly.20240625.0 → 0.0.26

package/dist/index.d.ts CHANGED
@@ -5,7 +5,8 @@
  * LICENSE file in the root directory of this source tree.
  *
  */
- export { type MarkdownTransformerOptions, type TransformersByType, } from './types';
+ export { type MarkdownTransformerOptions, type TransformersByType, type Filter, type KebabToCamel, } from './types';
  export { type MarkdownTransformersConfig, type MarkdownTransformersOutput, MarkdownTransformersPlan, } from './MarkdownTransformersPlan';
+ export { type MarkdownShortcutsConfig, type MarkdownShortcutsOutput, MarkdownShortcutsPlan, } from './MarkdownShortcutsPlan';
  export declare const PACKAGE_VERSION: string;
  //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,KAAK,0BAA0B,EAC/B,KAAK,kBAAkB,GACxB,MAAM,SAAS,CAAC;AACjB,OAAO,EACL,KAAK,0BAA0B,EAC/B,KAAK,0BAA0B,EAC/B,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,eAAO,MAAM,eAAe,EAAE,MAAwC,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,KAAK,0BAA0B,EAC/B,KAAK,kBAAkB,EACvB,KAAK,MAAM,EACX,KAAK,YAAY,GAClB,MAAM,SAAS,CAAC;AACjB,OAAO,EACL,KAAK,0BAA0B,EAC/B,KAAK,0BAA0B,EAC/B,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,KAAK,uBAAuB,EAC5B,KAAK,uBAAuB,EAC5B,qBAAqB,GACtB,MAAM,yBAAyB,CAAC;AACjC,eAAO,MAAM,eAAe,EAAE,MAAwC,CAAC"}
package/dist/index.js CHANGED
@@ -1,235 +1,242 @@
- import { definePlan as b, RichTextPlan as L, safeCast as D, getKnownTypesAndNodes as y, provideOutput as j } from "@etrepum/lexical-builder";
- import { ELEMENT_TRANSFORMERS as z, TEXT_FORMAT_TRANSFORMERS as K, TEXT_MATCH_TRANSFORMERS as W } from "@lexical/markdown";
- import { $createCodeNode as X } from "@lexical/code";
- import { $isListNode as x, $isListItemNode as G } from "@lexical/list";
- import { $isQuoteNode as U } from "@lexical/rich-text";
- import { $findMatchingParent as H, IS_SAFARI as N, IS_IOS as E, IS_APPLE_WEBKIT as $ } from "@lexical/utils";
- import { $getSelection as S, $setSelection as Q, $isParagraphNode as P, $isTextNode as p, $createParagraphNode as _, $isElementNode as g, $createTextNode as F, $createLineBreakNode as V, $isRootOrShadowRoot as Y, $getRoot as q, $isDecoratorNode as k, $isLineBreakNode as J } from "lexical";
- const R = /[!-/:-@[-`{-~\s]/, Z = /^\s{0,3}$/;
- function T(e) {
- if (!P(e))
+ import { definePlan as G, RichTextPlan as rt, safeCast as U, getKnownTypesAndNodes as st, provideOutput as q } from "@etrepum/lexical-builder";
+ import { ELEMENT_TRANSFORMERS as it, TEXT_FORMAT_TRANSFORMERS as ct, TEXT_MATCH_TRANSFORMERS as lt } from "@lexical/markdown";
+ import { $createCodeNode as at, $isCodeNode as ft } from "@lexical/code";
+ import { $isListNode as B, $isListItemNode as ut } from "@lexical/list";
+ import { $isQuoteNode as gt } from "@lexical/rich-text";
+ import { $findMatchingParent as dt, IS_SAFARI as O, IS_IOS as A, IS_APPLE_WEBKIT as L } from "@lexical/utils";
+ import { $getSelection as N, $setSelection as H, $isParagraphNode as Q, $isTextNode as x, $createParagraphNode as V, $isElementNode as E, $createTextNode as Y, $createLineBreakNode as pt, $isRootOrShadowRoot as J, $getRoot as mt, $isDecoratorNode as Z, $isLineBreakNode as tt, $isRangeSelection as v, $createRangeSelection as ht } from "lexical";
+ function y(t, o) {
+ const r = {};
+ for (const e of t) {
+ const i = o(e);
+ r[i] ? r[i].push(e) : r[i] = [e];
+ }
+ return r;
+ }
+ const $ = /[!-/:-@[-`{-~\s]/, xt = /^\s{0,3}$/;
+ function k(t) {
+ if (!Q(t))
  return !1;
- const t = e.getFirstChild();
- return t == null || e.getChildrenSize() === 1 && p(t) && Z.test(t.getTextContent());
+ const o = t.getFirstChild();
+ return o == null || t.getChildrenSize() === 1 && x(o) && xt.test(o.getTextContent());
  }
- function ee(e) {
- return function(...n) {
- const i = S(), s = i ? i.clone() : null, r = e(...n), c = S(), o = s && s.is(i) && !Object.isFrozen(i) ? i : s;
- return c !== o && Q(o), r;
+ function Tt(t) {
+ return function(...r) {
+ const e = N(), i = e ? e.clone() : null, n = t(...r), c = N(), s = i && i.is(e) && !Object.isFrozen(e) ? e : i;
+ return c !== s && H(s), n;
  };
  }
- function u(e, t, ...n) {
- if (!e)
+ function T(t, o, ...r) {
+ if (!t)
  throw new Error(
- n.reduce((i, s) => i.replace("%s", String(s)), t || "")
+ r.reduce((e, i) => e.replace("%s", String(i)), o || "")
  );
  }
- const w = /^[ \t]*```(\w{1,10})?\s?$/;
- function te(e, t) {
- const n = ie(
- e.textFormat || []
+ const D = /^[ \t]*```(\w{1,10})?\s?$/;
+ function St(t, o) {
+ const r = $t(
+ t.textFormat || []
  );
- return function(s, { shouldPreserveNewlines: r } = t) {
- const c = s.split(`
- `), o = c.length, l = _();
- for (let f = 0; f < o; f++) {
- const A = c[f], [B, I] = oe(c, f, l);
- if (B != null) {
- f = I;
+ return function(i, { shouldPreserveNewlines: n } = o) {
+ const c = i.split(`
+ `), s = c.length, l = V();
+ for (let f = 0; f < s; f++) {
+ const m = c[f], [g, u] = Et(c, f, l);
+ if (g != null) {
+ f = u;
  continue;
  }
- ne(
- A,
+ Nt(
+ m,
  l,
- e.element,
- n,
- e.textMatch
+ t.element,
+ r,
+ t.textMatch
  );
  }
  const a = [];
  for (let f = l.getFirstChild(); f; f = f.getNextSibling())
- u(
- g(f),
+ T(
+ E(f),
  "markdownImport: Expected child of type %s to be an ElementNode",
  f.getType()
- ), (r || !T(f)) && a.push(f);
+ ), (n || !k(f)) && a.push(f);
  return l.clear(), a;
  };
  }
- function ne(e, t, n, i, s) {
- const r = e.trim(), c = F(r), o = _();
- o.append(c), t.append(o);
- for (const { regExp: l, replace: a } of n) {
- const f = e.match(l);
+ function Nt(t, o, r, e, i) {
+ const n = t.trim(), c = Y(n), s = V();
+ s.append(c), o.append(s);
+ for (const { regExp: l, replace: a } of r) {
+ const f = t.match(l);
  if (f) {
- c.setTextContent(e.slice(f[0].length)), a(o, [c], f, !0);
+ c.setTextContent(t.slice(f[0].length)), a(s, [c], f, !0);
  break;
  }
  }
- if (m(
+ if (C(
  c,
- i,
- s
- ), o.isAttached() && r.length > 0) {
- const l = o.getPreviousSibling();
- if (P(l) || U(l) || x(l)) {
+ e,
+ i
+ ), s.isAttached() && n.length > 0) {
+ const l = s.getPreviousSibling();
+ if (Q(l) || gt(l) || B(l)) {
  let a = l;
- if (x(l)) {
+ if (B(l)) {
  const f = l.getLastDescendant();
- f == null ? a = null : a = H(f, G);
+ f == null ? a = null : a = dt(f, ut);
  }
  a != null && a.getTextContentSize() > 0 && (a.splice(a.getChildrenSize(), 0, [
- V(),
- ...o.getChildren()
- ]), o.remove());
+ pt(),
+ ...s.getChildren()
+ ]), s.remove());
  }
  }
  }
- function oe(e, t, n) {
- const i = e[t].match(w);
- if (i) {
- let s = t;
- const r = e.length;
- for (; ++s < r; )
- if (e[s].match(w)) {
- const o = X(i[1]), l = F(
- e.slice(t + 1, s).join(`
+ function Et(t, o, r) {
+ const e = t[o].match(D);
+ if (e) {
+ let i = o;
+ const n = t.length;
+ for (; ++i < n; )
+ if (t[i].match(D)) {
+ const s = at(e[1]), l = Y(
+ t.slice(o + 1, i).join(`
  `)
  );
- return o.append(l), n.append(o), [o, s];
+ return s.append(l), r.append(s), [s, i];
  }
  }
- return [null, t];
+ return [null, o];
  }
- function m(e, t, n) {
- const i = e.getTextContent(), s = re(i, t);
- if (!s) {
- O(e, n);
+ function C(t, o, r) {
+ const e = t.getTextContent(), i = Ct(e, o);
+ if (!i) {
+ et(t, r);
  return;
  }
- u(
- s[1] !== void 0 && s[2] !== void 0,
+ T(
+ i[1] !== void 0 && i[2] !== void 0,
  "importTextMatchTransformers: expecting match with two groups"
  );
- let r, c, o;
- if (s[0] === i)
- r = e;
+ let n, c, s;
+ if (i[0] === e)
+ n = t;
  else {
- const a = s.index || 0, f = a + s[0].length;
- a === 0 ? [r, c] = e.splitText(f) : [o, r, c] = e.splitText(
+ const a = i.index || 0, f = a + i[0].length;
+ a === 0 ? [n, c] = t.splitText(f) : [s, n, c] = t.splitText(
  a,
  f
  );
  }
- u(
- r !== void 0,
+ T(
+ n !== void 0,
  "importTextMatchTransformers: currentNode must be defined"
- ), r.setTextContent(s[2]);
- const l = t.transformersByTag[s[1]];
+ ), n.setTextContent(i[2]);
+ const l = o.transformersByTag[i[1]];
  if (l)
  for (const a of l.format)
- r.hasFormat(a) || r.toggleFormat(a);
- r.hasFormat("code") || m(
- r,
- t,
- n
- ), o && m(
+ n.hasFormat(a) || n.toggleFormat(a);
+ n.hasFormat("code") || C(
+ n,
+ o,
+ r
+ ), s && C(
+ s,
  o,
- t,
- n
- ), c && m(
+ r
+ ), c && C(
  c,
- t,
- n
+ o,
+ r
  );
  }
- function O(e, t) {
- let n = e;
- e:
- for (; n; ) {
- for (const i of t) {
- const s = n.getTextContent().match(i.importRegExp);
- if (!s)
- continue;
- const r = s.index || 0, c = r + s[0].length;
- let o, l;
- if (r === 0) {
- const a = n.splitText(c);
- u(
- a[0] !== void 0 && a[1] !== void 0,
- "importTextMatchTransformers: splitText expected two nodes"
- ), [o, n] = a;
- } else {
- const a = n.splitText(r, c);
- u(
- a[1] !== void 0 && a[2] !== void 0,
- "importTextMatchTransformers: splitText expected three nodes"
- ), [, o, l] = a, l && O(l, t);
- }
- i.replace(o, s);
- continue e;
+ function et(t, o) {
+ let r = t;
+ t: for (; r; ) {
+ for (const e of o) {
+ const i = r.getTextContent().match(e.importRegExp);
+ if (!i)
+ continue;
+ const n = i.index || 0, c = n + i[0].length;
+ let s, l;
+ if (n === 0) {
+ const a = r.splitText(c);
+ T(
+ a[0] !== void 0 && a[1] !== void 0,
+ "importTextMatchTransformers: splitText expected two nodes"
+ ), [s, r] = a;
+ } else {
+ const a = r.splitText(n, c);
+ T(
+ a[1] !== void 0 && a[2] !== void 0,
+ "importTextMatchTransformers: splitText expected three nodes"
+ ), [, s, l] = a, l && et(l, o);
  }
- break;
+ e.replace(s, i);
+ continue t;
  }
+ break;
+ }
  }
- function re(e, t) {
- const n = e.match(t.openTagsRegExp);
- if (n == null)
+ function Ct(t, o) {
+ const r = t.match(o.openTagsRegExp);
+ if (r == null)
  return null;
- for (const i of n) {
- const s = i.replace(/^\s/, ""), r = t.fullMatchRegExpByTag[s];
- if (r == null)
+ for (const e of r) {
+ const i = e.replace(/^\s/, ""), n = o.fullMatchRegExpByTag[i];
+ if (n == null)
  continue;
- const c = e.match(r), o = t.transformersByTag[s];
- if (c != null && o != null) {
- if (o.intraword !== !1)
+ const c = t.match(n), s = o.transformersByTag[i];
+ if (c != null && s != null) {
+ if (s.intraword !== !1)
  return c;
- const { index: l = 0 } = c, a = e[l - 1], f = e[l + c[0].length];
- if ((!a || R.test(a)) && (!f || R.test(f)))
+ const { index: l = 0 } = c, a = t[l - 1], f = t[l + c[0].length];
+ if ((!a || $.test(a)) && (!f || $.test(f)))
  return c;
  }
  }
  return null;
  }
- function ie(e) {
- const t = {}, n = {}, i = [], s = "(?<![\\\\])";
- for (const r of e) {
- const { tag: c } = r;
- t[c] = r;
- const o = c.replace(/(\*|\^|\+)/g, "\\$1");
- i.push(o), N || E || $ ? n[c] = new RegExp(
- `(${o})(?![${o}\\s])(.*?[^${o}\\s])${o}(?!${o})`
- ) : n[c] = new RegExp(
- `(?<![\\\\${o}])(${o})((\\\\${o})?.*?[^${o}\\s](\\\\${o})?)((?<!\\\\)|(?<=\\\\\\\\))(${o})(?![\\\\${o}])`
+ function $t(t) {
+ const o = {}, r = {}, e = [], i = "(?<![\\\\])";
+ for (const n of t) {
+ const { tag: c } = n;
+ o[c] = n;
+ const s = c.replace(/(\*|\^|\+)/g, "\\$1");
+ e.push(s), O || A || L ? r[c] = new RegExp(
+ `(${s})(?![${s}\\s])(.*?[^${s}\\s])${s}(?!${s})`
+ ) : r[c] = new RegExp(
+ `(?<![\\\\${s}])(${s})((\\\\${s})?.*?[^${s}\\s](\\\\${s})?)((?<!\\\\)|(?<=\\\\\\\\))(${s})(?![\\\\${s}])`
  );
  }
  return {
  // Reg exp to find open tag + content + close tag
- fullMatchRegExpByTag: n,
+ fullMatchRegExpByTag: r,
  // Reg exp to find opening tags
  openTagsRegExp: new RegExp(
- (N || E || $ ? "" : `${s}`) + "(" + i.join("|") + ")",
+ (O || A || L ? "" : `${i}`) + "(" + e.join("|") + ")",
  "g"
  ),
- transformersByTag: t
+ transformersByTag: o
  };
  }
- function se(e, { shouldPreserveNewlines: t }) {
- const n = !t, i = e.textFormat.filter(
- (s) => s.format.length === 1
+ function wt(t, { shouldPreserveNewlines: o }) {
+ const r = !o, e = t.textFormat.filter(
+ (i) => i.format.length === 1
  );
- return function(r = q()) {
- const c = [], o = Y(r) ? r.getChildren() : [r];
- for (let l = 0; l < o.length; l++) {
- const a = o[l], f = ce(
+ return function(n = mt()) {
+ const c = [], s = J(n) ? n.getChildren() : [n];
+ for (let l = 0; l < s.length; l++) {
+ const a = s[l], f = Rt(
  a,
- e.element,
- i,
- e.textMatch
+ t.element,
+ e,
+ t.textMatch
  );
  f != null && c.push(
  // separate consecutive group of texts with a line break: eg. ["hello", "world"] -> ["hello", "\nworld"]
- n && l > 0 && !T(a) && !T(o[l - 1]) ? `
+ r && l > 0 && !k(a) && !k(s[l - 1]) ? `
  `.concat(f) : f
  );
  }
@@ -237,129 +244,305 @@ function se(e, { shouldPreserveNewlines: t }) {
  `);
  };
  }
- function ce(e, t, n, i) {
- for (const s of t) {
- const r = s.export(
- e,
- (c) => d(c, n, i)
+ function Rt(t, o, r, e) {
+ for (const i of o) {
+ const n = i.export(
+ t,
+ (c) => w(c, r, e)
  );
- if (r != null)
- return r;
+ if (n != null)
+ return n;
  }
- return g(e) ? d(e, n, i) : k(e) ? e.getTextContent() : null;
+ return E(t) ? w(t, r, e) : Z(t) ? t.getTextContent() : null;
  }
- function d(e, t, n) {
- const i = [], s = e.getChildren();
- e:
- for (const r of s) {
- for (const c of n) {
- const o = c.export(
- r,
- (l) => d(
- l,
- t,
- n
- ),
- (l, a) => C(l, a, t)
- );
- if (o != null) {
- i.push(o);
- continue e;
- }
+ function w(t, o, r) {
+ const e = [], i = t.getChildren();
+ t: for (const n of i) {
+ for (const c of r) {
+ const s = c.export(
+ n,
+ (l) => w(
+ l,
+ o,
+ r
+ ),
+ (l, a) => K(l, a, o)
+ );
+ if (s != null) {
+ e.push(s);
+ continue t;
  }
- J(r) ? i.push(`
- `) : p(r) ? i.push(
- C(r, r.getTextContent(), t)
- ) : g(r) ? i.push(
- d(r, t, n)
- ) : k(r) && i.push(r.getTextContent());
  }
- return i.join("");
+ tt(n) ? e.push(`
+ `) : x(n) ? e.push(
+ K(n, n.getTextContent(), o)
+ ) : E(n) ? e.push(
+ w(n, o, r)
+ ) : Z(n) && e.push(n.getTextContent());
+ }
+ return e.join("");
  }
- function C(e, t, n) {
- const i = t.trim();
- let s = i;
- const r = /* @__PURE__ */ new Set();
- for (const c of n) {
- const o = c.format[0], l = c.tag;
- if (u(
- o !== void 0,
+ function K(t, o, r) {
+ const e = o.trim();
+ let i = e;
+ const n = /* @__PURE__ */ new Set();
+ for (const c of r) {
+ const s = c.format[0], l = c.tag;
+ if (T(
+ s !== void 0,
  "TextFormatTransformer for tag %s has empty format array",
  l
- ), h(e, o) && !r.has(o)) {
- r.add(o);
- const a = v(e, !0);
- h(a, o) || (s = l + s);
- const f = v(e, !1);
- h(f, o) || (s += l);
+ ), M(t, s) && !n.has(s)) {
+ n.add(s);
+ const a = j(t, !0);
+ M(a, s) || (i = l + i);
+ const f = j(t, !1);
+ M(f, s) || (i += l);
  }
  }
- return t.replace(i, () => s);
+ return o.replace(e, () => i);
  }
- function v(e, t) {
- let n = t ? e.getPreviousSibling() : e.getNextSibling();
- if (!n) {
- const i = e.getParentOrThrow();
- i.isInline() && (n = t ? i.getPreviousSibling() : i.getNextSibling());
+ function j(t, o) {
+ let r = o ? t.getPreviousSibling() : t.getNextSibling();
+ if (!r) {
+ const e = t.getParentOrThrow();
+ e.isInline() && (r = o ? e.getPreviousSibling() : e.getNextSibling());
  }
- for (; n; ) {
- if (g(n)) {
- if (!n.isInline())
+ for (; r; ) {
+ if (E(r)) {
+ if (!r.isInline())
  break;
- const i = t ? n.getLastDescendant() : n.getFirstDescendant();
- if (p(i))
- return i;
- n = t ? n.getPreviousSibling() : n.getNextSibling();
+ const e = o ? r.getLastDescendant() : r.getFirstDescendant();
+ if (x(e))
+ return e;
+ r = o ? r.getPreviousSibling() : r.getNextSibling();
  }
- if (p(n))
- return n;
- if (!g(n))
+ if (x(r))
+ return r;
+ if (!E(r))
  return null;
  }
  return null;
  }
- function h(e, t) {
- return p(e) && e.hasFormat(t);
+ function M(t, o) {
+ return x(t) && t.hasFormat(o);
  }
- function M({ nodes: e }, t) {
- const n = e.has.bind(e);
- return t.filter((i) => i.dependencies.every(n));
+ function z({ nodes: t }, o) {
+ const r = t.has.bind(t);
+ return o.filter((e) => e.dependencies.every(r));
  }
- const de = b({
+ const W = G({
  name: "@etrepum/lexical-builder-markdown/MarkdownTransformers",
- dependencies: [L],
- config: D({
- elementTransformers: z,
- textFormatTransformers: K,
- textMatchTransformers: W,
+ dependencies: [rt],
+ config: U({
+ elementTransformers: it,
+ textFormatTransformers: ct,
+ textMatchTransformers: lt,
  shouldPreserveNewlines: !1
  }),
  // For now we replace the transformer arrays with the default
  // shallowMergeConfig. I think ideally these should be additive
- init(e, t, n) {
- const i = y(e), s = {
- shouldPreserveNewlines: t.shouldPreserveNewlines
- }, r = {
+ init(t, o, r) {
+ const e = st(t), i = {
+ shouldPreserveNewlines: o.shouldPreserveNewlines
+ }, n = {
  // Only register transforms for nodes that are configured
- element: M(i, t.elementTransformers),
- textMatch: M(i, t.textMatchTransformers),
- textFormat: t.textFormatTransformers
- }, c = ee(
- te(r, s)
- ), o = se(
- r,
- s
+ element: z(e, o.elementTransformers),
+ textMatch: z(e, o.textMatchTransformers),
+ textFormat: o.textFormatTransformers
+ }, c = Tt(
+ St(n, i)
+ ), s = wt(
+ n,
+ i
  );
  return {
- transformerOptions: s,
- transformersByType: r,
- $markdownExport: o,
+ transformerOptions: i,
+ transformersByType: n,
+ $markdownExport: s,
  $markdownImport: c
  };
  },
- register: (e, t, n) => j(n.getInitResult())
- }), he = "0.0.26-nightly.20240625.0";
+ register: (t, o, r) => q(r.getInitResult())
+ });
+ function Mt(t, o, r, e) {
+ const i = t.getParent();
+ if (!J(i) || t.getFirstChild() !== o)
+ return !1;
+ const n = o.getTextContent();
+ if (n[r - 1] !== " ")
+ return !1;
+ for (const { regExp: c, replace: s } of e) {
+ const l = n.match(c);
+ if (l && l[0].length === r) {
+ const a = o.getNextSiblings(), [f, m] = o.splitText(r);
+ f.remove();
+ const g = m ? [m, ...a] : a;
+ return s(t, g, l, !1), !0;
+ }
+ }
+ return !1;
+ }
+ function vt(t, o, r) {
+ let e = t.getTextContent();
+ const i = e[o - 1], n = r[i];
+ if (n == null)
+ return !1;
+ o < e.length && (e = e.slice(0, o));
+ for (const c of n) {
+ const s = e.match(c.regExp);
+ if (s === null)
+ continue;
+ const l = s.index || 0, a = l + s[0].length;
+ let f;
+ return l === 0 ? f = t.splitText(a)[0] : f = t.splitText(l, a)[1], T(
+ x(f),
+ "MarkdownShortcuts: splitText(%s, %s)[%s] did not return the expected TextNode",
+ String(l),
+ String(a),
+ String(l === 0 ? 0 : 1)
+ ), f.selectNext(0, 0), c.replace(f, s), !0;
+ }
+ return !1;
+ }
+ function kt(t, o, r) {
+ const e = t.getTextContent(), i = o - 1, n = e[i], c = r[n];
+ if (!c)
+ return !1;
+ for (const s of c) {
+ const { tag: l } = s, a = l.length, f = i - a + 1;
+ if (a > 1 && !nt(e, f, l, 0, a) || e[f - 1] === " ")
+ continue;
+ const m = e[i + 1];
+ if (s.intraword === !1 && m && !$.test(m))
+ continue;
+ const g = t;
+ let u = g, d = X(
+ e,
+ f,
+ l
+ ), S = u;
+ for (; d < 0 && (S = S.getPreviousSibling()) && !tt(S); )
+ if (x(S)) {
+ const h = S.getTextContent();
+ u = S, d = X(
+ h,
+ h.length,
+ l
+ );
+ }
+ if (d < 0 || u === g && d + a === f)
+ continue;
+ const R = u.getTextContent();
+ if (d > 0 && R[d - 1] === n)
+ continue;
+ const F = R[d - 1];
+ if (s.intraword === !1 && F && !$.test(F))
+ continue;
+ const P = g.getTextContent(), _ = P.slice(0, f) + P.slice(i + 1);
+ g.setTextContent(_);
+ const b = u === g ? _ : R;
+ u.setTextContent(
+ b.slice(0, d) + b.slice(d + a)
+ );
+ const I = N(), p = ht();
+ H(p);
+ const ot = i - a * (u === g ? 2 : 1) + 1;
+ p.anchor.set(u.__key, d, "text"), p.focus.set(g.__key, ot, "text");
+ for (const h of s.format)
+ p.hasFormat(h) || p.formatText(h);
+ p.anchor.set(
+ p.focus.key,
+ p.focus.offset,
+ p.focus.type
+ );
+ for (const h of s.format)
+ p.hasFormat(h) && p.toggleFormat(h);
+ return v(I) && (p.format = I.format), !0;
+ }
+ return !1;
+ }
+ function X(t, o, r) {
+ const e = r.length;
+ for (let i = o; i >= e; i--) {
+ const n = i - e;
+ if (nt(t, n, r, 0, e) && // Space after opening tag cancels transformation
+ t[n + e] !== " ")
+ return n;
+ }
+ return -1;
+ }
+ function nt(t, o, r, e, i) {
+ for (let n = 0; n < i; n++)
+ if (t[o + n] !== r[e + n])
+ return !1;
+ return !0;
+ }
+ function Ft(t, o) {
+ const r = y(
+ o.textFormat,
+ ({ tag: n }) => n[n.length - 1]
+ ), e = y(
+ o.textMatch,
+ ({ trigger: n }) => n
+ ), i = (n, c, s) => {
+ Mt(
+ n,
+ c,
+ s,
+ o.element
+ ) || vt(
+ c,
+ s,
+ e
+ ) || kt(
+ c,
+ s,
+ r
+ );
+ };
+ return t.registerUpdateListener(
+ ({ tags: n, dirtyLeaves: c, editorState: s, prevEditorState: l }) => {
+ if (n.has("collaboration") || n.has("historic") || t.isComposing())
+ return;
+ const a = s.read(N), f = l.read(N);
+ if (!v(f) || !v(a) || !a.isCollapsed())
+ return;
+ const m = a.anchor.key, g = a.anchor.offset, u = s._nodeMap.get(m);
+ !x(u) || !c.has(m) || g !== 1 && g > f.anchor.offset + 1 || t.update(() => {
+ if (u.hasFormat("code"))
+ return;
+ const d = u.getParent();
+ d === null || ft(d) || i(d, u, a.anchor.offset);
+ });
+ }
+ );
+ }
+ const Lt = G({
+ name: "@etrepum/lexical-builder-markdown/MarkdownShortcuts",
+ dependencies: [W],
+ config: U({
+ enabled: !0
+ }),
+ register(t, o, r) {
+ let e = null;
+ function i() {
+ return e !== null;
+ }
+ function n(c) {
+ c && e === null ? e = Ft(
+ t,
+ r.getDependency(W).output.transformersByType
+ ) : !c && e !== null && (e(), e = null);
+ }
+ return n(o.enabled), q(
+ { getEnabled: i, setEnabled: n },
+ () => n(!1)
+ );
+ }
+ }), yt = "0.0.26";
  export {
- de as MarkdownTransformersPlan,
- he as PACKAGE_VERSION
+ Lt as MarkdownShortcutsPlan,
+ W as MarkdownTransformersPlan,
+ yt as PACKAGE_VERSION
  };
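MarkdownTransformersPlan's output ($markdownImport, $markdownExport, transformerOptions, transformersByType) is what the new shortcuts registration above consumes, and dependent plans can read it the same way. A rough sketch, using only the APIs visible in this bundle plus lexical's $getRoot; the plan name and the one-off round trip are illustrative, not part of the package:

import { definePlan, safeCast } from "@etrepum/lexical-builder";
import { MarkdownTransformersPlan } from "@etrepum/lexical-builder-markdown";
import { $getRoot } from "lexical";

// Illustrative plan that round-trips the document through markdown once.
export const MarkdownRoundTripPlan = definePlan({
  name: "example/markdown-round-trip", // hypothetical name
  config: safeCast({}),
  dependencies: [MarkdownTransformersPlan],
  register(editor, _config, state) {
    const { $markdownExport, $markdownImport } =
      state.getDependency(MarkdownTransformersPlan).output;
    editor.update(() => {
      // $markdownExport() serializes from $getRoot() by default and returns a string;
      // $markdownImport(text) returns the parsed top-level element nodes.
      const markdown = $markdownExport();
      const root = $getRoot();
      root.clear();
      root.append(...$markdownImport(markdown));
    });
    return () => {};
  },
});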
package/dist/types.d.ts CHANGED
@@ -3,12 +3,12 @@ import { Transformer } from '@lexical/markdown';
  export interface MarkdownTransformerOptions {
  shouldPreserveNewlines: boolean;
  }
- type Filter<T, U> = T extends U ? T : never;
- type KebabToCamel<S extends string> = S extends `${infer T}-${infer U}` ? `${T}${Capitalize<KebabToCamel<U>>}` : S;
+ export type Filter<T, U> = T extends U ? T : never;
+ export type KebabToCamel<S extends string> = S extends `${infer T}-${infer U}` ? `${T}${Capitalize<KebabToCamel<U>>}` : S;
+ /** Transformers by type (element, textFormat, textMatch) */
  export type TransformersByType = {
  readonly [K in Transformer["type"] as KebabToCamel<K>]: Filter<Transformer, {
  type: K;
  }>[];
  };
- export {};
  //# sourceMappingURL=types.d.ts.map
package/dist/types.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErD,MAAM,WAAW,0BAA0B;IACzC,sBAAsB,EAAE,OAAO,CAAC;CACjC;AAED,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;AAC5C,KAAK,YAAY,CAAC,CAAC,SAAS,MAAM,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,IAAI,MAAM,CAAC,EAAE,GACnE,GAAG,CAAC,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,EAAE,GACpC,CAAC,CAAC;AAEN,MAAM,MAAM,kBAAkB,GAAG;IAC/B,QAAQ,EAAE,CAAC,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,YAAY,CAAC,CAAC,CAAC,GAAG,MAAM,CAC5D,WAAW,EACX;QAAE,IAAI,EAAE,CAAC,CAAA;KAAE,CACZ,EAAE;CACJ,CAAC"}
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErD,MAAM,WAAW,0BAA0B;IACzC,sBAAsB,EAAE,OAAO,CAAC;CACjC;AAED,MAAM,MAAM,MAAM,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;AAEnD,MAAM,MAAM,YAAY,CAAC,CAAC,SAAS,MAAM,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,IAAI,MAAM,CAAC,EAAE,GAC1E,GAAG,CAAC,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,EAAE,GACpC,CAAC,CAAC;AAEN,4DAA4D;AAC5D,MAAM,MAAM,kBAAkB,GAAG;IAC/B,QAAQ,EAAE,CAAC,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,YAAY,CAAC,CAAC,CAAC,GAAG,MAAM,CAC5D,WAAW,EACX;QAAE,IAAI,EAAE,CAAC,CAAA;KAAE,CACZ,EAAE;CACJ,CAAC"}
package/package.json CHANGED
@@ -12,10 +12,11 @@
  "scripts": {
  "build": "tsc --noEmit && vite build",
  "dev": "vite",
+ "lint": "eslint",
  "test": "vitest run",
  "test:watch": "vitest"
  },
- "version": "0.0.26-nightly.20240625.0",
+ "version": "0.0.26",
  "license": "MIT",
  "repository": {
  "type": "git",
@@ -30,18 +31,20 @@
  "@etrepum/lexical-builder": "*"
  },
  "peerDependencies": {
- "lexical": ">=0.16.0-0",
  "@lexical/code": ">=0.16.0-0",
  "@lexical/list": ">=0.16.0-0",
  "@lexical/markdown": ">=0.16.0-0",
  "@lexical/rich-text": ">=0.16.0-0",
- "@lexical/utils": ">=0.16.0-0"
+ "@lexical/utils": ">=0.16.0-0",
+ "lexical": ">=0.16.0-0"
  },
  "sideEffects": false,
  "devDependencies": {
+ "@repo/eslint-config": "*",
  "@testing-library/dom": "^10.1.0",
  "@testing-library/jest-dom": "^6.4.5",
  "@testing-library/user-event": "^14.5.2",
+ "eslint": "^8.57.0",
  "jsdom": "^24.1.0",
  "tslib": "^2.6.2",
  "typescript": "^5.4.5",