@fuzdev/fuz_code 0.37.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as published.
Files changed (76)
  1. package/LICENSE +25 -0
  2. package/README.md +185 -0
  3. package/dist/Code.svelte +146 -0
  4. package/dist/Code.svelte.d.ts +79 -0
  5. package/dist/Code.svelte.d.ts.map +1 -0
  6. package/dist/CodeHighlight.svelte +205 -0
  7. package/dist/CodeHighlight.svelte.d.ts +101 -0
  8. package/dist/CodeHighlight.svelte.d.ts.map +1 -0
  9. package/dist/code_sample.d.ts +8 -0
  10. package/dist/code_sample.d.ts.map +1 -0
  11. package/dist/code_sample.js +2 -0
  12. package/dist/grammar_clike.d.ts +12 -0
  13. package/dist/grammar_clike.d.ts.map +1 -0
  14. package/dist/grammar_clike.js +43 -0
  15. package/dist/grammar_css.d.ts +11 -0
  16. package/dist/grammar_css.d.ts.map +1 -0
  17. package/dist/grammar_css.js +70 -0
  18. package/dist/grammar_js.d.ts +11 -0
  19. package/dist/grammar_js.d.ts.map +1 -0
  20. package/dist/grammar_js.js +180 -0
  21. package/dist/grammar_json.d.ts +11 -0
  22. package/dist/grammar_json.d.ts.map +1 -0
  23. package/dist/grammar_json.js +35 -0
  24. package/dist/grammar_markdown.d.ts +8 -0
  25. package/dist/grammar_markdown.d.ts.map +1 -0
  26. package/dist/grammar_markdown.js +228 -0
  27. package/dist/grammar_markup.d.ts +31 -0
  28. package/dist/grammar_markup.d.ts.map +1 -0
  29. package/dist/grammar_markup.js +192 -0
  30. package/dist/grammar_svelte.d.ts +12 -0
  31. package/dist/grammar_svelte.d.ts.map +1 -0
  32. package/dist/grammar_svelte.js +150 -0
  33. package/dist/grammar_ts.d.ts +11 -0
  34. package/dist/grammar_ts.d.ts.map +1 -0
  35. package/dist/grammar_ts.js +95 -0
  36. package/dist/highlight_manager.d.ts +25 -0
  37. package/dist/highlight_manager.d.ts.map +1 -0
  38. package/dist/highlight_manager.js +139 -0
  39. package/dist/highlight_priorities.d.ts +3 -0
  40. package/dist/highlight_priorities.d.ts.map +1 -0
  41. package/dist/highlight_priorities.gen.d.ts +4 -0
  42. package/dist/highlight_priorities.gen.d.ts.map +1 -0
  43. package/dist/highlight_priorities.gen.js +58 -0
  44. package/dist/highlight_priorities.js +55 -0
  45. package/dist/syntax_styler.d.ts +277 -0
  46. package/dist/syntax_styler.d.ts.map +1 -0
  47. package/dist/syntax_styler.js +426 -0
  48. package/dist/syntax_styler_global.d.ts +3 -0
  49. package/dist/syntax_styler_global.d.ts.map +1 -0
  50. package/dist/syntax_styler_global.js +18 -0
  51. package/dist/syntax_token.d.ts +34 -0
  52. package/dist/syntax_token.d.ts.map +1 -0
  53. package/dist/syntax_token.js +27 -0
  54. package/dist/theme.css +98 -0
  55. package/dist/theme_highlight.css +160 -0
  56. package/dist/theme_variables.css +20 -0
  57. package/dist/tokenize_syntax.d.ts +28 -0
  58. package/dist/tokenize_syntax.d.ts.map +1 -0
  59. package/dist/tokenize_syntax.js +194 -0
  60. package/package.json +117 -0
  61. package/src/lib/code_sample.ts +10 -0
  62. package/src/lib/grammar_clike.ts +48 -0
  63. package/src/lib/grammar_css.ts +84 -0
  64. package/src/lib/grammar_js.ts +215 -0
  65. package/src/lib/grammar_json.ts +38 -0
  66. package/src/lib/grammar_markdown.ts +289 -0
  67. package/src/lib/grammar_markup.ts +225 -0
  68. package/src/lib/grammar_svelte.ts +165 -0
  69. package/src/lib/grammar_ts.ts +114 -0
  70. package/src/lib/highlight_manager.ts +182 -0
  71. package/src/lib/highlight_priorities.gen.ts +71 -0
  72. package/src/lib/highlight_priorities.ts +110 -0
  73. package/src/lib/syntax_styler.ts +583 -0
  74. package/src/lib/syntax_styler_global.ts +20 -0
  75. package/src/lib/syntax_token.ts +49 -0
  76. package/src/lib/tokenize_syntax.ts +270 -0
package/dist/theme_highlight.css ADDED
@@ -0,0 +1,160 @@
+ .token_processing_instruction,
+ ::highlight(token_processing_instruction),
+ .token_doctype,
+ ::highlight(token_doctype),
+ .token_cdata,
+ ::highlight(token_cdata),
+ .token_punctuation,
+ ::highlight(token_punctuation) {
+ 	color: var(--text_color_5);
+ }
+
+ .token_tag,
+ ::highlight(token_tag),
+ .token_constant,
+ ::highlight(token_constant),
+ .token_symbol,
+ ::highlight(token_symbol),
+ .token_deleted,
+ ::highlight(token_deleted),
+ .token_keyword,
+ ::highlight(token_keyword),
+ .token_null,
+ ::highlight(token_null),
+ .token_boolean,
+ ::highlight(token_boolean),
+ .token_interpolation_punctuation,
+ ::highlight(token_interpolation_punctuation),
+ .token_heading,
+ ::highlight(token_heading),
+ .token_heading_punctuation,
+ ::highlight(token_heading_punctuation),
+ .token_tag_punctuation,
+ ::highlight(token_tag_punctuation) {
+ 	color: var(--color_a_5);
+ }
+
+ .token_comment,
+ ::highlight(token_comment),
+ .token_char,
+ ::highlight(token_char),
+ .token_inserted,
+ ::highlight(token_inserted),
+ .token_blockquote,
+ ::highlight(token_blockquote),
+ .token_blockquote_punctuation,
+ ::highlight(token_blockquote_punctuation) {
+ 	color: var(--color_b_5);
+ }
+
+ .token_builtin,
+ ::highlight(token_builtin),
+ .token_class_name,
+ ::highlight(token_class_name),
+ .token_number,
+ ::highlight(token_number) {
+ 	color: var(--color_j_5);
+ }
+
+ .token_attr_value,
+ ::highlight(token_attr_value),
+ .token_attr_quote,
+ ::highlight(token_attr_quote),
+ .token_string,
+ ::highlight(token_string),
+ .token_template_punctuation,
+ ::highlight(token_template_punctuation),
+ .token_inline_code,
+ ::highlight(token_inline_code),
+ .token_code_punctuation,
+ ::highlight(token_code_punctuation) {
+ 	color: var(--color_h_5);
+ }
+
+ /* attr_equals must be after attr_value */
+ .token_attr_equals,
+ ::highlight(token_attr_equals) {
+ 	color: var(--text_color_5);
+ }
+
+ .token_selector,
+ ::highlight(token_selector),
+ .token_function,
+ ::highlight(token_function),
+ .token_regex,
+ ::highlight(token_regex),
+ .token_important,
+ ::highlight(token_important),
+ .token_variable,
+ ::highlight(token_variable) {
+ 	color: var(--color_e_5);
+ }
+
+ .token_atrule,
+ ::highlight(token_atrule) {
+ 	color: var(--color_f_5);
+ }
+
+ .token_attr_name,
+ ::highlight(token_attr_name),
+ .token_property,
+ ::highlight(token_property),
+ .token_decorator,
+ ::highlight(token_decorator),
+ .token_decorator_name,
+ ::highlight(token_decorator_name),
+ .token_link_text_wrapper,
+ ::highlight(token_link_text_wrapper),
+ .token_link_text,
+ ::highlight(token_link_text),
+ .token_link_punctuation,
+ ::highlight(token_link_punctuation) {
+ 	color: var(--color_i_5);
+ }
+
+ .token_special_keyword,
+ ::highlight(token_special_keyword),
+ .token_namespace,
+ ::highlight(token_namespace),
+ .token_rule,
+ ::highlight(token_rule) {
+ 	color: var(--color_g_5);
+ }
+
+ .token_at_keyword,
+ ::highlight(token_at_keyword),
+ .token_url,
+ ::highlight(token_url) {
+ 	color: var(--color_d_5);
+ }
+
+ .token_url,
+ ::highlight(token_url) {
+ 	text-decoration: underline;
+ }
+
+ .token_strikethrough,
+ ::highlight(token_strikethrough) {
+ 	text-decoration: line-through;
+ }
+
+ /*
+
+ Note: CSS Highlights API doesn't support font-weight or font-style,
+ so bold/italic will only work with HTML mode, not range mode.
+
+ @see https://github.com/w3c/csswg-drafts/issues/8355
+
+ */
+
+ .token_important,
+ ::highlight(token_important),
+ .token_bold,
+ ::highlight(token_bold) {
+ 	font-weight: bold;
+ }
+
+ .token_italic,
+ ::highlight(token_italic) {
+ 	font-style: italic;
+ }
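
The `::highlight(token_*)` selectors above target the CSS Custom Highlight API, which is presumably what the package's "range mode" relies on (HTML mode uses the plain `.token_*` classes instead). A rough, hypothetical TypeScript sketch of how such a highlight gets registered — illustrative only, not this package's actual internals:

	// Illustrative only: register a Range under a name that ::highlight() can style.
	const code_el = document.querySelector('code');
	if (code_el) {
		const range = new Range();
		range.selectNodeContents(code_el);
		// After this, `::highlight(token_keyword) { color: ... }` applies to the range;
		// per the note above, font-weight/font-style are ignored in this mode.
		CSS.highlights.set('token_keyword', new Highlight(range));
	}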
package/dist/theme_variables.css ADDED
@@ -0,0 +1,20 @@
+ /*
+
+ CSS variables for syntax highlighting when not using Moss.
+ Import this alongside theme.css if you're not using Moss.
+
+ */
+
+ :root {
+ 	--text_color_5: light-dark(#8e7e71, #8e7e71);
+ 	--color_a_5: light-dark(#397fc6, #88b2dd);
+ 	--color_b_5: light-dark(#298e29, #66c266);
+ 	--color_c_5: light-dark(#d22d2d, #dd4040);
+ 	--color_d_5: light-dark(#6a40bf, #a68cd9);
+ 	--color_e_5: light-dark(#ad9625, #e2cb5a);
+ 	--color_f_5: light-dark(#6a3e1b, #b08b6d);
+ 	--color_g_5: light-dark(#e03e81, #ea7ba9);
+ 	--color_h_5: light-dark(#f24e0d, #f67c4c);
+ 	--color_i_5: light-dark(#19b3b3, #79ecec);
+ 	--color_j_5: light-dark(#2e9e82, #61d1b5);
+ }
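
theme_variables.css only defines the custom properties referenced by theme.css and theme_highlight.css, so per the file's own comment a consumer-side setup might look like this sketch (assumes a bundler such as Vite that accepts CSS imports from TypeScript; specifiers follow the package's `./*.css` export pattern):

	import '@fuzdev/fuz_code/theme.css';
	// Only needed when Moss/fuz_css isn't already providing --text_color_5 and --color_*_5:
	import '@fuzdev/fuz_code/theme_variables.css';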
package/dist/tokenize_syntax.d.ts ADDED
@@ -0,0 +1,28 @@
+ import type { SyntaxGrammar } from './syntax_styler.js';
+ import { type SyntaxTokenStream } from './syntax_token.js';
+ /**
+  * Accepts a string of text as input and the language definitions to use,
+  * and returns an array with the tokenized code.
+  *
+  * When the language definition includes nested tokens, the function is called recursively on each of these tokens.
+  *
+  * This method could be useful in other contexts as well, as a very crude parser.
+  *
+  * @param text - a string with the code to be styled
+  * @param grammar - an object containing the tokens to use
+  *
+  * Usually a language definition like `syntax_styler.get_lang('markup')`.
+  *
+  * @returns an array of strings and tokens, a token stream
+  *
+  * @example
+  * var code = `var foo = 0;`;
+  * var tokens = tokenize_syntax(code, SyntaxStyler.langs.js);
+  * for (var token of tokens) {
+  * 	if (token instanceof SyntaxToken && token.type === 'number') {
+  * 		console.log(`Found numeric literal: ${token.content}`);
+  * 	}
+  * }
+  */
+ export declare const tokenize_syntax: (text: string, grammar: SyntaxGrammar) => SyntaxTokenStream;
+ //# sourceMappingURL=tokenize_syntax.d.ts.map
package/dist/tokenize_syntax.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"tokenize_syntax.d.ts","sourceRoot":"../src/lib/","sources":["../src/lib/tokenize_syntax.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAc,KAAK,iBAAiB,EAAC,MAAM,mBAAmB,CAAC;AAEtE;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,eAAO,MAAM,eAAe,GAAI,MAAM,MAAM,EAAE,SAAS,aAAa,KAAG,iBAQtE,CAAC"}
package/dist/tokenize_syntax.js ADDED
@@ -0,0 +1,194 @@
+ import { SyntaxToken } from './syntax_token.js';
+ /**
+  * Accepts a string of text as input and the language definitions to use,
+  * and returns an array with the tokenized code.
+  *
+  * When the language definition includes nested tokens, the function is called recursively on each of these tokens.
+  *
+  * This method could be useful in other contexts as well, as a very crude parser.
+  *
+  * @param text - a string with the code to be styled
+  * @param grammar - an object containing the tokens to use
+  *
+  * Usually a language definition like `syntax_styler.get_lang('markup')`.
+  *
+  * @returns an array of strings and tokens, a token stream
+  *
+  * @example
+  * var code = `var foo = 0;`;
+  * var tokens = tokenize_syntax(code, SyntaxStyler.langs.js);
+  * for (var token of tokens) {
+  * 	if (token instanceof SyntaxToken && token.type === 'number') {
+  * 		console.log(`Found numeric literal: ${token.content}`);
+  * 	}
+  * }
+  */
+ export const tokenize_syntax = (text, grammar) => {
+ 	// Grammar is already normalized (rest merged, patterns in arrays, etc.)
+ 	var token_list = new LinkedList();
+ 	add_after(token_list, token_list.head, text);
+ 	match_grammar(text, token_list, grammar, token_list.head, 0);
+ 	return to_array(token_list);
+ };
+ const match_grammar = (text, token_list, grammar, start_node, start_pos, rematch) => {
+ 	for (var token in grammar) {
+ 		// Grammar is normalized: patterns is always an array of normalized objects
+ 		var patterns = grammar[token];
+ 		if (!patterns) {
+ 			continue;
+ 		}
+ 		for (var j = 0; j < patterns.length; ++j) {
+ 			if (rematch?.cause === token + ',' + j) {
+ 				return;
+ 			}
+ 			var pattern_obj = patterns[j];
+ 			// All properties are guaranteed to be present after normalization
+ 			var inside = pattern_obj.inside;
+ 			var lookbehind = pattern_obj.lookbehind;
+ 			var greedy = pattern_obj.greedy;
+ 			var alias = pattern_obj.alias;
+ 			// Pattern already has global flag if greedy (added during normalization)
+ 			var pattern = pattern_obj.pattern;
+ 			for (
+ 				// iterate the token list and keep track of the current token/string position
+ 				var current_node = start_node.next, pos = start_pos; current_node !== token_list.tail; pos += current_node.value.length, current_node = current_node.next) {
+ 				if (rematch && pos >= rematch.reach) {
+ 					break;
+ 				}
+ 				var str = current_node.value;
+ 				if (token_list.length > text.length) {
+ 					// Something went terribly wrong, ABORT, ABORT!
+ 					return;
+ 				}
+ 				if (str instanceof SyntaxToken) {
+ 					continue;
+ 				}
+ 				var remove_count = 1;
+ 				var match;
+ 				if (greedy) {
+ 					match = match_pattern(pattern, pos, text, lookbehind);
+ 					if (!match || match.index >= text.length) {
+ 						break;
+ 					}
+ 					var from = match.index;
+ 					var to = match.index + match[0].length;
+ 					var p = pos;
+ 					// find the node that contains the match
+ 					p += current_node.value.length;
+ 					while (from >= p) {
+ 						current_node = current_node.next;
+ 						p += current_node.value.length;
+ 					}
+ 					// adjust pos (and p)
+ 					p -= current_node.value.length;
+ 					pos = p;
+ 					// if the current node is a Token, then the match starts inside another Token, which is invalid
+ 					if (current_node.value instanceof SyntaxToken) {
+ 						continue;
+ 					}
+ 					// find the last node which is affected by this match
+ 					for (var k = current_node; k !== token_list.tail && (p < to || typeof k.value === 'string'); k = k.next) {
+ 						remove_count++;
+ 						p += k.value.length;
+ 					}
+ 					remove_count--;
+ 					// replace with the new match
+ 					str = text.substring(pos, p);
+ 					match.index -= pos;
+ 				}
+ 				else {
+ 					match = match_pattern(pattern, 0, str, lookbehind);
+ 					if (!match) {
+ 						continue;
+ 					}
+ 				}
+ 				var from = match.index;
+ 				var match_str = match[0];
+ 				var before = str.substring(0, from);
+ 				var after = str.substring(from + match_str.length);
+ 				var reach = pos + str.length;
+ 				if (rematch && reach > rematch.reach) {
+ 					rematch.reach = reach;
+ 				}
+ 				var remove_from = current_node.prev;
+ 				if (before) {
+ 					remove_from = add_after(token_list, remove_from, before);
+ 					pos += before.length;
+ 				}
+ 				remove_range(token_list, remove_from, remove_count);
+ 				var wrapped = new SyntaxToken(token, inside ? tokenize_syntax(match_str, inside) : match_str, alias, match_str);
+ 				current_node = add_after(token_list, remove_from, wrapped);
+ 				if (after) {
+ 					add_after(token_list, current_node, after);
+ 				}
+ 				if (remove_count > 1) {
+ 					// at least one Token object was removed, so we have to do some rematching
+ 					// this can only happen if the current pattern is greedy
+ 					var nested_rematch = {
+ 						cause: token + ',' + j,
+ 						reach,
+ 					};
+ 					match_grammar(text, token_list, grammar, current_node.prev, pos, nested_rematch);
+ 					// the reach might have been extended because of the rematching
+ 					if (rematch && nested_rematch.reach > rematch.reach) {
+ 						rematch.reach = nested_rematch.reach;
+ 					}
+ 				}
+ 			}
+ 		}
+ 	}
+ };
+ class LinkedList {
+ 	head;
+ 	tail;
+ 	length = 0;
+ 	constructor() {
+ 		this.head = { value: null, prev: null, next: null };
+ 		this.tail = { value: null, prev: this.head, next: null };
+ 		this.head.next = this.tail;
+ 	}
+ }
+ /**
+  * Adds a new node with the given value to the list.
+  */
+ const add_after = (list, node, value) => {
+ 	// assumes that node != list.tail && values.length >= 0
+ 	var next = node.next;
+ 	var new_node = { value, prev: node, next };
+ 	node.next = new_node;
+ 	next.prev = new_node;
+ 	list.length++;
+ 	return new_node;
+ };
+ /**
+  * Removes `count` nodes after the given node. The given node will not be removed.
+  */
+ const remove_range = (list, node, count) => {
+ 	var next = node.next;
+ 	for (var i = 0; i < count && next !== list.tail; i++) {
+ 		next = next.next;
+ 	}
+ 	node.next = next;
+ 	next.prev = node;
+ 	list.length -= i;
+ };
+ const to_array = (list) => {
+ 	var array = [];
+ 	var node = list.head.next;
+ 	while (node !== list.tail) {
+ 		array.push(node.value);
+ 		node = node.next;
+ 	}
+ 	return array;
+ };
+ const match_pattern = (pattern, pos, text, lookbehind) => {
+ 	pattern.lastIndex = pos;
+ 	var match = pattern.exec(text);
+ 	if (match && lookbehind && match[1]) {
+ 		// change the match to remove the text matched by the lookbehind group
+ 		var lookbehind_length = match[1].length;
+ 		match.index += lookbehind_length;
+ 		match[0] = match[0].substring(lookbehind_length);
+ 	}
+ 	return match;
+ };
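
To make the JSDoc example above concrete, here is a minimal sketch of calling `tokenize_syntax` directly. The hand-written grammar literal only approximates the normalized shape the tokenizer expects (real grammars are registered and normalized by the syntax styler), so treat it as illustrative:

	import type {SyntaxGrammar} from '@fuzdev/fuz_code/syntax_styler.js';
	import {tokenize_syntax} from '@fuzdev/fuz_code/tokenize_syntax.js';
	import {SyntaxToken} from '@fuzdev/fuz_code/syntax_token.js';

	// A deliberately tiny grammar: one token type, one pre-wrapped pattern.
	const toy_grammar = {number: [{pattern: /\b\d+\b/}]} as unknown as SyntaxGrammar;

	for (const token of tokenize_syntax('var foo = 0;', toy_grammar)) {
		if (token instanceof SyntaxToken && token.type === 'number') {
			console.log(`numeric literal: ${token.content}`); // "0"
		}
	}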
package/package.json ADDED
@@ -0,0 +1,117 @@
+ {
+   "name": "@fuzdev/fuz_code",
+   "version": "0.37.0",
+   "description": "syntax styling utilities and components for TypeScript, Svelte, and Markdown",
+   "glyph": "🎨",
+   "logo": "logo.svg",
+   "logo_alt": "a friendly pink spider facing you",
+   "public": true,
+   "license": "MIT",
+   "homepage": "https://code.fuz.dev/",
+   "repository": "https://github.com/ryanatkn/fuz_code",
+   "author": {
+     "name": "Ryan Atkinson",
+     "email": "mail@ryanatkn.com",
+     "url": "https://www.ryanatkn.com/"
+   },
+   "bugs": "https://github.com/ryanatkn/fuz_code/issues",
+   "funding": "https://www.ryanatkn.com/funding",
+   "scripts": {
+     "start": "gro dev",
+     "dev": "gro dev",
+     "build": "gro build",
+     "check": "gro check",
+     "test": "gro test",
+     "preview": "vite preview",
+     "deploy": "gro deploy",
+     "benchmark": "gro run benchmark/run_benchmarks.ts",
+     "benchmark-compare": "gro run benchmark/compare/run_compare.ts",
+     "update-generated-fixtures": "gro src/test/fixtures/update"
+   },
+   "type": "module",
+   "engines": {
+     "node": ">=22.15"
+   },
+   "peerDependencies": {
+     "@fuzdev/fuz_css": ">=0.40.0",
+     "svelte": "^5"
+   },
+   "peerDependenciesMeta": {
+     "@fuzdev/fuz_css": {
+       "optional": true
+     },
+     "svelte": {
+       "optional": true
+     }
+   },
+   "devDependencies": {
+     "@changesets/changelog-git": "^0.2.1",
+     "@ryanatkn/belt": "^0.41.1",
+     "@ryanatkn/eslint-config": "^0.9.0",
+     "@ryanatkn/fuz": "^0.165.0",
+     "@ryanatkn/gro": "^0.179.0",
+     "@fuzdev/fuz_css": "^0.40.0",
+     "@sveltejs/adapter-static": "^3.0.10",
+     "@sveltejs/kit": "^2.49.0",
+     "@sveltejs/package": "^2.5.6",
+     "@sveltejs/vite-plugin-svelte": "^6.2.1",
+     "@types/node": "^24.10.1",
+     "eslint": "^9.39.1",
+     "eslint-plugin-svelte": "^3.13.0",
+     "esm-env": "^1.2.2",
+     "prettier": "^3.6.2",
+     "prettier-plugin-svelte": "^3.4.0",
+     "svelte": "^5.44.1",
+     "svelte-check": "^4.3.4",
+     "tinybench": "^5.1.0",
+     "tslib": "^2.8.1",
+     "typescript": "^5.9.3",
+     "typescript-eslint": "^8.48.0",
+     "vitest": "^4.0.14"
+   },
+   "prettier": {
+     "plugins": [
+       "prettier-plugin-svelte"
+     ],
+     "useTabs": true,
+     "printWidth": 100,
+     "singleQuote": true,
+     "bracketSpacing": false,
+     "overrides": [
+       {
+         "files": "package.json",
+         "options": {
+           "useTabs": false
+         }
+       }
+     ]
+   },
+   "sideEffects": [
+     "**/*.css"
+   ],
+   "files": [
+     "dist",
+     "src/lib/**/*.ts",
+     "!src/lib/**/*.test.*",
+     "!dist/**/*.test.*"
+   ],
+   "exports": {
+     "./package.json": "./package.json",
+     "./*.js": {
+       "types": "./dist/*.d.ts",
+       "default": "./dist/*.js"
+     },
+     "./*.ts": {
+       "types": "./dist/*.d.ts",
+       "default": "./dist/*.js"
+     },
+     "./*.svelte": {
+       "types": "./dist/*.svelte.d.ts",
+       "svelte": "./dist/*.svelte",
+       "default": "./dist/*.svelte"
+     },
+     "./*.css": {
+       "default": "./dist/*.css"
+     }
+   }
+ }
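
The `exports` map above wires every dist module, Svelte component, and stylesheet to a subpath import. Illustrative consumer imports implied by those patterns (not excerpted from the package's docs):

	import Code from '@fuzdev/fuz_code/Code.svelte';
	import CodeHighlight from '@fuzdev/fuz_code/CodeHighlight.svelte';
	import {tokenize_syntax} from '@fuzdev/fuz_code/tokenize_syntax.js';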
package/src/lib/code_sample.ts ADDED
@@ -0,0 +1,10 @@
+ export interface CodeSample {
+ 	name: string;
+ 	lang: string;
+ 	content: string;
+ }
+
+ // Languages ordered from simple to complex
+ export const sample_langs = ['json', 'css', 'ts', 'html', 'svelte', 'md'] as const;
+
+ export type SampleLang = (typeof sample_langs)[number];
package/src/lib/grammar_clike.ts ADDED
@@ -0,0 +1,48 @@
+ import type {AddSyntaxGrammar, SyntaxGrammarRaw} from './syntax_styler.js';
+
+ export const class_keywords = 'class|extends|implements|instanceof|interface|new';
+
+ /**
+  * Based on Prism (https://github.com/PrismJS/prism)
+  * by Lea Verou (https://lea.verou.me/)
+  *
+  * MIT license
+  *
+  * @see LICENSE
+  */
+ export const add_grammar_clike: AddSyntaxGrammar = (syntax_styler) => {
+ 	const grammar_clike = {
+ 		comment: [
+ 			{
+ 				pattern: /(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,
+ 				lookbehind: true,
+ 				greedy: true,
+ 			},
+ 			{
+ 				pattern: /(^|[^\\:])\/\/.*/,
+ 				lookbehind: true,
+ 				greedy: true,
+ 			},
+ 		],
+ 		string: {
+ 			pattern: /(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,
+ 			greedy: true,
+ 		},
+ 		class_name: {
+ 			pattern: new RegExp(`(\\b(?:${class_keywords}|trait)\\s+|\\bcatch\\s+\\()[\\w.\\\\]+`, 'i'),
+ 			lookbehind: true,
+ 			inside: {
+ 				punctuation: /[.\\]/,
+ 			},
+ 		},
+ 		keyword:
+ 			/\b(?:break|catch|continue|do|else|finally|for|function|if|in|instanceof|new|null|return|throw|try|while)\b/,
+ 		boolean: /\b(?:false|true)\b/,
+ 		function: /\b\w+(?=\()/,
+ 		number: /\b0x[\da-f]+\b|(?:\b\d+(?:\.\d*)?|\B\.\d+)(?:e[+-]?\d+)?/i,
+ 		operator: /[<>]=?|[!=]=?=?|--?|\+\+?|&&?|\|\|?|[?*/~^%]/,
+ 		punctuation: /[{}[\];(),.:]/,
+ 	} satisfies SyntaxGrammarRaw;
+
+ 	syntax_styler.add_lang('clike', grammar_clike);
+ };
package/src/lib/grammar_css.ts ADDED
@@ -0,0 +1,84 @@
+ import type {AddSyntaxGrammar, SyntaxGrammarRaw} from './syntax_styler.js';
+ import {grammar_markup_add_attribute, grammar_markup_add_inlined} from './grammar_markup.js';
+
+ var string = /(?:"(?:\\(?:\r\n|[\s\S])|[^"\\\r\n])*"|'(?:\\(?:\r\n|[\s\S])|[^'\\\r\n])*')/;
+
+ /**
+  * Based on Prism (https://github.com/PrismJS/prism)
+  * by Lea Verou (https://lea.verou.me/)
+  *
+  * MIT license
+  *
+  * @see LICENSE
+  */
+ export const add_grammar_css: AddSyntaxGrammar = (syntax_styler) => {
+ 	const grammar_css = {
+ 		comment: /\/\*[\s\S]*?\*\//,
+ 		atrule: {
+ 			pattern: RegExp(
+ 				'@[\\w-](?:' +
+ 					/[^;{\s"']|\s+(?!\s)/.source +
+ 					'|' +
+ 					string.source +
+ 					')*?' +
+ 					/(?:;|(?=\s*\{))/.source,
+ 			),
+ 			inside: {
+ 				rule: /^@[\w-]+/,
+ 				selector_function_argument: {
+ 					pattern:
+ 						/(\bselector\s*\(\s*(?![\s)]))(?:[^()\s]|\s+(?![\s)])|\((?:[^()]|\([^()]*\))*\))+(?=\s*\))/,
+ 					lookbehind: true,
+ 					alias: 'selector',
+ 				},
+ 				keyword: {
+ 					pattern: /(^|[^\w-])(?:and|not|only|or)(?![\w-])/,
+ 					lookbehind: true,
+ 				},
+ 			} as SyntaxGrammarRaw, // see `rest` below
+ 		},
+ 		url: {
+ 			// https://drafts.csswg.org/css-values-3/#urls
+ 			pattern: RegExp(
+ 				'\\burl\\((?:' + string.source + '|' + /(?:[^\\\r\n()"']|\\[\s\S])*/.source + ')\\)',
+ 				'i',
+ 			),
+ 			greedy: true,
+ 			inside: {
+ 				function: /^url/i,
+ 				punctuation: /^\(|\)$/,
+ 				string: {
+ 					pattern: RegExp('^' + string.source + '$'),
+ 					alias: 'url',
+ 				},
+ 			},
+ 		},
+ 		selector: {
+ 			pattern: RegExp(
+ 				'(^|[{}\\s])[^{}\\s](?:[^{};"\'\\s]|\\s+(?![\\s{])|' + string.source + ')*(?=\\s*\\{)',
+ 			),
+ 			lookbehind: true,
+ 		},
+ 		string: {
+ 			pattern: string,
+ 			greedy: true,
+ 		},
+ 		property: {
+ 			pattern: /(^|[^-\w\xA0-\uFFFF])(?!\s)[-_a-z\xA0-\uFFFF](?:(?!\s)[-\w\xA0-\uFFFF])*(?=\s*:)/i,
+ 			lookbehind: true,
+ 		},
+ 		important: /!important\b/i,
+ 		function: {
+ 			pattern: /(^|[^-a-z0-9])[-a-z0-9]+(?=\()/i,
+ 			lookbehind: true,
+ 		},
+ 		punctuation: /[(){};:,]/,
+ 	} satisfies SyntaxGrammarRaw;
+
+ 	grammar_css.atrule.inside.rest = grammar_css;
+
+ 	syntax_styler.add_lang('css', grammar_css);
+
+ 	grammar_markup_add_inlined(syntax_styler, 'style', 'css');
+ 	grammar_markup_add_attribute(syntax_styler, 'style', 'css');
+ };