pygments-ddot 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygments_ddot-0.1.0/PKG-INFO +84 -0
- pygments_ddot-0.1.0/README.md +67 -0
- pygments_ddot-0.1.0/pygments_ddot/__init__.py +23 -0
- pygments_ddot-0.1.0/pygments_ddot/lexer.py +212 -0
- pygments_ddot-0.1.0/pygments_ddot.egg-info/PKG-INFO +84 -0
- pygments_ddot-0.1.0/pygments_ddot.egg-info/SOURCES.txt +10 -0
- pygments_ddot-0.1.0/pygments_ddot.egg-info/dependency_links.txt +1 -0
- pygments_ddot-0.1.0/pygments_ddot.egg-info/entry_points.txt +2 -0
- pygments_ddot-0.1.0/pygments_ddot.egg-info/requires.txt +1 -0
- pygments_ddot-0.1.0/pygments_ddot.egg-info/top_level.txt +1 -0
- pygments_ddot-0.1.0/pyproject.toml +32 -0
- pygments_ddot-0.1.0/setup.cfg +4 -0
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pygments-ddot
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Pygments lexer for ddot.it (Knowledge Graph Notation).
|
|
5
|
+
Author-email: Calpano <hello@calpano.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://ddot.it
|
|
8
|
+
Project-URL: Source, https://github.com/Calpano/ddot.it-syntax-tools
|
|
9
|
+
Keywords: pygments,ddot.it,syntax-highlighting,knowledge-graph
|
|
10
|
+
Classifier: Development Status :: 4 - Beta
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Topic :: Text Processing :: Markup
|
|
14
|
+
Requires-Python: >=3.8
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
Requires-Dist: Pygments>=2.10
|
|
17
|
+
|
|
18
|
+
# pygments-ddot
|
|
19
|
+
|
|
20
|
+
[Pygments](https://pygments.org) lexer for **ddot.it** — a minimal text
|
|
21
|
+
format for typed knowledge graphs.
|
|
22
|
+
|
|
23
|
+
## Install
|
|
24
|
+
|
|
25
|
+
```sh
|
|
26
|
+
pip install pygments-ddot
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
The package registers itself via the `pygments.lexers` entry point, so
|
|
30
|
+
both `pygmentize` and any tool that uses `pygments.lexers.get_lexer_by_name`
|
|
31
|
+
will pick up `ddot` automatically.
|
|
32
|
+
|
|
33
|
+
## Use
|
|
34
|
+
|
|
35
|
+
### From the command line
|
|
36
|
+
|
|
37
|
+
```sh
|
|
38
|
+
pygmentize -l ddot -f html sample.ddot > sample.html
|
|
39
|
+
pygmentize -l ddot -f terminal sample.ddot
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
### From Python
|
|
43
|
+
|
|
44
|
+
```python
|
|
45
|
+
from pygments import highlight
|
|
46
|
+
from pygments.lexers import get_lexer_by_name
|
|
47
|
+
from pygments.formatters import HtmlFormatter
|
|
48
|
+
|
|
49
|
+
code = open("sample.ddot").read()
|
|
50
|
+
lexer = get_lexer_by_name("ddot")
|
|
51
|
+
print(highlight(code, lexer, HtmlFormatter()))
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Aliases registered: `ddot.it`, `ddot`, `ddotit`. File pattern: `*.ddot`.
|
|
55
|
+
|
|
56
|
+
## Token mapping
|
|
57
|
+
|
|
58
|
+
The lexer emits Pygments tokens chosen to map cleanly onto the canonical
|
|
59
|
+
token vocabulary at
|
|
60
|
+
[`ddot.it/test-data/tokens.md`](https://github.com/Calpano/ddot.it/blob/main/test-data/tokens.md).
|
|
61
|
+
|
|
62
|
+
| Pygments token | Canonical name |
|
|
63
|
+
|-----------------------------|----------------|
|
|
64
|
+
| `Name.Class` | `subject` |
|
|
65
|
+
| `Name.Function` | `relation` |
|
|
66
|
+
| `Literal.String.Symbol` | `object` |
|
|
67
|
+
| `Operator` | `operator` |
|
|
68
|
+
| `Keyword.Pseudo` | `command` |
|
|
69
|
+
| `Comment.Preproc` | `meta-delim` |
|
|
70
|
+
| `Comment.Special.Operator` | `meta-operator`|
|
|
71
|
+
| `Comment.Special.Relation` | `meta-relation`|
|
|
72
|
+
| `Comment.Special.Object` | `meta-object` |
|
|
73
|
+
| `Comment.Multiline` | `meta-text` |
|
|
74
|
+
| `Comment.Single` | `disabled` |
|
|
75
|
+
|
|
76
|
+
## Conformance
|
|
77
|
+
|
|
78
|
+
The lexer is verified against the cross-implementation golden corpus —
|
|
79
|
+
identical to the TextMate, Shiki, and Rouge ports. See
|
|
80
|
+
[`tools/conformance-pygments.py`](https://github.com/Calpano/ddot.it-syntax-tools/blob/main/tools/conformance-pygments.py).
|
|
81
|
+
|
|
82
|
+
## License
|
|
83
|
+
|
|
84
|
+
MIT
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
# pygments-ddot
|
|
2
|
+
|
|
3
|
+
[Pygments](https://pygments.org) lexer for **ddot.it** — a minimal text
|
|
4
|
+
format for typed knowledge graphs.
|
|
5
|
+
|
|
6
|
+
## Install
|
|
7
|
+
|
|
8
|
+
```sh
|
|
9
|
+
pip install pygments-ddot
|
|
10
|
+
```
|
|
11
|
+
|
|
12
|
+
The package registers itself via the `pygments.lexers` entry point, so
|
|
13
|
+
both `pygmentize` and any tool that uses `pygments.lexers.get_lexer_by_name`
|
|
14
|
+
will pick up `ddot` automatically.
|
|
15
|
+
|
|
16
|
+
## Use
|
|
17
|
+
|
|
18
|
+
### From the command line
|
|
19
|
+
|
|
20
|
+
```sh
|
|
21
|
+
pygmentize -l ddot -f html sample.ddot > sample.html
|
|
22
|
+
pygmentize -l ddot -f terminal sample.ddot
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
### From Python
|
|
26
|
+
|
|
27
|
+
```python
|
|
28
|
+
from pygments import highlight
|
|
29
|
+
from pygments.lexers import get_lexer_by_name
|
|
30
|
+
from pygments.formatters import HtmlFormatter
|
|
31
|
+
|
|
32
|
+
code = open("sample.ddot").read()
|
|
33
|
+
lexer = get_lexer_by_name("ddot")
|
|
34
|
+
print(highlight(code, lexer, HtmlFormatter()))
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
Aliases registered: `ddot.it`, `ddot`, `ddotit`. File pattern: `*.ddot`.
|
|
38
|
+
|
|
39
|
+
## Token mapping
|
|
40
|
+
|
|
41
|
+
The lexer emits Pygments tokens chosen to map cleanly onto the canonical
|
|
42
|
+
token vocabulary at
|
|
43
|
+
[`ddot.it/test-data/tokens.md`](https://github.com/Calpano/ddot.it/blob/main/test-data/tokens.md).
|
|
44
|
+
|
|
45
|
+
| Pygments token | Canonical name |
|
|
46
|
+
|-----------------------------|----------------|
|
|
47
|
+
| `Name.Class` | `subject` |
|
|
48
|
+
| `Name.Function` | `relation` |
|
|
49
|
+
| `Literal.String.Symbol` | `object` |
|
|
50
|
+
| `Operator` | `operator` |
|
|
51
|
+
| `Keyword.Pseudo` | `command` |
|
|
52
|
+
| `Comment.Preproc` | `meta-delim` |
|
|
53
|
+
| `Comment.Special.Operator` | `meta-operator`|
|
|
54
|
+
| `Comment.Special.Relation` | `meta-relation`|
|
|
55
|
+
| `Comment.Special.Object` | `meta-object` |
|
|
56
|
+
| `Comment.Multiline` | `meta-text` |
|
|
57
|
+
| `Comment.Single` | `disabled` |
|
|
58
|
+
|
|
59
|
+
## Conformance
|
|
60
|
+
|
|
61
|
+
The lexer is verified against the cross-implementation golden corpus —
|
|
62
|
+
identical to the TextMate, Shiki, and Rouge ports. See
|
|
63
|
+
[`tools/conformance-pygments.py`](https://github.com/Calpano/ddot.it-syntax-tools/blob/main/tools/conformance-pygments.py).
|
|
64
|
+
|
|
65
|
+
## License
|
|
66
|
+
|
|
67
|
+
MIT
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"""Pygments lexer for ddot.it (https://ddot.it).

Emits Pygments tokens that — after the canonical-name mapping in
`tools/conformance-pygments.py` — equal the canonical token streams in
`../../ddot.it/test-data/cases/*/expected.tokens.json`.

Token mapping (Pygments → canonical, see ../../ddot.it/test-data/tokens.md):
    Name.Class               → subject
    Name.Function            → relation
    Literal.String.Symbol    → object
    Operator                 → operator
    Keyword.Pseudo           → command
    Comment.Preproc          → meta-delim
    Comment.Special.Operator → meta-operator
    Comment.Special.Relation → meta-relation
    Comment.Special.Object   → meta-object
    Comment.Multiline        → meta-text
    Comment.Single           → disabled
"""

# Re-exported at package level so the `pygments.lexers` entry point
# target `pygments_ddot:DdotLexer` (declared in pyproject.toml) resolves.
from .lexer import DdotLexer

__all__ = ["DdotLexer"]
|
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
"""ddot.it Pygments lexer — direct port of the Rouge lexer in
`../../rouge/lib/rouge/lexers/ddot.rb`. Both implementations share the same
flat state machine: every state ends a line by transitioning back to
`root`, so each `\\n` resets dispatch."""

from pygments.lexer import RegexLexer
from pygments.token import (
    Comment,
    Keyword,
    Literal,
    Name,
    Operator,
    Text,
)

# Command markers. Each command has a long spelling (`ddot.it/...`) and a
# short spelling (`!!...`); both alternatives are accepted everywhere.
OFF_RE = r"(?:ddot\.it/off|!!off)"  # disables lexing until the ON marker
ON_RE = r"(?:ddot\.it/on|!!on)"  # re-enables lexing after OFF
# Any other command, e.g. `ddot.it/version` or `!!foo` (the short form
# allows an empty suffix, so bare `!!` also matches).
CMD_RE = r"(?:ddot\.it(?:/[\w.\-]+)?|!![\w.\-]*)"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class DdotLexer(RegexLexer):
    """Lexer for the ddot.it knowledge-graph notation.

    The machine is deliberately flat: `root` dispatches once per line, and
    every per-line state pops back out on `\\n`, so a newline always resets
    dispatch to `root` (multi-line block states — `disabled`, `meta_block`,
    `typed_meta_block` — are the only states that persist across lines).
    Rule order within each state is significant: Pygments tries rules
    top-to-bottom, so more specific patterns must precede catch-alls.
    """

    name = "ddot.it"
    aliases = ["ddot.it", "ddot", "ddotit"]
    filenames = ["*.ddot"]
    mimetypes = ["text/x-ddot"]

    tokens = {
        # ─────────── line-dispatch root ───────────
        "root": [
            # Standalone `,,` opens a free-form meta block.
            (r"^[ \t]*,,[ \t]*(?=\n|\Z)", Comment.Preproc, "meta_block"),
            # Off marker enters disabled span.
            (r"^[ \t]*" + OFF_RE + r"[ \t]*(?=\n|\Z)", Keyword.Pseudo, "disabled"),
            # Empty / whitespace-only line.
            (r"^[ \t]*\n", Text),
            # Continuation line (starts with `..` or `....`).
            # Zero-width lookahead: consumes nothing, just routes dispatch.
            (r"^(?=[ \t]*\.{2,4})", Text, "after_subject"),
            # Triple line — start with subject parsing.
            # Zero-width match is safe here because it pushes a state.
            (r"^", Text, "subject"),
        ],

        # ─────────── disabled span ───────────
        # Everything until the ON marker is emitted as Comment.Single
        # (canonical token: `disabled`).
        "disabled": [
            (r"^[ \t]*" + ON_RE + r"[ \t]*(?=\n|\Z)", Keyword.Pseudo, "#pop"),
            (r"^[ \t]*\n", Text),
            (r"[^\n]+", Comment.Single),
            (r"\n", Text),
        ],

        # ─────────── multi-line meta block (free-form) ───────────
        # Opened and closed by a standalone `,,` line; body lines are
        # free-form text (canonical token: `meta-text`).
        "meta_block": [
            (r"^[ \t]*,,[ \t]*(?=\n|\Z)", Comment.Preproc, "#pop"),
            (r"^[ \t]*\n", Text),
            (r"[^\n]+", Comment.Multiline),
            (r"\n", Text),
        ],

        # ─────────── triple line: subject phase ───────────
        "subject": [
            (r"[ \t]+", Text),
            # A command token in subject position (checked before the
            # generic subject rule so commands are not mis-typed).
            (
                CMD_RE + r"(?=[ \t]+\.{2,4}|[ \t]*,,|[ \t]*$|\n|\Z)",
                Keyword.Pseudo,
                ("#pop", "after_subject"),
            ),
            # Subject text: runs up to (but not into) the next `..`
            # operator, inline `,,`, or end of line.
            (
                r"[^\s.,][^\n,]*?(?=[ \t]+\.{2,4}|[ \t]*,,|[ \t]*$|\n|\Z)",
                Name.Class,
                ("#pop", "after_subject"),
            ),
            (r"\n", Text, "#pop"),
        ],

        # ─────────── after subject (or continuation entry) ───────────
        "after_subject": [
            (r"[ \t]+", Text),
            # `.. ..` (spaced operator) — one token covering both dots+space,
            # matching the canonical corpus.
            (r"\.{2}[ \t]+\.{2}", Operator, ("#pop", "object")),
            # `....` skips the relation entirely and goes straight to object;
            # must precede the plain `..` rule below.
            (r"\.{4}", Operator, ("#pop", "object")),
            (r"\.{2}", Operator, ("#pop", "relation")),
            (r",,", Comment.Preproc, ("#pop", "inline_meta")),
            (r"\n", Text, "#pop"),
        ],

        # ─────────── relation phase ───────────
        "relation": [
            (
                r"[^\s.,][^\n.,]*?(?=[ \t]*\.{2}|[ \t]*$|\n|\Z)",
                Name.Function,
                ("#pop", "after_relation"),
            ),
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],

        "after_relation": [
            (r"[ \t]+", Text),
            (r"\.{2}", Operator, ("#pop", "object")),
            (r",,", Comment.Preproc, ("#pop", "inline_meta")),
            (r"\n", Text, "#pop"),
        ],

        # ─────────── object phase ───────────
        "object": [
            # Object text runs until an inline `,,` or end of line; commas
            # are excluded so the meta delimiter is never swallowed.
            (
                r"[^\s,][^\n,]*?(?=[ \t]*,,|[ \t]*$|\n|\Z)",
                Literal.String.Symbol,
                ("#pop", "after_object"),
            ),
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],

        "after_object": [
            (r"[ \t]+", Text),
            # Trailing `,,` at end of line opens a typed multi-line meta block.
            # Checked before the inline rule below (order matters).
            (
                r",,(?=[ \t]*(?:\n|\Z))",
                Comment.Preproc,
                ("#pop", "typed_meta_open"),
            ),
            # Inline `,,` followed by content on the same line.
            (r",,", Comment.Preproc, ("#pop", "inline_meta")),
            (r"\n", Text, "#pop"),
        ],

        # Right after trailing `,,` — wait for newline, then enter the block.
        "typed_meta_open": [
            (r"[ \t]+", Text),
            (r"\n", Text, ("#pop", "typed_meta_block")),
        ],

        # Typed meta block: each line is `..rel.. obj`, closed by standalone `,,`.
        # Lines that do not start with dots fall through to free-form meta text.
        "typed_meta_block": [
            (r"^[ \t]*,,[ \t]*(?=\n|\Z)", Comment.Preproc, "#pop"),
            (r"^[ \t]*\n", Text),
            (r"^(?=[ \t]*\.{2,4})", Text, "typed_meta_op1"),
            (r"[^\n]+", Comment.Multiline),
            (r"\n", Text),
        ],

        # First `..` of a typed meta line (canonical token: `meta-operator`).
        "typed_meta_op1": [
            (r"[ \t]+", Text),
            (r"\.{2}", Comment.Special.Operator, ("#pop", "typed_meta_relation")),
            (r"\n", Text, "#pop"),
        ],

        "typed_meta_relation": [
            (
                r"[^\s.,][^\n.,]*?(?=[ \t]*\.{2}|[ \t]*$|\n|\Z)",
                Comment.Special.Relation,
                ("#pop", "typed_meta_after_relation"),
            ),
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],

        "typed_meta_after_relation": [
            (r"[ \t]+", Text),
            (r"\.{2}", Comment.Special.Operator, ("#pop", "typed_meta_object")),
            (r"\n", Text, "#pop"),
        ],

        "typed_meta_object": [
            (
                r"[^\s,][^\n,]*?(?=[ \t]*$|\n|\Z)",
                Comment.Special.Object,
                ("#pop", "typed_meta_after_object"),
            ),
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],

        "typed_meta_after_object": [
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],

        # ─────────── inline metadata (after `,,` on a triple line) ───────────
        # Mirrors the typed-meta chain, but stays on the current line.
        "inline_meta": [
            (r"[ \t]+", Text),
            (r"\.{2}", Comment.Special.Operator, ("#pop", "inline_meta_relation")),
            (r"\n", Text, "#pop"),
        ],

        "inline_meta_relation": [
            (
                r"[^\s.,][^\n.,]*?(?=[ \t]*\.{2}|[ \t]*$|\n|\Z)",
                Comment.Special.Relation,
                ("#pop", "inline_meta_after_relation"),
            ),
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],

        "inline_meta_after_relation": [
            (r"[ \t]+", Text),
            (r"\.{2}", Comment.Special.Operator, ("#pop", "inline_meta_object")),
            (r"\n", Text, "#pop"),
        ],

        "inline_meta_object": [
            # Returns to `after_object` so a further `,,` (inline or
            # block-opening) can still follow on the same line.
            (
                r"[^\s,][^\n,]*?(?=[ \t]*$|\n|\Z)",
                Comment.Special.Object,
                ("#pop", "after_object"),
            ),
            (r"[ \t]+", Text),
            (r"\n", Text, "#pop"),
        ],
    }
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pygments-ddot
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Pygments lexer for ddot.it (Knowledge Graph Notation).
|
|
5
|
+
Author-email: Calpano <hello@calpano.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://ddot.it
|
|
8
|
+
Project-URL: Source, https://github.com/Calpano/ddot.it-syntax-tools
|
|
9
|
+
Keywords: pygments,ddot.it,syntax-highlighting,knowledge-graph
|
|
10
|
+
Classifier: Development Status :: 4 - Beta
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Topic :: Text Processing :: Markup
|
|
14
|
+
Requires-Python: >=3.8
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
Requires-Dist: Pygments>=2.10
|
|
17
|
+
|
|
18
|
+
# pygments-ddot
|
|
19
|
+
|
|
20
|
+
[Pygments](https://pygments.org) lexer for **ddot.it** — a minimal text
|
|
21
|
+
format for typed knowledge graphs.
|
|
22
|
+
|
|
23
|
+
## Install
|
|
24
|
+
|
|
25
|
+
```sh
|
|
26
|
+
pip install pygments-ddot
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
The package registers itself via the `pygments.lexers` entry point, so
|
|
30
|
+
both `pygmentize` and any tool that uses `pygments.lexers.get_lexer_by_name`
|
|
31
|
+
will pick up `ddot` automatically.
|
|
32
|
+
|
|
33
|
+
## Use
|
|
34
|
+
|
|
35
|
+
### From the command line
|
|
36
|
+
|
|
37
|
+
```sh
|
|
38
|
+
pygmentize -l ddot -f html sample.ddot > sample.html
|
|
39
|
+
pygmentize -l ddot -f terminal sample.ddot
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
### From Python
|
|
43
|
+
|
|
44
|
+
```python
|
|
45
|
+
from pygments import highlight
|
|
46
|
+
from pygments.lexers import get_lexer_by_name
|
|
47
|
+
from pygments.formatters import HtmlFormatter
|
|
48
|
+
|
|
49
|
+
code = open("sample.ddot").read()
|
|
50
|
+
lexer = get_lexer_by_name("ddot")
|
|
51
|
+
print(highlight(code, lexer, HtmlFormatter()))
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Aliases registered: `ddot.it`, `ddot`, `ddotit`. File pattern: `*.ddot`.
|
|
55
|
+
|
|
56
|
+
## Token mapping
|
|
57
|
+
|
|
58
|
+
The lexer emits Pygments tokens chosen to map cleanly onto the canonical
|
|
59
|
+
token vocabulary at
|
|
60
|
+
[`ddot.it/test-data/tokens.md`](https://github.com/Calpano/ddot.it/blob/main/test-data/tokens.md).
|
|
61
|
+
|
|
62
|
+
| Pygments token | Canonical name |
|
|
63
|
+
|-----------------------------|----------------|
|
|
64
|
+
| `Name.Class` | `subject` |
|
|
65
|
+
| `Name.Function` | `relation` |
|
|
66
|
+
| `Literal.String.Symbol` | `object` |
|
|
67
|
+
| `Operator` | `operator` |
|
|
68
|
+
| `Keyword.Pseudo` | `command` |
|
|
69
|
+
| `Comment.Preproc` | `meta-delim` |
|
|
70
|
+
| `Comment.Special.Operator` | `meta-operator`|
|
|
71
|
+
| `Comment.Special.Relation` | `meta-relation`|
|
|
72
|
+
| `Comment.Special.Object` | `meta-object` |
|
|
73
|
+
| `Comment.Multiline` | `meta-text` |
|
|
74
|
+
| `Comment.Single` | `disabled` |
|
|
75
|
+
|
|
76
|
+
## Conformance
|
|
77
|
+
|
|
78
|
+
The lexer is verified against the cross-implementation golden corpus —
|
|
79
|
+
identical to the TextMate, Shiki, and Rouge ports. See
|
|
80
|
+
[`tools/conformance-pygments.py`](https://github.com/Calpano/ddot.it-syntax-tools/blob/main/tools/conformance-pygments.py).
|
|
81
|
+
|
|
82
|
+
## License
|
|
83
|
+
|
|
84
|
+
MIT
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
pygments_ddot/__init__.py
|
|
4
|
+
pygments_ddot/lexer.py
|
|
5
|
+
pygments_ddot.egg-info/PKG-INFO
|
|
6
|
+
pygments_ddot.egg-info/SOURCES.txt
|
|
7
|
+
pygments_ddot.egg-info/dependency_links.txt
|
|
8
|
+
pygments_ddot.egg-info/entry_points.txt
|
|
9
|
+
pygments_ddot.egg-info/requires.txt
|
|
10
|
+
pygments_ddot.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
Pygments>=2.10
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
pygments_ddot
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "pygments-ddot"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Pygments lexer for ddot.it (Knowledge Graph Notation)."
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = { text = "MIT" }
|
|
11
|
+
authors = [{ name = "Calpano", email = "hello@calpano.com" }]
|
|
12
|
+
requires-python = ">=3.8"
|
|
13
|
+
dependencies = ["Pygments>=2.10"]
|
|
14
|
+
keywords = ["pygments", "ddot.it", "syntax-highlighting", "knowledge-graph"]
|
|
15
|
+
classifiers = [
|
|
16
|
+
"Development Status :: 4 - Beta",
|
|
17
|
+
"License :: OSI Approved :: MIT License",
|
|
18
|
+
"Programming Language :: Python :: 3",
|
|
19
|
+
"Topic :: Text Processing :: Markup",
|
|
20
|
+
]
|
|
21
|
+
|
|
22
|
+
[project.urls]
|
|
23
|
+
Homepage = "https://ddot.it"
|
|
24
|
+
Source = "https://github.com/Calpano/ddot.it-syntax-tools"
|
|
25
|
+
|
|
26
|
+
# Pygments discovers third-party lexers via this entry point.
|
|
27
|
+
# After `pip install pygments-ddot`, `pygmentize -l ddot file.ddot` works.
|
|
28
|
+
[project.entry-points."pygments.lexers"]
|
|
29
|
+
ddot = "pygments_ddot:DdotLexer"
|
|
30
|
+
|
|
31
|
+
[tool.setuptools.packages.find]
|
|
32
|
+
include = ["pygments_ddot*"]
|