tabry 0.1.1 → 0.1.2
- checksums.yaml +4 -4
- data/lib/tabry/shells/bash.rb +12 -5
- data/sh/bash/README.md +2 -1
- data/tabry.gemspec +19 -2
- data/treesitter/Cargo.toml +26 -0
- data/treesitter/README.md +4 -0
- data/treesitter/binding.gyp +19 -0
- data/treesitter/bindings/node/binding.cc +28 -0
- data/treesitter/bindings/node/index.js +19 -0
- data/treesitter/bindings/rust/build.rs +40 -0
- data/treesitter/bindings/rust/lib.rs +52 -0
- data/treesitter/corpus/arg.txt +96 -0
- data/treesitter/corpus/at.txt +79 -0
- data/treesitter/corpus/comment.txt +13 -0
- data/treesitter/corpus/desc.txt +25 -0
- data/treesitter/corpus/examples_from_language_reference.txt +410 -0
- data/treesitter/corpus/flag.txt +48 -0
- data/treesitter/corpus/flag_desc_inline.txt +37 -0
- data/treesitter/corpus/opts.txt +21 -0
- data/treesitter/corpus/rapture.txt +61 -0
- data/treesitter/grammar.js +171 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/argument_titles.yml +8 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/arguments_and_possible_options__arg_.yml +23 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/flags__flag__flagarg__reqd_flagarg_.yml +37 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/getting_started.yml +13 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/includes.yml +57 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/multi_line_descriptions.yml +7 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/optional_args_and_varargs__opt_arg__varargs__opt_varargs_.yml +16 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/options.yml +24 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/subcommands__sub__1.yml +23 -0
- data/treesitter/jest_fixtures/examples_from_language_reference/subcommands__sub__2.yml +15 -0
- data/treesitter/package.json +21 -0
- data/treesitter/parser_compile.sh +1 -0
- data/treesitter/src/grammar.json +615 -0
- data/treesitter/src/node-types.json +563 -0
- data/treesitter/src/parser.c +4706 -0
- data/treesitter/src/tree_sitter/parser.h +223 -0
- data/treesitter/tabry-compile.js +394 -0
- data/treesitter/tabry-compile.test.js +51 -0
- metadata +36 -1
data/treesitter/src/tree_sitter/parser.h ADDED
@@ -0,0 +1,223 @@
+#ifndef TREE_SITTER_PARSER_H_
+#define TREE_SITTER_PARSER_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+
+#define ts_builtin_sym_error ((TSSymbol)-1)
+#define ts_builtin_sym_end 0
+#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024
+
+typedef uint16_t TSStateId;
+
+#ifndef TREE_SITTER_API_H_
+typedef uint16_t TSSymbol;
+typedef uint16_t TSFieldId;
+typedef struct TSLanguage TSLanguage;
+#endif
+
+typedef struct {
+  TSFieldId field_id;
+  uint8_t child_index;
+  bool inherited;
+} TSFieldMapEntry;
+
+typedef struct {
+  uint16_t index;
+  uint16_t length;
+} TSFieldMapSlice;
+
+typedef struct {
+  bool visible;
+  bool named;
+  bool supertype;
+} TSSymbolMetadata;
+
+typedef struct TSLexer TSLexer;
+
+struct TSLexer {
+  int32_t lookahead;
+  TSSymbol result_symbol;
+  void (*advance)(TSLexer *, bool);
+  void (*mark_end)(TSLexer *);
+  uint32_t (*get_column)(TSLexer *);
+  bool (*is_at_included_range_start)(const TSLexer *);
+  bool (*eof)(const TSLexer *);
+};
+
+typedef enum {
+  TSParseActionTypeShift,
+  TSParseActionTypeReduce,
+  TSParseActionTypeAccept,
+  TSParseActionTypeRecover,
+} TSParseActionType;
+
+typedef union {
+  struct {
+    uint8_t type;
+    TSStateId state;
+    bool extra;
+    bool repetition;
+  } shift;
+  struct {
+    uint8_t type;
+    uint8_t child_count;
+    TSSymbol symbol;
+    int16_t dynamic_precedence;
+    uint16_t production_id;
+  } reduce;
+  uint8_t type;
+} TSParseAction;
+
+typedef struct {
+  uint16_t lex_state;
+  uint16_t external_lex_state;
+} TSLexMode;
+
+typedef union {
+  TSParseAction action;
+  struct {
+    uint8_t count;
+    bool reusable;
+  } entry;
+} TSParseActionEntry;
+
+struct TSLanguage {
+  uint32_t version;
+  uint32_t symbol_count;
+  uint32_t alias_count;
+  uint32_t token_count;
+  uint32_t external_token_count;
+  uint32_t state_count;
+  uint32_t large_state_count;
+  uint32_t production_id_count;
+  uint32_t field_count;
+  uint16_t max_alias_sequence_length;
+  const uint16_t *parse_table;
+  const uint16_t *small_parse_table;
+  const uint32_t *small_parse_table_map;
+  const TSParseActionEntry *parse_actions;
+  const char * const *symbol_names;
+  const char * const *field_names;
+  const TSFieldMapSlice *field_map_slices;
+  const TSFieldMapEntry *field_map_entries;
+  const TSSymbolMetadata *symbol_metadata;
+  const TSSymbol *public_symbol_map;
+  const uint16_t *alias_map;
+  const TSSymbol *alias_sequences;
+  const TSLexMode *lex_modes;
+  bool (*lex_fn)(TSLexer *, TSStateId);
+  bool (*keyword_lex_fn)(TSLexer *, TSStateId);
+  TSSymbol keyword_capture_token;
+  struct {
+    const bool *states;
+    const TSSymbol *symbol_map;
+    void *(*create)(void);
+    void (*destroy)(void *);
+    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
+    unsigned (*serialize)(void *, char *);
+    void (*deserialize)(void *, const char *, unsigned);
+  } external_scanner;
+};
+
+/*
+ * Lexer Macros
+ */
+
+#define START_LEXER() \
+  bool result = false; \
+  bool skip = false; \
+  bool eof = false; \
+  int32_t lookahead; \
+  goto start; \
+  next_state: \
+  lexer->advance(lexer, skip); \
+  start: \
+  skip = false; \
+  lookahead = lexer->lookahead;
+
+#define ADVANCE(state_value) \
+  { \
+    state = state_value; \
+    goto next_state; \
+  }
+
+#define SKIP(state_value) \
+  { \
+    skip = true; \
+    state = state_value; \
+    goto next_state; \
+  }
+
+#define ACCEPT_TOKEN(symbol_value) \
+  result = true; \
+  lexer->result_symbol = symbol_value; \
+  lexer->mark_end(lexer);
+
+#define END_STATE() return result;
+
+/*
+ * Parse Table Macros
+ */
+
+#define SMALL_STATE(id) id - LARGE_STATE_COUNT
+
+#define STATE(id) id
+
+#define ACTIONS(id) id
+
+#define SHIFT(state_value) \
+  {{ \
+    .shift = { \
+      .type = TSParseActionTypeShift, \
+      .state = state_value \
+    } \
+  }}
+
+#define SHIFT_REPEAT(state_value) \
+  {{ \
+    .shift = { \
+      .type = TSParseActionTypeShift, \
+      .state = state_value, \
+      .repetition = true \
+    } \
+  }}
+
+#define SHIFT_EXTRA() \
+  {{ \
+    .shift = { \
+      .type = TSParseActionTypeShift, \
+      .extra = true \
+    } \
+  }}
+
+#define REDUCE(symbol_val, child_count_val, ...) \
+  {{ \
+    .reduce = { \
+      .type = TSParseActionTypeReduce, \
+      .symbol = symbol_val, \
+      .child_count = child_count_val, \
+      __VA_ARGS__ \
+    }, \
+  }}
+
+#define RECOVER() \
+  {{ \
+    .type = TSParseActionTypeRecover \
+  }}
+
+#define ACCEPT_INPUT() \
+  {{ \
+    .type = TSParseActionTypeAccept \
+  }}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // TREE_SITTER_PARSER_H_
data/treesitter/tabry-compile.js ADDED
@@ -0,0 +1,394 @@
+#!/usr/bin/env node
+// TODO: run through grammar and take stock of what is NOT implemented here. look at TODOs.
+// TODO: tests, maybe more syntax checks
+const fs = require('fs');
+const YAML = require('yaml');
+const Parser = require('tree-sitter');
+const TabryGrammar = require('./bindings/node/index.js');
+
+///////////////////////////////////////////////////////////////////
+
+// HELPERS
+
+function die(err) {
+  console.error(`ERROR: ${err}`);
+  process.exit(1);
+}
+
+function handleChildren(state, node) {
+  for (const child of node.namedChildren) {
+    const handler = 'handle' + child.type.replace(/((_|^)\w)/g, k => (k[1] || k[0]).toUpperCase());
+    if (handlers[handler]) {
+      handlers[handler](state, child);
+    } else {
+      console.error(`WARNING: no handler for type ${child.type} (make fn ${handler})`)
+    }
+  }
+}
+
+const childrenOfType = (node, type) => node.namedChildren.filter(x => x.type == type);
+const firstChildOfType = (node, type) => childrenOfType(node, type)[0];
+
+function pickMultiple(node, mappings) {
+  const result = {};
+  for (const name in mappings) {
+    result[name] = childrenOfType(node, mappings[name]);
+  }
+  return result;
+}
+function pickFirst(node, mappings) {
+  const result = {};
+  for (const name in mappings) {
+    result[name] = firstChildOfType(node, mappings[name]);
+  }
+  return result;
+}
+
+function pick(node, firsts, multiples) {
+  return {...pickFirst(node, firsts), ...pickMultiple(node, multiples)};
+}
+
+function textFromString(node) {
+  if (node.type !== 'string') {
+    throw new Error("node is not a string");
+  }
+  let text = node.text;
+  if (text[0] == '"') {
+    try {
+      // TODO: this may not be all technically correct; may want to just replace
+      // \\ and \" with a regex (carefully constructed, probably equal to the
+      // one in grammar.js)
+      const replacedNewlines = text.replace(/\n/g, "\\n");
+      text = JSON.parse(replacedNewlines);
+    } catch (e) {
+    }
+  }
+  return unindent(text);
+}
+
+// Remove indentation up to the minimum indentation level of the string.
+// e.g.:
+// desc "
+//     ---- Hello! ----
+//   My thing:
+//     * xyz
+//     * abc
+// "
+// Will have indentation removed such that "My thing" has no indentation and
+// "---- Hello! ----" has two spaces of indentation.
+function unindent(text) {
+  if (text.includes("\n")) {
+    const lines = text.split("\n").filter(l => l.trim().length !== 0);
+    const minIndent = Math.min.apply(Math, lines.map(l => l.match(/^( *)/)[0].length));
+    const removeIndent = new RegExp(`^ {${minIndent}}`);
+    text = text.split("\n").map(l => l.replace(removeIndent, '')).join("\n");
+    text = text.replace(/^\s*\n/, '').replace(/\s*$/, '');
+  }
+  return text;
+}
+
+function nameFromAt(node) {
+  if (node.type !== 'at_identifier') {
+    throw new Error("node is not an at_identifier");
+  }
+  return node.text.slice(1);
+}
+
+function safePush(object, field, value) {
+  if (!object[field]) {
+    object[field] = [];
+  }
+  object[field].push(value);
+}
+
+function safeSet(object, field, key, value) {
+  if (!object[field]) {
+    object[field] = {}
+  }
+  if (object[field][key]) {
+    die(`ERROR! duplicate ${field} -- ${key}`);
+  }
+  object[field][key] = value;
+}
+
+function checkContext(state, node, allowable) {
+  if (!allowable.includes(state.context)) {
+    die(`ERROR: ${node.type} found in context ${state.context}. Allowable: ${allowable}`);
+  }
+}
+
+/// CREATE* (part II of handlers)
+
+function createFlag(state, {nameAndAliases, desc, mods, ats, block, arg}) {
+  const [name, ...aliases] = textFromString(nameAndAliases).split(",");
+  const flag = {name};
+  if (aliases.length) {
+    flag.aliases = aliases;
+  }
+
+  for (const at of ats) {
+    safePush(flag, 'options', {type: 'include', value: nameFromAt(at)});
+  }
+
+  setMods(flag, mods, {reqd: 'required'});
+
+  if (desc) {
+    flag.description = textFromString(desc);
+  }
+  if (arg) {
+    flag.arg = true;
+  }
+
+  safePush(state.currentNode, 'flags', flag);
+
+  if (block) {
+    handleChildren({...state, currentNode: flag, context: 'flag'}, block);
+  }
+}
+
+function createIncludeObjectOnSub(state, {at}) {
+  const name = nameFromAt(at);
+  for (const type of ['arg', 'flag', 'sub']) {
+    safePush(state.currentNode, type + 's', {include: name});
+  }
+}
+
+function createSub(state, {nameAndAliases, ats, block, desc}) {
+  const [name, ...aliases] = textFromString(nameAndAliases).split(",");
+  const sub = {name};
+  safePush(state.currentNode, 'subs', sub);
+
+  if (aliases.length) {
+    sub.aliases = aliases;
+  }
+  if (desc) {
+    sub.description = textFromString(desc);
+  }
+
+  const newState = {...state, currentNode: sub, context: 'sub'};
+
+  for (const at of ats) {
+    createIncludeObjectOnSub(newState, {at});
+  }
+
+  if (block) {
+    handleChildren(newState, block);
+  }
+}
+
+function createOpts(state, {type, value}) {
+  safePush(state.currentNode, 'options', { type: type, value: value});
+}
+
+function setMods(object, mods, tabryToYaml) {
+  for (const mod of mods) {
+    const yamlName = tabryToYaml[mod.text];
+    if (yamlName) {
+      object[yamlName] = true
+    } else {
+      die(`Unknown mod ${mod.text}`);
+    }
+  }
+}
+
+function createArg(state, {name, desc, mods, ats, block, varargs}) {
+  const arg = {};
+  if (name) {
+    arg.name = textFromString(name);
+  }
+  setMods(arg, mods, {opt: 'optional'})
+
+  for (const at of ats) {
+    safePush(arg, 'options', {type: 'include', value: nameFromAt(at)});
+  }
+
+  if (desc) {
+    arg.description = textFromString(desc);
+  }
+  if (varargs) {
+    arg.varargs = true;
+  }
+
+  safePush(state.currentNode, 'args', arg);
+
+  if (block) {
+    handleChildren({...state, currentNode: arg, context: 'arg'}, block);
+  }
+}
+
+function createOptionInclude(state, {type, at, block}) {
+  const list = [];
+  const include = { options: list };
+  safeSet(state.output, 'option_includes', nameFromAt(at), list);
+  handleChildren({...state, currentNode: include, context: 'option_include'}, block);
+}
+
+function createArgInclude(state, {type, at, block}) {
+  const include = {};
+  safeSet(state.output, 'arg_includes', nameFromAt(at), include);
+  handleChildren({...state, currentNode: include, context: 'arg_include'}, block);
+}
+
+/// STATEMENT HANDLERS
+
+const handlers = {
+  handleDescStatement(state, node) {
+    checkContext(state, node, ['main', 'sub', 'arg', 'flag']);
+    state.currentNode.description = textFromString(firstChildOfType(node, 'string'));
+  },
+
+  handleTitleStatement(state, node) {
+    checkContext(state, node, ['arg']);
+    state.currentNode.title = textFromString(firstChildOfType(node, 'string'));
+  },
+
+  handleFlagStatement(state, node, arg=false) {
+    checkContext(state, node, ['sub', 'main', 'arg_include']);
+    const {mods, desc, names, ats, block} = pick(node, {
+      block: 'block',
+      names: 'flag_name_list',
+      desc: 'string',
+    }, {
+      mods: 'flag_modifier',
+      ats: 'at_identifier',
+    });
+
+    for (const nameAndAliases of names.namedChildren) {
+      createFlag(state, {nameAndAliases, desc, mods, ats, block, arg});
+    }
+  },
+
+  handleFlagargStatement(state, node) {
+    handlers.handleFlagStatement(state, node, true);
+  },
+
+  handleCmdStatement(state, node) {
+    checkContext(state, node, ['main']);
+    state.output.cmd = textFromString(firstChildOfType(node, 'string'));
+  },
+
+  handleSubStatement(state, node) {
+    checkContext(state, node, ['main', 'sub', 'arg_include']);
+    const {names, ats, block, desc} = pick(node, {
+      block: 'block',
+      names: 'sub_name_list',
+      desc: 'string',
+    }, {
+      ats: 'at_identifier',
+    });
+
+    for (const nameAndAliases of names.namedChildren) {
+      createSub(state, {nameAndAliases, ats, block, desc});
+    }
+  },
+
+  handleArgStatement(state, node) {
+    checkContext(state, node, ['main', 'sub', 'arg_include']);
+    const {mods, type, names, ats, block, desc} = pick(node, {
+      type: 'arg_type',
+      block: 'block',
+      desc: 'string',
+      names: 'arg_name_list',
+    }, {
+      mods: 'arg_modifier',
+      ats: 'at_identifier',
+    });
+    const varargs = type.text === 'varargs';
+    if (varargs && names && names.length > 1) {
+      die(`Args statement may have a maximum of one name`);
+    }
+    const effectiveNames = names ? names.namedChildren : [undefined];
+    for (const name of effectiveNames) {
+      createArg(state, {name, mods, ats, block, desc, varargs});
+    }
+  },
+
+  handleOptsConstStatement(state, node) {
+    checkContext(state, node, ['arg', 'flag', 'option_include']);
+    const {names} = pick(node, {}, {names: 'string'});
+    for (const name of names) {
+      createOpts(state, {type: 'const', value: textFromString(name)});
+    }
+  },
+
+  handleOptsShellStatement(state, node) {
+    checkContext(state, node, ['arg', 'flag', 'option_include']);
+    const {value} = pick(node, {value: 'string'});
+    createOpts(state, {type: 'shell', value: textFromString(value)});
+  },
+
+  handleOptsFileStatement(state, node) {
+    checkContext(state, node, ['arg', 'flag', 'option_include']);
+    createOpts(state, {type: 'file'});
+  },
+
+  handleOptsDirStatement(state, node) {
+    checkContext(state, node, ['arg', 'flag', 'option_include']);
+    createOpts(state, {type: 'dir'});
+  },
+
+  handleIncludeStatement(state, node) {
+    checkContext(state, node, ['arg', 'flag', 'option_include',
+      'arg_include', 'sub', 'main']);
+    const {at} = pick(node, {at: 'at_identifier'});
+    if (['arg_include', 'sub', 'main'].includes(state.context)) {
+      createIncludeObjectOnSub(state, {at});
+    } else {
+      createOpts(state, {type: 'include', value: nameFromAt(at)});
+    }
+  },
+
+  handleDefargsStatement(state, node) {
+    checkContext(state, node, ['main']);
+    const {at, block} = pick(node, {at: 'at_identifier', block: 'block'});
+    createArgInclude(state, {at, block});
+  },
+  handleDefoptsStatement(state, node) {
+    checkContext(state, node, ['main']);
+    const {at, block} = pick(node, {at: 'at_identifier', block: 'block'});
+    createOptionInclude(state, {at, block});
+  },
+
+  handleERROR(state, node) {
+    die(`Tree-sitter parse error. Try running npx tree-sitter parse. State:\n${JSON.stringify(state)}`);
+  },
+}
+
+
+function parseText(text) {
+  const parser = new Parser();
+  parser.setLanguage(TabryGrammar);
+  const tree = parser.parse(text);
+  return tree;
+}
+
+function transformText(text) {
+  const tree = parseText(text);
+  const output = {cmd: null, main: {}};
+  const state = {
+    output,
+    context: 'main', currentNode: output.main,
+  };
+  handleChildren(state, tree.rootNode);
+  return output;
+}
+
+///// MAIN
+
+if (require.main === module) {
+  const filename = process.argv[2];
+  const outputFn = process.argv[3];
+  if (!filename) { die("usage: compile.js file.tabry [output.json] # or output.yml"); }
+  const text = fs.readFileSync(filename).toString();
+  const output = transformText(text);
+
+  if (!outputFn) {
+    console.log(YAML.stringify(output, null, {aliasDuplicateObjects: false, version: '1.1'}));
+  } else if (outputFn.match(/\.yml/)) {
+    fs.writeFileSync(outputFn, YAML.stringify(output, null, {aliasDuplicateObjects: false, version: '1.1'}));
+  } else {
+    fs.writeFileSync(outputFn, JSON.stringify(output));
+  }
+}
+
+module.exports = {transformText};
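tabry-compile.js is both a CLI (its usage string is "compile.js file.tabry [output.json]", with a .yml extension selecting YAML output) and a library, since it exports transformText. A minimal sketch of programmatic use, mirroring the main block above; the input file name here is hypothetical:

// Sketch: convert a .tabry file to Tabry's YAML config in-process.
// Assumes the `yaml` package is installed and example.tabry (hypothetical
// path) parses cleanly with the grammar.
const fs = require('fs');
const YAML = require('yaml');
const {transformText} = require('./tabry-compile');

const source = fs.readFileSync('example.tabry').toString();
const config = transformText(source); // => {cmd: ..., main: {...}, ...}
console.log(YAML.stringify(config, null, {aliasDuplicateObjects: false, version: '1.1'}));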
data/treesitter/tabry-compile.test.js ADDED
@@ -0,0 +1,51 @@
+const tabryCompile = require('./tabry-compile');
+const fs = require('fs');
+const yaml = require('yaml');
+
+function loadTreesitterExamplesFromTxtFile(filename) {
+  const examples = [];
+  const lines = fs.readFileSync(filename).toString().split("\n");
+  let nameMode = false;
+  // ============
+  // Example name
+  // ============
+  // example stuff
+  // ---
+  // example stuff2
+  for (const l of lines) {
+    if (l.match(/^===+/)) {
+      nameMode = !nameMode;
+    } else if (nameMode) {
+      examples.push({name: l, text: ''});
+    } else if (examples.length) {
+      const lastExample = examples[examples.length - 1];
+      lastExample.text += l + "\n";
+    }
+  };
+
+  // result: {name: "example_name", text: "example stuff"}
+  return examples.map(ex => ({
+    name: ex.name,
+    filename: ex.name.toLowerCase().replace(/[^a-zA-Z0-9]/g, '_'),
+    text: ex.text.split("\n---\n")[0]
+  }));
+}
+
+const corpusFile = __dirname + "/corpus/examples_from_language_reference.txt";
+const examples = loadTreesitterExamplesFromTxtFile(corpusFile);
+const fixtureDir = __dirname + "/jest_fixtures/examples_from_language_reference/";
+
+describe('examples from language reference', () => {
+  for (const example of examples) {
+    describe(example.name, () => {
+      it("parses correctly", () => {
+        const fixture = `${fixtureDir}${example.filename}.yml`;
+        const expected = yaml.parse(fs.readFileSync(fixture).toString());
+
+        const actual = tabryCompile.transformText(example.text);
+
+        expect(actual).toEqual(expected);
+      });
+    });
+  }
+});
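The test file derives one Jest case per example in corpus/examples_from_language_reference.txt and compares the compiler output against the YAML fixtures listed above. As a worked illustration of the corpus format described in the loader's comments (the example name and body below are made up, shown only to clarify the shape of the loader's return value):

// Corpus format: a name between === lines, then the example body; anything
// after a "---" separator line is the expected tree-sitter parse and is
// stripped off by the loader.
//
//   ==================
//   Getting started
//   ==================
//   cmd "mycli"
//   ---
//   (source_file ...)
//
// For that input, loadTreesitterExamplesFromTxtFile would return roughly:
[
  {
    name: 'Getting started',
    filename: 'getting_started', // lowercased, non-alphanumerics replaced by "_"
    text: 'cmd "mycli"',         // body before the "---" separator
  }
]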
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: tabry
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.2
 platform: ruby
 authors:
 - Evan Battaglia
@@ -165,6 +165,41 @@ files:
 - spec/tabry/runner_spec.rb
 - spec/tabry/usage_generator_spec.rb
 - tabry.gemspec
+- treesitter/Cargo.toml
+- treesitter/README.md
+- treesitter/binding.gyp
+- treesitter/bindings/node/binding.cc
+- treesitter/bindings/node/index.js
+- treesitter/bindings/rust/build.rs
+- treesitter/bindings/rust/lib.rs
+- treesitter/corpus/arg.txt
+- treesitter/corpus/at.txt
+- treesitter/corpus/comment.txt
+- treesitter/corpus/desc.txt
+- treesitter/corpus/examples_from_language_reference.txt
+- treesitter/corpus/flag.txt
+- treesitter/corpus/flag_desc_inline.txt
+- treesitter/corpus/opts.txt
+- treesitter/corpus/rapture.txt
+- treesitter/grammar.js
+- treesitter/jest_fixtures/examples_from_language_reference/argument_titles.yml
+- treesitter/jest_fixtures/examples_from_language_reference/arguments_and_possible_options__arg_.yml
+- treesitter/jest_fixtures/examples_from_language_reference/flags__flag__flagarg__reqd_flagarg_.yml
+- treesitter/jest_fixtures/examples_from_language_reference/getting_started.yml
+- treesitter/jest_fixtures/examples_from_language_reference/includes.yml
+- treesitter/jest_fixtures/examples_from_language_reference/multi_line_descriptions.yml
+- treesitter/jest_fixtures/examples_from_language_reference/optional_args_and_varargs__opt_arg__varargs__opt_varargs_.yml
+- treesitter/jest_fixtures/examples_from_language_reference/options.yml
+- treesitter/jest_fixtures/examples_from_language_reference/subcommands__sub__1.yml
+- treesitter/jest_fixtures/examples_from_language_reference/subcommands__sub__2.yml
+- treesitter/package.json
+- treesitter/parser_compile.sh
+- treesitter/src/grammar.json
+- treesitter/src/node-types.json
+- treesitter/src/parser.c
+- treesitter/src/tree_sitter/parser.h
+- treesitter/tabry-compile.js
+- treesitter/tabry-compile.test.js
 homepage: https://github.com/evanbattaglia/tabry
 licenses:
 - MIT