tailog 0.4.7 → 0.4.8

This diff shows the content of publicly available package versions released to the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their respective public registries.
@@ -0,0 +1,285 @@
+ // CodeMirror, copyright (c) by Marijn Haverbeke and others
+ // Distributed under an MIT license: http://codemirror.net/LICENSE
+
+ (function(mod) {
+   if (typeof exports == "object" && typeof module == "object") // CommonJS
+     mod(require("../../lib/codemirror"));
+   else if (typeof define == "function" && define.amd) // AMD
+     define(["../../lib/codemirror"], mod);
+   else // Plain browser env
+     mod(CodeMirror);
+ })(function(CodeMirror) {
+ "use strict";
+
+ CodeMirror.defineMode("ruby", function(config) {
+   function wordObj(words) {
+     var o = {};
+     for (var i = 0, e = words.length; i < e; ++i) o[words[i]] = true;
+     return o;
+   }
+   var keywords = wordObj([
+     "alias", "and", "BEGIN", "begin", "break", "case", "class", "def", "defined?", "do", "else",
+     "elsif", "END", "end", "ensure", "false", "for", "if", "in", "module", "next", "not", "or",
+     "redo", "rescue", "retry", "return", "self", "super", "then", "true", "undef", "unless",
+     "until", "when", "while", "yield", "nil", "raise", "throw", "catch", "fail", "loop", "callcc",
+     "caller", "lambda", "proc", "public", "protected", "private", "require", "load",
+     "require_relative", "extend", "autoload", "__END__", "__FILE__", "__LINE__", "__dir__"
+   ]);
+   var indentWords = wordObj(["def", "class", "case", "for", "while", "until", "module", "then",
+                              "catch", "loop", "proc", "begin"]);
+   var dedentWords = wordObj(["end", "until"]);
+   var matching = {"[": "]", "{": "}", "(": ")"};
+   var curPunc;
+
+   function chain(newtok, stream, state) {
+     state.tokenize.push(newtok);
+     return newtok(stream, state);
+   }
+
+   function tokenBase(stream, state) {
+     if (stream.sol() && stream.match("=begin") && stream.eol()) {
+       state.tokenize.push(readBlockComment);
+       return "comment";
+     }
+     if (stream.eatSpace()) return null;
+     var ch = stream.next(), m;
+     if (ch == "`" || ch == "'" || ch == '"') {
+       return chain(readQuoted(ch, "string", ch == '"' || ch == "`"), stream, state);
+     } else if (ch == "/") {
+       var currentIndex = stream.current().length;
+       if (stream.skipTo("/")) {
+         var search_till = stream.current().length;
+         stream.backUp(stream.current().length - currentIndex);
+         var balance = 0; // balance brackets
+         while (stream.current().length < search_till) {
+           var chchr = stream.next();
+           if (chchr == "(") balance += 1;
+           else if (chchr == ")") balance -= 1;
+           if (balance < 0) break;
+         }
+         stream.backUp(stream.current().length - currentIndex);
+         if (balance == 0)
+           return chain(readQuoted(ch, "string-2", true), stream, state);
+       }
+       return "operator";
+     } else if (ch == "%") {
+       var style = "string", embed = true;
+       if (stream.eat("s")) style = "atom";
+       else if (stream.eat(/[WQ]/)) style = "string";
+       else if (stream.eat(/[r]/)) style = "string-2";
+       else if (stream.eat(/[wxq]/)) { style = "string"; embed = false; }
+       var delim = stream.eat(/[^\w\s=]/);
+       if (!delim) return "operator";
+       if (matching.propertyIsEnumerable(delim)) delim = matching[delim];
+       return chain(readQuoted(delim, style, embed, true), stream, state);
+     } else if (ch == "#") {
+       stream.skipToEnd();
+       return "comment";
+     } else if (ch == "<" && (m = stream.match(/^<-?[\`\"\']?([a-zA-Z_?]\w*)[\`\"\']?(?:;|$)/))) {
+       return chain(readHereDoc(m[1]), stream, state);
+     } else if (ch == "0") {
+       if (stream.eat("x")) stream.eatWhile(/[\da-fA-F]/);
+       else if (stream.eat("b")) stream.eatWhile(/[01]/);
+       else stream.eatWhile(/[0-7]/);
+       return "number";
+     } else if (/\d/.test(ch)) {
+       stream.match(/^[\d_]*(?:\.[\d_]+)?(?:[eE][+\-]?[\d_]+)?/);
+       return "number";
+     } else if (ch == "?") {
+       while (stream.match(/^\\[CM]-/)) {}
+       if (stream.eat("\\")) stream.eatWhile(/\w/);
+       else stream.next();
+       return "string";
+     } else if (ch == ":") {
+       if (stream.eat("'")) return chain(readQuoted("'", "atom", false), stream, state);
+       if (stream.eat('"')) return chain(readQuoted('"', "atom", true), stream, state);
+
+       // :> :>> :< :<< are valid symbols
+       if (stream.eat(/[\<\>]/)) {
+         stream.eat(/[\<\>]/);
+         return "atom";
+       }
+
+       // :+ :- :/ :* :| :& :! are valid symbols
+       if (stream.eat(/[\+\-\*\/\&\|\:\!]/)) {
+         return "atom";
+       }
+
+       // Symbols can't start by a digit
+       if (stream.eat(/[a-zA-Z$@_\xa1-\uffff]/)) {
+         stream.eatWhile(/[\w$\xa1-\uffff]/);
+         // Only one ? ! = is allowed and only as the last character
+         stream.eat(/[\?\!\=]/);
+         return "atom";
+       }
+       return "operator";
+     } else if (ch == "@" && stream.match(/^@?[a-zA-Z_\xa1-\uffff]/)) {
+       stream.eat("@");
+       stream.eatWhile(/[\w\xa1-\uffff]/);
+       return "variable-2";
+     } else if (ch == "$") {
+       if (stream.eat(/[a-zA-Z_]/)) {
+         stream.eatWhile(/[\w]/);
+       } else if (stream.eat(/\d/)) {
+         stream.eat(/\d/);
+       } else {
+         stream.next(); // Must be a special global like $: or $!
+       }
+       return "variable-3";
+     } else if (/[a-zA-Z_\xa1-\uffff]/.test(ch)) {
+       stream.eatWhile(/[\w\xa1-\uffff]/);
+       stream.eat(/[\?\!]/);
+       if (stream.eat(":")) return "atom";
+       return "ident";
+     } else if (ch == "|" && (state.varList || state.lastTok == "{" || state.lastTok == "do")) {
+       curPunc = "|";
+       return null;
+     } else if (/[\(\)\[\]{}\\;]/.test(ch)) {
+       curPunc = ch;
+       return null;
+     } else if (ch == "-" && stream.eat(">")) {
+       return "arrow";
+     } else if (/[=+\-\/*:\.^%<>~|]/.test(ch)) {
+       var more = stream.eatWhile(/[=+\-\/*:\.^%<>~|]/);
+       if (ch == "." && !more) curPunc = ".";
+       return "operator";
+     } else {
+       return null;
+     }
+   }
+
+   function tokenBaseUntilBrace(depth) {
+     if (!depth) depth = 1;
+     return function(stream, state) {
+       if (stream.peek() == "}") {
+         if (depth == 1) {
+           state.tokenize.pop();
+           return state.tokenize[state.tokenize.length-1](stream, state);
+         } else {
+           state.tokenize[state.tokenize.length - 1] = tokenBaseUntilBrace(depth - 1);
+         }
+       } else if (stream.peek() == "{") {
+         state.tokenize[state.tokenize.length - 1] = tokenBaseUntilBrace(depth + 1);
+       }
+       return tokenBase(stream, state);
+     };
+   }
+   function tokenBaseOnce() {
+     var alreadyCalled = false;
+     return function(stream, state) {
+       if (alreadyCalled) {
+         state.tokenize.pop();
+         return state.tokenize[state.tokenize.length-1](stream, state);
+       }
+       alreadyCalled = true;
+       return tokenBase(stream, state);
+     };
+   }
+   function readQuoted(quote, style, embed, unescaped) {
+     return function(stream, state) {
+       var escaped = false, ch;
+
+       if (state.context.type === 'read-quoted-paused') {
+         state.context = state.context.prev;
+         stream.eat("}");
+       }
+
+       while ((ch = stream.next()) != null) {
+         if (ch == quote && (unescaped || !escaped)) {
+           state.tokenize.pop();
+           break;
+         }
+         if (embed && ch == "#" && !escaped) {
+           if (stream.eat("{")) {
+             if (quote == "}") {
+               state.context = {prev: state.context, type: 'read-quoted-paused'};
+             }
+             state.tokenize.push(tokenBaseUntilBrace());
+             break;
+           } else if (/[@\$]/.test(stream.peek())) {
+             state.tokenize.push(tokenBaseOnce());
+             break;
+           }
+         }
+         escaped = !escaped && ch == "\\";
+       }
+       return style;
+     };
+   }
+   function readHereDoc(phrase) {
+     return function(stream, state) {
+       if (stream.match(phrase)) state.tokenize.pop();
+       else stream.skipToEnd();
+       return "string";
+     };
+   }
+   function readBlockComment(stream, state) {
+     if (stream.sol() && stream.match("=end") && stream.eol())
+       state.tokenize.pop();
+     stream.skipToEnd();
+     return "comment";
+   }
+
+   return {
+     startState: function() {
+       return {tokenize: [tokenBase],
+               indented: 0,
+               context: {type: "top", indented: -config.indentUnit},
+               continuedLine: false,
+               lastTok: null,
+               varList: false};
+     },
+
+     token: function(stream, state) {
+       curPunc = null;
+       if (stream.sol()) state.indented = stream.indentation();
+       var style = state.tokenize[state.tokenize.length-1](stream, state), kwtype;
+       var thisTok = curPunc;
+       if (style == "ident") {
+         var word = stream.current();
+         style = state.lastTok == "." ? "property"
+           : keywords.propertyIsEnumerable(stream.current()) ? "keyword"
+           : /^[A-Z]/.test(word) ? "tag"
+           : (state.lastTok == "def" || state.lastTok == "class" || state.varList) ? "def"
+           : "variable";
+         if (style == "keyword") {
+           thisTok = word;
+           if (indentWords.propertyIsEnumerable(word)) kwtype = "indent";
+           else if (dedentWords.propertyIsEnumerable(word)) kwtype = "dedent";
+           else if ((word == "if" || word == "unless") && stream.column() == stream.indentation())
+             kwtype = "indent";
+           else if (word == "do" && state.context.indented < state.indented)
+             kwtype = "indent";
+         }
+       }
+       if (curPunc || (style && style != "comment")) state.lastTok = thisTok;
+       if (curPunc == "|") state.varList = !state.varList;
+
+       if (kwtype == "indent" || /[\(\[\{]/.test(curPunc))
+         state.context = {prev: state.context, type: curPunc || style, indented: state.indented};
+       else if ((kwtype == "dedent" || /[\)\]\}]/.test(curPunc)) && state.context.prev)
+         state.context = state.context.prev;
+
+       if (stream.eol())
+         state.continuedLine = (curPunc == "\\" || style == "operator");
+       return style;
+     },
+
+     indent: function(state, textAfter) {
+       if (state.tokenize[state.tokenize.length-1] != tokenBase) return 0;
+       var firstChar = textAfter && textAfter.charAt(0);
+       var ct = state.context;
+       var closing = ct.type == matching[firstChar] ||
+         ct.type == "keyword" && /^(?:end|until|else|elsif|when|rescue)\b/.test(textAfter);
+       return ct.indented + (closing ? 0 : config.indentUnit) +
+         (state.continuedLine ? config.indentUnit : 0);
+     },
+
+     electricInput: /^\s*(?:end|rescue|\})$/,
+     lineComment: "#"
+   };
+ });
+
+ CodeMirror.defineMIME("text/x-ruby", "ruby");
+
+ });
@@ -0,0 +1,139 @@
+ // CodeMirror, copyright (c) by Marijn Haverbeke and others
+ // Distributed under an MIT license: http://codemirror.net/LICENSE
+
+ (function(mod) {
+   if (typeof exports == "object" && typeof module == "object") // CommonJS
+     mod(require("../../lib/codemirror"));
+   else if (typeof define == "function" && define.amd) // AMD
+     define(["../../lib/codemirror"], mod);
+   else // Plain browser env
+     mod(CodeMirror);
+ })(function(CodeMirror) {
+ "use strict";
+
+ CodeMirror.defineMode('shell', function() {
+
+   var words = {};
+   function define(style, string) {
+     var split = string.split(' ');
+     for(var i = 0; i < split.length; i++) {
+       words[split[i]] = style;
+     }
+   };
+
+   // Atoms
+   define('atom', 'true false');
+
+   // Keywords
+   define('keyword', 'if then do else elif while until for in esac fi fin ' +
+     'fil done exit set unset export function');
+
+   // Commands
+   define('builtin', 'ab awk bash beep cat cc cd chown chmod chroot clear cp ' +
+     'curl cut diff echo find gawk gcc get git grep kill killall ln ls make ' +
+     'mkdir openssl mv nc node npm ping ps restart rm rmdir sed service sh ' +
+     'shopt shred source sort sleep ssh start stop su sudo tee telnet top ' +
+     'touch vi vim wall wc wget who write yes zsh');
+
+   function tokenBase(stream, state) {
+     if (stream.eatSpace()) return null;
+
+     var sol = stream.sol();
+     var ch = stream.next();
+
+     if (ch === '\\') {
+       stream.next();
+       return null;
+     }
+     if (ch === '\'' || ch === '"' || ch === '`') {
+       state.tokens.unshift(tokenString(ch));
+       return tokenize(stream, state);
+     }
+     if (ch === '#') {
+       if (sol && stream.eat('!')) {
+         stream.skipToEnd();
+         return 'meta'; // 'comment'?
+       }
+       stream.skipToEnd();
+       return 'comment';
+     }
+     if (ch === '$') {
+       state.tokens.unshift(tokenDollar);
+       return tokenize(stream, state);
+     }
+     if (ch === '+' || ch === '=') {
+       return 'operator';
+     }
+     if (ch === '-') {
+       stream.eat('-');
+       stream.eatWhile(/\w/);
+       return 'attribute';
+     }
+     if (/\d/.test(ch)) {
+       stream.eatWhile(/\d/);
+       if(stream.eol() || !/\w/.test(stream.peek())) {
+         return 'number';
+       }
+     }
+     stream.eatWhile(/[\w-]/);
+     var cur = stream.current();
+     if (stream.peek() === '=' && /\w+/.test(cur)) return 'def';
+     return words.hasOwnProperty(cur) ? words[cur] : null;
+   }
+
+   function tokenString(quote) {
+     return function(stream, state) {
+       var next, end = false, escaped = false;
+       while ((next = stream.next()) != null) {
+         if (next === quote && !escaped) {
+           end = true;
+           break;
+         }
+         if (next === '$' && !escaped && quote !== '\'') {
+           escaped = true;
+           stream.backUp(1);
+           state.tokens.unshift(tokenDollar);
+           break;
+         }
+         escaped = !escaped && next === '\\';
+       }
+       if (end || !escaped) {
+         state.tokens.shift();
+       }
+       return (quote === '`' || quote === ')' ? 'quote' : 'string');
+     };
+   };
+
+   var tokenDollar = function(stream, state) {
+     if (state.tokens.length > 1) stream.eat('$');
+     var ch = stream.next(), hungry = /\w/;
+     if (ch === '{') hungry = /[^}]/;
+     if (ch === '(') {
+       state.tokens[0] = tokenString(')');
+       return tokenize(stream, state);
+     }
+     if (!/\d/.test(ch)) {
+       stream.eatWhile(hungry);
+       stream.eat('}');
+     }
+     state.tokens.shift();
+     return 'def';
+   };
+
+   function tokenize(stream, state) {
+     return (state.tokens[0] || tokenBase) (stream, state);
+   };
+
+   return {
+     startState: function() {return {tokens:[]};},
+     token: function(stream, state) {
+       return tokenize(stream, state);
+     },
+     lineComment: '#',
+     fold: "brace"
+   };
+ });
+
+ CodeMirror.defineMIME('text/x-sh', 'shell');
+
+ });