ios_parser 0.5.1 → 0.7.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 2de99dbf9bae8e78c136f2ae91976e4989dcf1b38ee329184c379a8e23346183
-  data.tar.gz: a5a644079df051eb363682d43773b056fe9b0101493494bd6ae4571cf1196920
+  metadata.gz: 7ca27e440ca7f9a38c976bff002dc996725d77e1ec3fcd12749e119b910ed08d
+  data.tar.gz: 5c59a60ccdf9c10e5d6fd8841dfb2eac7fae046b3447aa401cc61982210d9b08
 SHA512:
-  metadata.gz: 58f820e3c067173aa657c137a0da9c2d3da94eb9b6ebe489281ea29a5185ee36c3a3f6bbba7bd1c4e5317845c629dfd6ce05b4a29c704e5bd457de589835be6d
-  data.tar.gz: 7df6f8eed3246238da8db8a8d5aa7d19ebd7a45c32c01951615de5314c729b713640192496e04f3fcb8fddcca0c2701d72c51970da4eb9d9a2c55333559c58ac
+  metadata.gz: 68394ff82c8d23b91accdd947529e352d09459d1fd6a5747ab4bb3ec7a53863b01cf8079fffb0e3f9b941c443451664e2acd2c4201ba7c06cea5b6bfe4ab584a
+  data.tar.gz: 33106cf36832dc6d082f278bf5d2a324d03274b37eb496828b29180587e812577b94387bcb9ec6428ed961710da9662c9b24b6b2923fe4f35003e196dcbd95f2
data/.travis.yml CHANGED
@@ -3,14 +3,15 @@ rvm:
   - 2.0.0
   - 2.1.10
   - 2.2.10
-  - 2.3.7
-  - 2.4.4
-  - 2.5.1
+  - 2.3.8
+  - 2.4.5
+  - 2.5.3
   - jruby-9.1.16.0
+  - jruby-9.2.0.0
 matrix:
   include:
     - rvm: jruby
       env: JRUBY_OPTS='-Xcompat.version=2.0'
 bundler_args: --without guard
 before_install:
-  - if [ "jruby" != "$TRAVIS_RUBY_VERSION" ]; then gem install bundler --without guard; fi
+  - if [ "jruby" != "$TRAVIS_RUBY_VERSION" ]; then gem i rubygems-update -v '<3' && update_rubygems; gem install bundler -v 1.17.3 --without guard; fi
@@ -2,6 +2,7 @@
 
 static VALUE rb_mIOSParser;
 static VALUE rb_cCLexer;
+static VALUE rb_cToken;
 VALUE rb_eLexError;
 
 typedef enum lex_token_state {
@@ -21,6 +22,9 @@ struct LexInfo {
   size_t pos;
   size_t token_start;
   size_t token_length;
+  size_t line;
+  size_t start_of_line;
+  size_t token_line;
   lex_token_state token_state;
   VALUE tokens;
   int indent;
@@ -46,13 +50,16 @@ typedef struct LexInfo LexInfo;
 
 #define CURRENT_CHAR(LEX) LEX->text[LEX->pos]
 #define TOKEN_EMPTY(LEX) LEX->token_length <= 0
+#define TOKEN_VALUE(TOK) RSTRUCT_GET(TOK, 0)
 
-#define MAKE_TOKEN(LEX, TOK) rb_ary_new3(2, rb_int_new(LEX->token_start), TOK)
-#define ADD_TOKEN(LEX, TOK) rb_ary_push(LEX->tokens, MAKE_TOKEN(LEX, TOK))
+#define ADD_TOKEN(LEX, TOK) rb_ary_push(LEX->tokens, make_token(LEX, TOK))
 
 #define CMD_LEN(CMD) (sizeof(CMD) - 1)
+
+static VALUE make_token(LexInfo *lex, VALUE tok);
+
 int is_certificate(LexInfo *lex) {
-  VALUE indent_ary, indent, command_ary, command;
+  VALUE indent_token, indent, command_token, command;
   int token_count, indent_pos, command_pos;
 
   token_count = RARRAY_LEN(lex->tokens);
@@ -62,16 +69,15 @@ int is_certificate(LexInfo *lex) {
   command_pos = token_count - 5;
   if (command_pos < 0) { return 0; }
 
-  indent_ary = rb_ary_entry(lex->tokens, indent_pos);
-  indent = rb_ary_entry(indent_ary, 1);
+  indent_token = rb_ary_entry(lex->tokens, indent_pos);
+  indent = TOKEN_VALUE(indent_token);
   if (TYPE(indent) != T_SYMBOL) { return 0; }
   if (rb_intern("INDENT") != SYM2ID(indent)) { return 0; }
 
-  command_ary = rb_ary_entry(lex->tokens, command_pos);
-  if (TYPE(command_ary) != T_ARRAY) { return 0; }
-  if (RARRAY_LEN(command_ary) < 2) { return 0; }
+  command_token = rb_ary_entry(lex->tokens, command_pos);
+  if (TYPE(command_token) != T_STRUCT) { return 0; }
 
-  command = rb_ary_entry(command_ary, 1);
+  command = TOKEN_VALUE(command_token);
   if (TYPE(command) != T_STRING) { return 0; }
 
   StringValue(command);
@@ -81,6 +87,33 @@ int is_certificate(LexInfo *lex) {
   return 1;
 }
 
+int is_authentication_banner_begin(LexInfo *lex) {
+  VALUE authentication_ary, authentication, banner_ary, banner;
+  int token_count = RARRAY_LEN(lex->tokens);
+  int authentication_pos = token_count - 2;
+  int banner_pos = token_count - 1;
+
+  if (banner_pos < 0) { return 0; }
+
+  banner_ary = rb_ary_entry(lex->tokens, banner_pos);
+  banner = TOKEN_VALUE(banner_ary);
+  if (TYPE(banner) != T_STRING) { return 0; }
+
+  StringValue(banner);
+  if (RSTRING_LEN(banner) != CMD_LEN("banner")) { return 0; }
+  if (0 != strncmp(RSTRING_PTR(banner), "banner", 6)) { return 0; }
+
+  authentication_ary = rb_ary_entry(lex->tokens, authentication_pos);
+  authentication = TOKEN_VALUE(authentication_ary);
+  if (TYPE(authentication) != T_STRING) { return 0; }
+
+  StringValue(authentication);
+  if (RSTRING_LEN(authentication) != CMD_LEN("authentication")) { return 0; }
+  if (0 != strncmp(RSTRING_PTR(authentication), "authentication", 14)) { return 0; }
+
+  return 1;
+}
+
 int is_banner_begin(LexInfo *lex) {
   VALUE banner_ary, banner;
   int token_count = RARRAY_LEN(lex->tokens);
@@ -88,8 +121,10 @@ int is_banner_begin(LexInfo *lex) {
 
   if (banner_pos < 0) { return 0; }
 
+  if (is_authentication_banner_begin(lex)) { return 1; }
+
   banner_ary = rb_ary_entry(lex->tokens, banner_pos);
-  banner = rb_ary_entry(banner_ary, 1);
+  banner = TOKEN_VALUE(banner_ary);
   if (TYPE(banner) != T_STRING) { return 0; }
 
   StringValue(banner);
@@ -119,7 +154,7 @@ static void delimit(LexInfo *lex) {
   case (LEX_STATE_INTEGER):
     strncpy(string, &lex->text[lex->token_start], lex->token_length);
     string[lex->token_length] = '\0';
-    token = rb_int_new(atoi(string));
+    token = rb_int_new(atoll(string));
     break;
 
   case (LEX_STATE_DECIMAL):
@@ -148,6 +183,14 @@ static void deallocate(void * lex) {
   xfree(lex);
 }
 
+static VALUE make_token(LexInfo *lex, VALUE tok) {
+  return rb_struct_new(rb_cToken,
+                       tok,
+                       rb_int_new(lex->token_start),
+                       rb_int_new(lex->line),
+                       rb_int_new(lex->token_start - lex->start_of_line + 1));
+}
+
 static void mark(void *ptr) {
   LexInfo *lex = (LexInfo *)ptr;
   rb_gc_mark(lex->tokens);
@@ -164,6 +207,9 @@ static VALUE initialize(VALUE self, VALUE input_text) {
 
   lex->text = NULL;
   lex->pos = 0;
+  lex->line = 1;
+  lex->start_of_line = 0;
+  lex->token_line = 0;
   lex->token_start = 0;
   lex->token_length = 0;
   lex->token_state = LEX_STATE_ROOT;
@@ -180,6 +226,22 @@ static void process_root(LexInfo * lex);
 static void process_start_of_line(LexInfo * lex);
 static void start_banner(LexInfo * lex);
 
+static void find_start_of_line(LexInfo *lex, size_t from) {
+  size_t pos = from;
+
+  for (;;) {
+    if (IS_NEWLINE(lex->text[pos])) {
+      lex->start_of_line = pos + 1;
+      return;
+    } else if (pos <= 0) {
+      lex->start_of_line = 0;
+      return;
+    } else {
+      pos--;
+    }
+  }
+}
+
 static void process_newline(LexInfo *lex) {
   delimit(lex);
 
@@ -189,6 +251,8 @@ static void process_newline(LexInfo *lex) {
     lex->pos = lex->pos + 1;
     lex->token_start = lex->pos;
     lex->token_length = 0;
+    lex->token_line = 0;
+    lex->line = lex->line + 1;
     return;
   }
 
@@ -196,6 +260,7 @@ static void process_newline(LexInfo *lex) {
   ADD_TOKEN(lex, ID2SYM(rb_intern("EOL")));
   lex->token_state = LEX_STATE_INDENT;
   lex->indent = 0;
+  lex->line = lex->line + 1;
 }
 
 static void process_space(LexInfo *lex) {
@@ -204,11 +269,23 @@ static void process_space(LexInfo *lex) {
 
 static void process_comment(LexInfo *lex) {
   char c = CURRENT_CHAR(lex);
+  int token_count = RARRAY_LEN(lex->tokens);
+  VALUE last_token, last_value;
+
+  if (0 < token_count) {
+    last_token = rb_ary_entry(lex->tokens, token_count - 1);
+    last_value = TOKEN_VALUE(last_token);
+
+    if (TYPE(last_value) != T_SYMBOL) {
+      ADD_TOKEN(lex, ID2SYM(rb_intern("EOL")));
+    }
+  }
 
   if (IS_NEWLINE(c)) {
     delimit(lex);
     lex->token_state = LEX_STATE_INDENT;
     lex->indent = 0;
+    lex->line = lex->line + 1;
   }
 }
 
@@ -269,7 +346,7 @@ static void process_integer(LexInfo *lex) {
 }
 
 static void process_certificate(LexInfo *lex) {
-  char quit[5];
+  char quit[6] = "quit\n";
 
   strncpy(quit, &CURRENT_CHAR(lex) - 5, 5);
 
@@ -295,17 +372,31 @@ static void process_certificate(LexInfo *lex) {
     lex->token_length = 0;
 
     lex->token_start = lex->pos;
+    lex->line = lex->line + lex->token_line - 1;
+    find_start_of_line(lex, lex->pos);
    ADD_TOKEN(lex, ID2SYM(rb_intern("CERTIFICATE_END")));
 
-    process_newline(lex);
+    find_start_of_line(lex, lex->pos - 2);
+    lex->start_of_line++;
+    lex->token_start = lex->pos;
+    ADD_TOKEN(lex, ID2SYM(rb_intern("EOL")));
+
+    lex->token_state = LEX_STATE_INDENT;
+    lex->indent = 0;
+    lex->line = lex->line + 1;
+
     process_start_of_line(lex);
   } else {
+    if (IS_NEWLINE(CURRENT_CHAR(lex))) {
+      lex->token_line++;
+    }
    lex->token_length++;
  }
 }
 
 static void start_certificate(LexInfo *lex) {
   lex->indent_pos--;
+  lex->token_line = 0;
   rb_ary_pop(lex->tokens);
   rb_ary_pop(lex->tokens);
   ADD_TOKEN(lex, ID2SYM(rb_intern("CERTIFICATE_BEGIN")));
@@ -331,15 +422,22 @@ static void process_banner(LexInfo *lex) {
     lex->token_length++;
     delimit(lex);
     lex->token_start = lex->pos;
+    lex->line = lex->line + lex->token_line;
+    find_start_of_line(lex, lex->pos);
     ADD_TOKEN(lex, ID2SYM(rb_intern("BANNER_END")));
     if (lex->text[lex->pos + 1] == 'C') { lex->pos++; }
   } else if (!lex->banner_delimiter && is_banner_end_string(lex)) {
     lex->token_length -= 1;
     delimit(lex);
     lex->token_start = lex->pos;
+    lex->line = lex->line + lex->token_line;
+    find_start_of_line(lex, lex->pos);
     ADD_TOKEN(lex, ID2SYM(rb_intern("BANNER_END")));
   } else {
-    lex->token_length++;
+    if (IS_NEWLINE(lex->text[lex->pos + lex->token_length])) {
+      lex->token_line++;
+    }
+    lex->token_length++;
   }
 }
 
@@ -347,12 +445,17 @@ static void start_banner(LexInfo *lex) {
   char c = CURRENT_CHAR(lex);
   lex->banner_delimiter = (c == '\n') ? 0 : c;
   ADD_TOKEN(lex, ID2SYM(rb_intern("BANNER_BEGIN")));
+  if ('\n' == lex->text[lex->pos + 1]) lex->line++;
   if ('\n' == lex->text[lex->pos + 2]) lex->pos++;
 }
 
 static void process_start_of_line(LexInfo *lex) {
   char c = CURRENT_CHAR(lex);
 
+  if (lex->indent == 0) {
+    lex->start_of_line = lex->pos;
+  }
+
   if (IS_SPACE(c)) {
     lex->indent++;
     return;
@@ -487,7 +590,8 @@ static VALUE call(VALUE self, VALUE input_text) {
   }
 
   delimit(lex);
-  lex->token_start = lex->pos;
+  lex->token_start = lex->pos - 1;
+  lex->line = lex->line - 1;
 
   for (; lex->indent_pos > 0; lex->indent_pos--) {
     ADD_TOKEN(lex, ID2SYM(rb_intern("DEDENT")));
@@ -501,6 +605,7 @@ void Init_c_lexer() {
   rb_cCLexer = rb_define_class_under(rb_mIOSParser, "CLexer", rb_cObject);
   rb_eLexError = rb_define_class_under(rb_mIOSParser, "LexError",
                                        rb_eStandardError);
+  rb_cToken = rb_path2class("IOSParser::Token");
   rb_define_alloc_func(rb_cCLexer, allocate);
   rb_define_method(rb_cCLexer, "initialize", initialize, 0);
   rb_define_method(rb_cCLexer, "call", call, 1);
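Net effect of the C lexer changes above: tokens are now IOSParser::Token structs carrying line and column alongside the byte offset, integers are parsed with atoll so values up to 2**63-1 survive, and "aaa authentication banner" is recognized as a banner opener. A rough usage sketch from Ruby (assumes the C extension compiled on your platform; the sample input is invented):

    require 'ios_parser'
    require 'ios_parser/c_lexer' # skip if the extension is unavailable

    tokens = IOSParser::CLexer.new.call("hostname example\n")
    tokens.first  # => #<struct IOSParser::Token value="hostname", pos=0, line=1, col=1>
                  #    (0.5.x returned two-element [pos, value] arrays instead)

    # atoll instead of atoi: large integers are preserved
    IOSParser::CLexer.new.call('9223372036854775807').first.value
    # => 9223372036854775807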
@@ -6,19 +6,22 @@ module IOSParser
     class Command
       include Enumerable
       include Queryable
-      attr_accessor :args, :commands, :parent, :pos, :document
-
-      def initialize(args: [], commands: [],
-                     parent: nil, pos: nil, document: nil)
-        @args = args
+      attr_accessor :commands, :parent, :document, :indent, :tokens
+      def initialize(tokens: [], commands: [],
+                     parent: nil, document: nil, indent: nil)
+        @tokens = tokens
         @commands = commands
         @parent = parent
-        @pos = pos
         @document = document
+        @indent = indent || 0
+      end
+
+      def args
+        tokens.map(&:value)
       end
 
       def name
-        args[0]
+        tokens.first.value
       end
 
       def ==(other)
@@ -37,6 +40,10 @@ module IOSParser
         parent ? parent.path + [parent.line] : []
       end
 
+      def pos
+        tokens.first && tokens.first.pos
+      end
+
       def indentation(base: 0)
         ' ' * (path.length - base)
       end
@@ -48,9 +55,10 @@ module IOSParser
       def inspect
         "<IOSParser::IOS::Command:0x#{object_id.to_s(16)} "\
-          "@args=#{args.inspect}, "\
+          "@tokens=#{tokens.inspect}, "\
           "@commands=#{commands.inspect}, "\
          "@pos=#{pos.inspect}, "\
+          "@indent=#{indent}, "\
          "@document=<IOSParser::IOS::Document:0x#{document.object_id.to_s(16)}>>"
       end
 
@@ -63,7 +71,8 @@ module IOSParser
         {
           args: args,
           commands: commands.map(&:to_hash),
-          pos: pos
+          pos: pos,
+          indent: indent
         }
       end
 
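Command now keeps the lexed tokens and derives args, name, and pos from them, and to_hash additionally reports nesting depth via indent. A minimal sketch of the new constructor (tokens hand-built for illustration, assuming the gem's parser classes are loaded by require 'ios_parser'; normally the parser builds these):

    require 'ios_parser'

    tokens = [
      IOSParser::Token.new('hostname', 0, 1, 1),
      IOSParser::Token.new('example', 9, 1, 10)
    ]

    cmd = IOSParser::IOS::Command.new(tokens: tokens, indent: 0)
    cmd.name    # => "hostname"
    cmd.args    # => ["hostname", "example"]
    cmd.pos     # => 0
    cmd.to_hash # => { args: ["hostname", "example"], commands: [], pos: 0, indent: 0 }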
@@ -11,6 +11,7 @@ module IOSParser
       @document = Document.new(nil)
       @parent = parent
       @lexer = lexer
+      @indent = 0
     end
 
     def tokens
@@ -29,40 +30,50 @@
 
     def section(parent = nil)
       [].tap do |commands|
-        until tokens.empty? || tokens.first.last == :DEDENT
+        until tokens.empty? || tokens.first.value == :DEDENT
           commands.push(command(parent, @document))
         end
-        tokens.shift # discard :DEDENT
+        token = tokens.shift # discard :DEDENT
+        @indent -= 1 if token && token.value == :DEDENT
       end
     end
 
     def command(parent = nil, document = nil)
-      pos = tokens.first.first
-      opts = { args: arguments, parent: parent, document: document, pos: pos }
+      opts = {
+        tokens: command_tokens,
+        parent: parent,
+        document: document,
+        indent: @indent
+      }
 
       Command.new(opts).tap do |cmd|
         cmd.commands = subsections(cmd)
       end
     end
 
+    def command_tokens
+      toks = []
+      until tokens.empty? || tokens.first.value == :EOL
+        tok = tokens.shift
+        toks << tok unless argument_to_discard?(tok.value)
+      end
+      tokens.shift # discard :EOL
+      toks
+    end
+
+    def argument_to_discard?(arg)
+      arguments_to_discard.include?(arg)
+    end
+
     def arguments_to_discard
       [:INDENT, :DEDENT,
        :CERTIFICATE_BEGIN, :CERTIFICATE_END,
        :BANNER_BEGIN, :BANNER_END]
     end
 
-    def arguments
-      [].tap do |args|
-        until tokens.empty? || tokens.first.last == :EOL
-          _, arg = tokens.shift
-          args << arg unless arguments_to_discard.include?(arg)
-        end
-        tokens.shift # discard :EOL
-      end
-    end
-
     def subsections(parent = nil)
-      if !tokens.empty? && tokens.first.last == :INDENT
+      if !tokens.empty? && tokens.first.value == :INDENT
+        @indent += 1
         tokens.shift # discard :INDENT
         section(parent)
       else
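The parser threads an @indent counter through section/subsections, so every parsed Command records how deeply it is nested. Roughly, the observable effect (config text invented for illustration):

    require 'ios_parser'

    doc = IOSParser.parse("interface Vlan1\n ip address 10.0.0.1 255.255.255.0\n")

    top = doc.find(name: 'interface')
    top.indent                # => 0
    top.commands.first.indent # => 1
    top.commands.first.args   # => ["ip", "address", "10.0.0.1", "255.255.255.0"]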
@@ -3,6 +3,7 @@ module IOSParser
     LexError = IOSParser::LexError
 
     attr_accessor :tokens, :token, :indents, :indent, :state, :char,
+                  :line, :start_of_line, :token_line,
                   :string_terminator
 
     def initialize
@@ -12,8 +13,10 @@
       @indent = 0
       @indents = [0]
       @state = :root
-      @token_char = 0
-      @this_char = -1
+      @this_char = -1
+      @line = 1
+      @start_of_line = 0
+      @token_line = 0
     end
 
     def call(input_text)
@@ -57,16 +60,37 @@
       end
     end
 
-    def make_token(value, pos: nil)
+    def make_token(value, pos: nil, col: nil)
       pos ||= @token_start || @this_char
+      col ||= pos - start_of_line + 1
       @token_start = nil
-      [pos, value]
+      Token.new(value, pos, line, col)
+    end
+
+    def find_start_of_line(from: @this_char)
+      from.downto(0) do |pos|
+        if @text[pos] == "\n"
+          self.start_of_line = pos + 1
+          return start_of_line
+        end
+      end
+
+      self.line_start = 0
     end
 
     def comment
       self.state = :comment
-      update_indentation
-      self.state = :root if newline?
+      tokens << make_token(:EOL) if tokens.last &&
+                                    !tokens.last.value.is_a?(Symbol)
+      comment_newline if newline?
+    end
+
+    def comment_newline
+      delimit
+      self.start_of_line = @this_char + 1
+      self.state = :line_start
+      self.indent = 0
+      self.line += 1
     end
 
     def comment?
@@ -79,16 +103,25 @@
 
     def banner_begin
       self.state = :banner
+      self.token_line = 0
       tokens << make_token(:BANNER_BEGIN)
       @token_start = @this_char + 2
       @banner_delimiter = char == "\n" ? 'EOF' : char
+      return unless @text[@this_char + 1] == "\n"
+      self.token_line -= 1
+      self.line += 1
     end
 
     def banner_begin?
-      tokens[-2] && tokens[-2].last == 'banner'
+      tokens[-2] && (
+        tokens[-2].value == 'banner' ||
+        tokens[-2..-1].map(&:value) == %w[authentication banner]
+      )
    end
 
     def banner
+      self.token_line += 1 if newline?
+
       if banner_end_char?
         banner_end_char
       elsif banner_end_string?
@@ -101,7 +134,10 @@
     def banner_end_string
       self.state = :root
       token.chomp!(@banner_delimiter[0..-2])
-      tokens << make_token(token) << make_token(:BANNER_END)
+      tokens << make_token(token)
+      self.line += token_line
+      find_start_of_line
+      tokens << make_token(:BANNER_END)
       self.token = ''
     end
 
@@ -112,7 +148,10 @@
     def banner_end_char
       self.state = :root
       banner_end_clean_token
-      tokens << make_token(token) << make_token(:BANNER_END)
+      tokens << make_token(token)
+      self.line += token_line
+      find_start_of_line
+      tokens << make_token(:BANNER_END)
       self.token = ''
     end
 
@@ -134,42 +173,60 @@
     end
 
     def banner_garbage?(pos)
-      tokens[pos].last == :BANNER_END && tokens[pos + 1].last == 'C'
+      tokens[pos].value == :BANNER_END && tokens[pos + 1].value == 'C'
     end
 
     def certificate_begin?
-      tokens[-6] && tokens[-6].last == :INDENT &&
-        tokens[-5] && tokens[-5].last == 'certificate'
+      tokens[-6] && tokens[-6].value == :INDENT &&
+        tokens[-5] && tokens[-5].value == 'certificate'
     end
 
     def certificate_begin
       self.state = :certificate
       indents.pop
-      tokens[-2..-1] = [make_token(:CERTIFICATE_BEGIN, pos: tokens[-1][0])]
+      tokens[-2..-1] = [make_token(:CERTIFICATE_BEGIN, pos: tokens[-1].pos)]
+      self.token_line = 0
       certificate
     end
 
     def certificate
-      token[-5..-1] == "quit\n" ? certificate_end : token << char
+      if token.end_with?("quit\n")
+        certificate_end
+      else
+        self.token_line += 1 if char == "\n"
+        token << char
+      end
     end
 
     def certificate_end
       tokens.concat certificate_end_tokens
+      self.line += 1
       update_indentation
       @token_start = @this_char
 
       @token = ''
       self.state = :line_start
       self.indent = 0
+      self.line += 1
       root
     end
 
+    # rubocop: disable AbcSize
     def certificate_end_tokens
-      [
-        make_token(token[0..-6].gsub!(/\s+/, ' ').strip, pos: tokens[-1][0]),
-        make_token(:CERTIFICATE_END, pos: @this_char),
-        make_token(:EOL, pos: @this_char)
-      ]
+      cluster = []
+      cluster << make_token(certificate_token_value, pos: tokens[-1].pos)
+      self.line += self.token_line - 1
+      cluster << make_token(:CERTIFICATE_END, pos: @this_char, col: 1)
+      find_start_of_line(from: @this_char - 2)
+      cluster << make_token(:EOL,
+                            pos: @this_char,
+                            col: @this_char - start_of_line)
+      cluster
+    end
+    # rubocop: enable AbcSize
+
+    def certificate_token_value
+      token[0..-6].gsub!(/\s+/, ' ').strip
     end
 
     def integer
@@ -235,7 +292,7 @@
 
     def space
       delimit
-      self.indent += 1 if tokens.last && tokens.last.last == :EOL
+      self.indent += 1 if tokens.last && tokens.last.value == :EOL
     end
 
     def space?
@@ -266,6 +323,8 @@
       self.state = :line_start
       self.indent = 0
       tokens << make_token(:EOL)
+      self.start_of_line = @this_char + 1
+      self.line += 1
     end
 
     def newline?
@@ -302,7 +361,14 @@
     end
 
     def pop_dedent
-      tokens << make_token(:DEDENT)
+      col =
+        if tokens.last.line == line
+          tokens.last.col
+        else
+          1
+        end
+
+      tokens << make_token(:DEDENT, col: col)
      indents.pop
    end
 
@@ -319,6 +385,7 @@
      end
 
      delimit
+      self.line -= 1
      update_indentation
      scrub_banner_garbage
      tokens
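The pure-Ruby lexer mirrors the C lexer: it now tracks line and start_of_line so every emitted token is an IOSParser::Token with line and column fields. A quick sketch (token values hand-derived from the changes above, shown as an expectation rather than a guarantee):

    require 'ios_parser/lexer'

    tokens = IOSParser::PureLexer.new.call("vlan 1\n name foo\n")

    tokens.first        # => #<struct IOSParser::Token value="vlan", pos=0, line=1, col=1>
    tokens.map(&:value) # => ["vlan", 1, :EOL, :INDENT, "name", "foo", :EOL, :DEDENT]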
@@ -0,0 +1,8 @@
+module IOSParser
+  Token = Struct.new(
+    :value,
+    :pos,
+    :line,
+    :col
+  )
+end
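This new lib/ios_parser/token.rb defines the Token struct that both lexers now emit: value keeps the old payload, pos the byte offset, plus the new line and column. Minimal illustration (not part of the gem's test suite):

    require 'ios_parser/token'

    token = IOSParser::Token.new('hostname', 0, 1, 1)
    token.value # => "hostname"
    token.pos   # => 0
    token.line  # => 1
    token.col   # => 1

    # Struct members can also be unpacked positionally, as the specs do:
    value, pos, line, col = token.to_a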
@@ -1,7 +1,7 @@
 module IOSParser
   class << self
     def version
-      '0.5.1'
+      '0.7.1'
     end
   end
 end
data/lib/ios_parser.rb CHANGED
@@ -1,4 +1,5 @@
 require 'json'
+require 'ios_parser/token'
 
 module IOSParser
   class LexError < StandardError; end
@@ -6,11 +7,27 @@ module IOSParser
   def self.lexer
     if const_defined?(:PureLexer)
       PureLexer
+    else
+      c_lexer
+    end
+  rescue LoadError
+    pure_lexer
+  end
+
+  def self.c_lexer
+    if RUBY_VERSION < '2.1'
+      warn 'The C Lexer requires Ruby 2.1 or later. The pure Ruby lexer will '\
+           'be used instead. You can eliminate this warning by upgrading ruby '\
+           'or explicitly using the pure-Ruby lexer '\
+           "(require 'ios_parser/pure')"
+      pure_lexer
     else
       require_relative 'ios_parser/c_lexer'
       CLexer
     end
-  rescue LoadError
+  end
+
+  def self.pure_lexer
     require 'ios_parser/lexer'
     PureLexer
   end
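The lexer selection is now explicit: IOSParser.lexer returns the compiled CLexer when available, warns and falls back to PureLexer on Ruby older than 2.1 or on LoadError, and always returns PureLexer once ios_parser/pure has been required. A hedged usage sketch (sample config string invented):

    require 'ios_parser'

    lexer  = IOSParser.lexer       # CLexer if the extension loaded, else PureLexer
    tokens = lexer.new.call("hostname example\n")
    tokens.map(&:value)            # => ["hostname", "example", :EOL]

    # Forcing the pure-Ruby implementation:
    require 'ios_parser/pure'
    IOSParser.lexer                # => IOSParser::PureLexer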
@@ -25,22 +25,26 @@ module IOSParser
             commands: [{ args: ['police', 300_000_000, 1_000_000,
                                 'exceed-action',
                                 'policed-dscp-transmit'],
-                         commands: [{ args: %w[set dscp cs1],
-                                      commands: [], pos: 114 }],
-                         pos: 50 }],
-             pos: 24 },
+                         commands: [
+                           { args: %w[set dscp cs1],
+                             commands: [], pos: 114, indent: 3 }
+                         ],
+                         pos: 50, indent: 2 }],
+             pos: 24, indent: 1 },
 
           { args: %w[class other_service],
             commands: [{ args: ['police', 600_000_000, 1_000_000,
                                 'exceed-action',
                                 'policed-dscp-transmit'],
-                         commands: [{ args: %w[set dscp cs2],
-                                      commands: [], pos: 214 },
-                                    { args: ['command_with_no_args'],
-                                      commands: [], pos: 230 }],
-                         pos: 150 }],
-            pos: 128 }],
-       pos: 0 }]
+                         commands: [
+                           { args: %w[set dscp cs2],
+                             commands: [], pos: 214, indent: 3 },
+                           { args: ['command_with_no_args'],
+                             commands: [], pos: 230, indent: 3 }
+                         ],
+                         pos: 150, indent: 2 }],
+            pos: 128, indent: 1 }],
+       pos: 0, indent: 0 }]
     }
   end
 
@@ -59,9 +63,9 @@ module IOSParser
     it('can be searched by an exact command') do
       expect(subject.find_all(name: 'set').map(&:to_hash))
        .to eq [{ args: %w[set dscp cs1],
-                  commands: [], pos: 114 },
+                  commands: [], pos: 114, indent: 3 },
                { args: %w[set dscp cs2],
-                  commands: [], pos: 214 }]
+                  commands: [], pos: 214, indent: 3 }]
    end
 
    context 'can be searched by name and the first argument' do
@@ -92,8 +96,8 @@ module IOSParser
      [{ args: ['police', 300_000_000, 1_000_000, 'exceed-action',
                'policed-dscp-transmit'],
         commands: [{ args: %w[set dscp cs1],
-                      commands: [], pos: 114 }],
-         pos: 50 }]
+                      commands: [], pos: 114, indent: 3 }],
+         pos: 50, indent: 2 }]
    end
 
    context 'integer query' do
@@ -115,7 +119,7 @@
               .find('set')
               .to_hash)
        .to eq(args: %w[set dscp cs1],
-               commands: [], pos: 114)
+               commands: [], pos: 114, indent: 3)
    end
  end # context 'nested search'
 
@@ -252,15 +256,15 @@
      cmd_ary = [
        { args: ['ip', 'route', '10.0.0.1', '255.255.255.255',
                 'Null0'],
-          commands: [], pos: 0 },
+          commands: [], pos: 0, indent: 0 },
        { args: ['ip', 'route', '9.9.9.199', '255.255.255.255',
                 '42.42.42.142', 'name', 'PONIES'],
-          commands: [], pos: 40 },
+          commands: [], pos: 40, indent: 0 },
        { args: ['ip', 'route', 'vrf', 'Mgmt-intf', '0.0.0.0',
                 '0.0.0.0', '9.9.9.199'],
-          commands: [], pos: 100 },
+          commands: [], pos: 100, indent: 0 },
        { args: ['ip', 'route', '0.0.0.0/0', '11.11.0.111', 120],
-          commands: [], pos: 149 }
+          commands: [], pos: 149, indent: 0 }
      ]
 
      expect(result.find_all('ip route').map(&:to_hash)).to eq(cmd_ary)
@@ -269,7 +273,7 @@
 
      cmd_hash = { args: ['ip', 'route', '9.9.9.199', '255.255.255.255',
                          '42.42.42.142', 'name', 'PONIES'],
-                   commands: [], pos: 40 }
+                   commands: [], pos: 40, indent: 0 }
      expect(result.find('ip route 9.9.9.199').to_hash).to eq(cmd_hash)
    end # end context '#call'
 
@@ -40,11 +40,11 @@ END
         'set', 'dscp', 'cs2', :EOL, :DEDENT, :DEDENT, :DEDENT]
    end
 
-    subject { klass.new.call(input).map(&:last) }
+    subject { klass.new.call(input).map(&:value) }
    it('enclosed in symbols') { should == output }
 
    it('enclosed in symbols (using the pure ruby lexer)') do
-      expect(subject_pure.map(&:last)).to eq output
+      expect(subject_pure.map(&:value)).to eq output
    end
  end
 
@@ -75,12 +75,12 @@ END
 
      it 'pure' do
        tokens = IOSParser::PureLexer.new.call(input)
-        expect(tokens.map(&:last)).to eq expectation
+        expect(tokens.map(&:value)).to eq expectation
      end # it 'pure' do
 
      it 'default' do
        tokens = IOSParser.lexer.new.call(input)
-        expect(tokens.map(&:last)).to eq expectation
+        expect(tokens.map(&:value)).to eq expectation
      end # it 'c' do
    end # context 'indented region' do
  end # context 'ASR indented regions' do
@@ -97,9 +97,11 @@ END
    end
 
    let(:output) do
-      [[0, 'banner'], [7, 'foobar'], [14, :BANNER_BEGIN],
-       [16, "asdf 1234 9786 asdf\nline 2\nline 3\n "],
-       [52, :BANNER_END], [53, :EOL]]
+      [[0, 1, 1, 'banner'], [7, 1, 8, 'foobar'],
+       [14, 1, 15, :BANNER_BEGIN],
+       [16, 2, 17, "asdf 1234 9786 asdf\nline 2\nline 3\n "],
+       [52, 5, 3, :BANNER_END], [53, 5, 4, :EOL]]
+        .map { |pos, line, col, val| Token.new(val, pos, line, col) }
    end
 
    it('tokenized and enclosed in symbols') { should == output }
@@ -119,8 +121,8 @@ END
      ['banner', 'exec', :BANNER_BEGIN, content, :BANNER_END, :EOL]
    end
 
-    it { expect(subject.map(&:last)).to eq output }
-    it { expect(subject_pure.map(&:last)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
  end
 
  context 'complex eos banner' do
@@ -131,14 +133,37 @@ END
      ['banner', 'motd', :BANNER_BEGIN, content, :BANNER_END, :EOL]
    end
 
-    it { expect(subject.map(&:last)).to eq output }
-    it { expect(subject_pure.map(&:last)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
+  end
+
+  context 'aaa authentication banner' do
+    let(:input) { <<END.unindent }
+      aaa authentication banner ^C
+      xyz
+      ^C
+      aaa blah
+    END
+
+    let(:output) do
+      ['aaa', 'authentication', 'banner',
+       :BANNER_BEGIN, "xyz\n", :BANNER_END, :EOL,
+       'aaa', 'blah', :EOL]
+    end
+
+    it 'lexes (c lexer)' do
+      expect(subject.map(&:value)).to eq output
+    end
+
+    it 'lexes (ruby lexer)' do
+      expect(subject_pure.map(&:value)).to eq output
+    end
  end
 
  context 'decimal number' do
    let(:input) { 'boson levels at 93.2' }
    let(:output) { ['boson', 'levels', 'at', 93.2] }
-    subject { klass.new.call(input).map(&:last) }
+    subject { klass.new.call(input).map(&:value) }
    it('converts to Float') { should == output }
  end
 
@@ -156,29 +181,33 @@ END
    end
 
    let(:output) do
-      [[0, 'crypto'],
-       [7, 'pki'],
-       [11, 'certificate'],
-       [23, 'chain'],
-       [29, 'TP-self-signed-0123456789'],
-       [54, :EOL],
-       [56, :INDENT],
-       [56, 'certificate'],
-       [68, 'self-signed'],
-       [80, '01'],
-       [85, :CERTIFICATE_BEGIN],
-       [85,
+      [[0, 1, 1, 'crypto'],
+       [7, 1, 8, 'pki'],
+       [11, 1, 12, 'certificate'],
+       [23, 1, 24, 'chain'],
+       [29, 1, 30, 'TP-self-signed-0123456789'],
+       [54, 1, 55, :EOL],
+       [56, 2, 2, :INDENT],
+       [56, 2, 2, 'certificate'],
+       [68, 2, 14, 'self-signed'],
+       [80, 2, 26, '01'],
+       [85, 3, 3, :CERTIFICATE_BEGIN],
+       [85, 3, 3,
        'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF '\
        'FFFFFFFF EEEEEEEE EEEEEEEE EEEEEEEE EEEEEEEE EEEEEEEE EEEEEEEE '\
        'EEEEEEEE EEEEEEEE DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD '\
        'DDDDDDDD DDDDDDDD DDDDDDDD AAAA'],
-       [323, :CERTIFICATE_END],
-       [323, :EOL],
-       [323, :DEDENT]]
+       [323, 6, 1, :CERTIFICATE_END],
+       [323, 6, 13, :EOL],
+       [323, 7, 1, :DEDENT]]
+        .map { |pos, line, col, val| Token.new(val, pos, line, col) }
    end
 
    subject { klass.new.call(input) }
-    it('tokenized') { expect(subject).to eq output }
+
+    it('tokenized') do
+      expect(subject).to eq output
+    end
 
    it('tokenized (using the pure ruby lexer)') do
      expect(subject_pure).to eq output
@@ -187,8 +216,8 @@ END
 
  context 'comments' do
    let(:input) { 'ip addr 127.0.0.0.1 ! asdfsdf' }
-    let(:output) { ['ip', 'addr', '127.0.0.0.1'] }
-    subject { klass.new.call(input).map(&:last) }
+    let(:output) { ['ip', 'addr', '127.0.0.0.1', :EOL] }
+    subject { klass.new.call(input).map(&:value) }
    it('dropped') { should == output }
  end
 
@@ -211,20 +240,20 @@ END
      ]
    end
 
-    it { expect(subject_pure.map(&:last)).to eq output }
-    it { expect(subject.map(&:last)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
  end # context 'quoted octothorpe' do
 
  context 'vlan range' do
    let(:input) { 'switchport trunk allowed vlan 50-90' }
    let(:output) do
      [
-        [0, 'switchport'],
-        [11, 'trunk'],
-        [17, 'allowed'],
-        [25, 'vlan'],
-        [30, '50-90']
-      ]
+        [0, 1, 1, 'switchport'],
+        [11, 1, 12, 'trunk'],
+        [17, 1, 18, 'allowed'],
+        [25, 1, 26, 'vlan'],
+        [30, 1, 31, '50-90']
+      ].map { |pos, line, col, val| Token.new(val, pos, line, col) }
    end
    it { should == output }
  end # context 'vlan range' do
@@ -247,31 +276,31 @@ END
      ]
    end
 
-    it { expect(subject_pure.map(&:last)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
  end
 
  context '# in the middle of a line is not a comment' do
    let(:input) { "vlan 1\n name #31337" }
    let(:output) { ['vlan', 1, :EOL, :INDENT, 'name', '#31337', :DEDENT] }
 
-    it { expect(subject_pure.map(&:last)).to eq output }
-    it { expect(subject.map(&:last)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
  end
 
  context '# at the start of a line is a comment' do
    let(:input) { "vlan 1\n# comment\nvlan 2" }
    let(:output) { ['vlan', 1, :EOL, 'vlan', 2] }
 
-    it { expect(subject_pure.map(&:last)).to eq output }
-    it { expect(subject.map(&:last)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
  end
 
  context '# after indentation is a comment' do
    let(:input) { "vlan 1\n # comment\nvlan 2" }
    let(:output) { ['vlan', 1, :EOL, :INDENT, :DEDENT, 'vlan', 2] }
 
-    it { expect(subject_pure.map(&:last)).to eq output }
-    it { expect(subject.map(&:last)).to eq output }
+    it { expect(subject_pure.map(&:value)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
  end
 
  context 'unterminated quoted string' do
@@ -285,6 +314,84 @@ END
      expect { subject }.to raise_error(pattern)
    end
  end
+
+  context 'subcommands separated by comment line' do
+    let(:input) do
+      <<-END.unindent
+        router static
+         address-family ipv4 unicast
+         !
+         address-family ipv6 unicast
+      END
+    end
+
+    let(:expected) do
+      expected_full.map(&:value)
+    end
+
+    let(:expected_full) do
+      [
+        [0, 1, 1, 'router'],
+        [7, 1, 8, 'static'],
+        [13, 1, 14, :EOL],
+        [15, 2, 2, :INDENT],
+        [15, 2, 2, 'address-family'],
+        [30, 2, 17, 'ipv4'],
+        [35, 2, 22, 'unicast'],
+        [42, 2, 29, :EOL],
+        [47, 4, 2, 'address-family'],
+        [62, 4, 17, 'ipv6'],
+        [67, 4, 22, 'unicast'],
+        [74, 4, 29, :EOL],
+        [74, 4, 29, :DEDENT]
+      ].map { |pos, line, col, val| Token.new(val, pos, line, col) }
+    end
+
+    it 'lexes both subcommands' do
+      expect(subject.map(&:value)).to eq expected
+    end
+
+    it 'lexes both subcommands (with the pure ruby lexer)' do
+      expect(subject_pure.map(&:value)).to eq expected
+    end
+
+    it 'lexes position, line, and column' do
+      expect(subject).to eq expected_full
+    end
+
+    it 'lexes position, line, and column (with the pure ruby lexer)' do
+      expect(subject_pure).to eq expected_full
+    end
+  end
+
+  context 'comment at end of line' do
+    let(:input) do
+      <<-END.unindent
+        description !
+        switchport access vlan 2
+      END
+    end
+
+    let(:output) do
+      ['description', :EOL, 'switchport', 'access', 'vlan', 2, :EOL]
+    end
+
+    it { expect(subject_pure.map(&:value)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
+  end # context 'comment at end of line' do
+
+  context 'large integers up to 2^63-1' do
+    let(:input) do
+      "42 4200000000 9223372036854775807"
+    end
+
+    let(:output) do
+      [42, 4200000000, 9223372036854775807]
+    end
+
+    it { expect(subject_pure.map(&:value)).to eq output }
+    it { expect(subject.map(&:value)).to eq output }
+  end # context 'large integers up to 2^63-1' do
  end
 end
 end
@@ -24,22 +24,26 @@ describe IOSParser do
             commands: [{ args: ['police', 300_000_000, 1_000_000,
                                 'exceed-action',
                                 'policed-dscp-transmit'],
-                         commands: [{ args: %w[set dscp cs1],
-                                      commands: [], pos: 114 }],
-                         pos: 50 }],
-             pos: 24 },
+                         commands: [
+                           { args: %w[set dscp cs1],
+                             commands: [], pos: 114, indent: 3 }
+                         ],
+                         pos: 50, indent: 2 }],
+             pos: 24, indent: 1 },
 
           { args: %w[class other_service],
            commands: [{ args: ['police', 600_000_000, 1_000_000,
                                'exceed-action',
                                'policed-dscp-transmit'],
-                         commands: [{ args: %w[set dscp cs2],
-                                      commands: [], pos: 214 },
-                                    { args: ['command_with_no_args'],
-                                      commands: [], pos: 230 }],
-                         pos: 150 }],
-            pos: 128 }],
-       pos: 0 }]
+                         commands: [
+                           { args: %w[set dscp cs2],
+                             commands: [], pos: 214, indent: 3 },
+                           { args: ['command_with_no_args'],
+                             commands: [], pos: 230, indent: 3 }
+                         ],
+                         pos: 150, indent: 2 }],
+            pos: 128, indent: 1 }],
+       pos: 0, indent: 0 }]
    }
  end
 
@@ -70,15 +74,18 @@ describe IOSParser do
          {
            args: %w[description blah blah blah],
            commands: [],
-            pos: 29
+            pos: 29,
+            indent: 1
          },
          {
            args: ['match', 'access-group', 'fred'],
            commands: [],
-            pos: 57
+            pos: 57,
+            indent: 1
          }
        ],
-        pos: 0
+        pos: 0,
+        indent: 0
      }
    ]
  }
@@ -92,5 +99,22 @@ describe IOSParser do
      expect(actual).to eq(output)
    end
  end # context "partial outdent" do
+
+  context 'comment at end of line' do
+    let(:input) do
+      <<END.unindent
+        description !
+        switchport access vlan 2
+      END
+    end
+
+    subject { described_class.parse(input) }
+
+    it 'parses both commands' do
+      should be_a IOSParser::IOS::Document
+      expect(subject.find(starts_with: 'description')).not_to be_nil
+      expect(subject.find(starts_with: 'switchport')).not_to be_nil
+    end
+  end
 end # describe '.parse'
 end # describe IOSParser
data/spec/spec_helper.rb CHANGED
@@ -1,5 +1,9 @@
 $LOAD_PATH << File.dirname(__FILE__) + '/../lib'
 
+RSpec.configure do |rspec|
+  rspec.filter_run_when_matching(focus: true)
+end
+
 def klass
   described_class
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ios_parser
 version: !ruby/object:Gem::Version
-  version: 0.5.1
+  version: 0.7.1
 platform: ruby
 authors:
 - Ben Miller
-autorequire: 
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-11-10 00:00:00.000000000 Z
+date: 2022-01-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake-compiler
@@ -52,7 +52,7 @@ dependencies:
   - - "~>"
     - !ruby/object:Gem::Version
       version: '0.54'
-description: 
+description:
 email: bjmllr@gmail.com
 executables: []
 extensions:
@@ -84,6 +84,7 @@ files:
 - lib/ios_parser/ios/queryable.rb
 - lib/ios_parser/lexer.rb
 - lib/ios_parser/pure.rb
+- lib/ios_parser/token.rb
 - lib/ios_parser/version.rb
 - spec/lib/ios_parser/ios/queryable_spec.rb
 - spec/lib/ios_parser/ios_spec.rb
@@ -94,7 +95,7 @@ homepage: https://github.com/bjmllr/ios_parser
 licenses:
 - GPL-3.0
 metadata: {}
-post_install_message: 
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -109,9 +110,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubyforge_project: 
-rubygems_version: 2.7.3
-signing_key: 
+rubygems_version: 3.1.4
+signing_key:
 specification_version: 4
 summary: convert network switch and router config files to structured data
 test_files: