ast_ast 0.1.0 → 0.2.0

Sign up to get free protection for your applications and to get access to all the features.
data/Rakefile CHANGED
@@ -1,10 +1,58 @@
1
1
  require 'rake'
2
+ require 'rspec/core/rake_task'
2
3
 
3
4
  require File.expand_path('../lib/ast_ast/version', __FILE__)
4
5
 
6
+ desc "Run rspec"
7
+ RSpec::Core::RakeTask.new do |t|
8
+ t.rspec_opts = ["-c", "-f progress", "-r ./spec/spec_helper.rb"]
9
+ t.pattern = 'spec/**/*_spec.rb'
10
+ end
11
+
12
+ task :publish => ['release:pre', 'release:build', 'release:push']
13
+
14
+
15
+ # Tag stuff is partially from `gem this`, I would use it but don't
16
+ # think it fits what I need. Nor would I write my own, there
17
+ # are way too many different tools for this sort of thing.
5
18
  namespace :release do
19
+
20
+ desc 'Last minute checks before pushing'
21
+ task :pre => [:spec] do
22
+ require 'highline/import'
23
+ ok = ask "OK to publish (y/n): "
24
+ if ok.strip != "y"
25
+ exit 0
26
+ end
27
+
28
+ tag = ask "Create tag for ast_ast v#{Ast::VERSION} (y/n): "
29
+ if tag.strip == "y"
30
+ Rake::Task['release:tag'].invoke
31
+ end
32
+ end
33
+
34
+ desc 'Tag vX.X.X'
6
35
  task :tag do
7
- system("git tag v#{Ast::VERSION}")
8
- system('git push origin --tags')
36
+ if `git diff --cached`.empty?
37
+ if `git tag`.split("\n").include?("v#{Ast::VERSION}")
38
+ raise "Version #{Ast::VERSION} has already been tagged"
39
+ end
40
+ system "git tag v#{Ast::VERSION}"
41
+ system 'git push origin --tags'
42
+ else
43
+ raise "Unstaged changes still waiting to be committed"
44
+ end
45
+ end
46
+
47
+ desc 'Build gemspec'
48
+ task :build do
49
+ system "gem build ast_ast.gemspec"
50
+ system "mkdir -p pkg"
51
+ system "mv ast_ast-#{Ast::VERSION}.gem pkg/ast_ast-#{Ast::VERSION}.gem"
52
+ end
53
+
54
+ desc 'Push to RubyGems'
55
+ task :push do
56
+ system "gem push pkg/ast_ast-#{Ast::VERSION}.gem"
9
57
  end
10
58
  end
@@ -1,4 +1,5 @@
1
1
  $: << File.dirname(__FILE__)
2
+ require 'tokens'
2
3
  require 'token'
3
4
 
4
5
  module Ast
@@ -3,7 +3,7 @@ module Ast
3
3
  class Tokeniser
4
4
 
5
5
  class Rule
6
- attr_accessor :name, :regex, :block
6
+ attr_accessor :name, :regex, :proc
7
7
 
8
8
  # Creates a new Rule instance
9
9
  #
@@ -11,13 +11,13 @@ module Ast
11
11
  # Name of the token to be created.
12
12
  # @param regex [Regexp]
13
13
  # Regular expression to be matched
14
- # @param block [Proc]
15
- # Optional block to be executed with match(es)
14
+ # @param proc [Proc]
15
+ # Optional proc to be executed with match(es)
16
16
  #
17
- def initialize(name, regex, &block)
17
+ def initialize(name, regex, proc=nil, &block)
18
18
  @name = name
19
19
  @regex = regex
20
- @block = block || Proc.new {|i| i}
20
+ @proc = proc || block
21
21
  end
22
22
 
23
23
  # Runs the block that was given using either, the full match if there
@@ -56,7 +56,7 @@ module Ast
56
56
  val = arr unless arr.empty?
57
57
  val = arr[0] if arr.size == 1
58
58
  val = arr[0] if arr[0] == arr[1] # this happens with /(a|b|c)/ regexes
59
- @block.call val
59
+ @proc.call val
60
60
  end
61
61
  end
62
62
 
@@ -69,8 +69,24 @@ module Ast
69
69
  def self.rule(name, regex, &block)
70
70
  @rules ||= []
71
71
  # make rules with same name overwrite first rule
72
- @rules.delete_if {|i| i.name == name}
73
- @rules << Rule.new(name, regex, &block)
72
+ @rules.delete_if {|i| i.name == name}
73
+
74
+ # Create default block which just returns a value
75
+ block ||= Proc.new {|i| i}
76
+ # Make sure to return a token
77
+ proc = Proc.new {|_i|
78
+ block_result = block.call(_i)
79
+ if block_result.is_a? Array
80
+ r = []
81
+ block_result.each do |j|
82
+ r << Ast::Token.new(name, j)
83
+ end
84
+ r
85
+ else
86
+ Ast::Token.new(name, block_result)
87
+ end
88
+ }
89
+ @rules << Rule.new(name, regex, proc)
74
90
  end
75
91
 
76
92
  # @return [Array]
@@ -78,6 +94,26 @@ module Ast
78
94
  #
79
95
  def self.rules; @rules; end
80
96
 
97
+ # Creates a new token rule, that is the block returns an Ast::Token instance.
98
+ #
99
+ # @example
100
+ #
101
+ # keywords = ['def', 'next', 'while', 'end']
102
+ #
103
+ # token /[a-z]+/ do |i|
104
+ # if keywords.include?(i)
105
+ # Ast::Token.new(:keyword, i)
106
+ # else
107
+ # Ast::Token.new(:word, i)
108
+ # end
109
+ #
110
+ # @param regex [Regexp]
111
+ #
112
+ def self.token(regex, &block)
113
+ @rules ||= []
114
+ @rules << Rule.new(nil, regex, block)
115
+ end
116
+
81
117
  # Takes the input and uses the rules that were created to scan it.
82
118
  #
83
119
  # @param [String]
@@ -98,9 +134,11 @@ module Ast
98
134
  ran = i.run(a)
99
135
  # split array into separate tokens, *not* values
100
136
  if ran.is_a? Array
101
- ran.each {|a| result << [i.name, a]}
137
+ #ran.each {|a| result << [i.name, a]}
138
+ ran.each {|a| result << a }
102
139
  else
103
- result << [i.name, ran]
140
+ #result << [i.name, ran]
141
+ result << ran
104
142
  end
105
143
  end
106
144
  end
@@ -1,3 +1,3 @@
1
1
  module Ast
2
- VERSION = "0.1.0"
2
+ VERSION = "0.2.0"
3
3
  end
@@ -11,15 +11,6 @@ describe Ast::Tokeniser::Rule do
11
11
  specify { subject.regex.should be_kind_of Regexp }
12
12
  end
13
13
 
14
- describe "#block" do
15
- specify { subject.block.should be_kind_of Proc }
16
- context "when no block is given" do
17
- it "use default proc which returns argument" do
18
- subject.block.call(1).should == 1
19
- end
20
- end
21
- end
22
-
23
14
  describe "#run" do
24
15
 
25
16
  context "when returning a string" do
@@ -59,9 +50,30 @@ describe Ast::Tokeniser do
59
50
  end
60
51
  end
61
52
 
53
+ describe ".token" do
54
+
55
+ class KlassToken < Ast::Tokeniser
56
+ token /[a-z]+/ do |i|
57
+ if i.include? "a"
58
+ Ast::Token.new(:a_tok, i)
59
+ else
60
+ Ast::Token.new(:not_a, i)
61
+ end
62
+ end
63
+ end
64
+
65
+ it "adds a new rule to list" do
66
+ KlassToken.rules.map {|i| i.regex}.should include /[a-z]+/
67
+ end
68
+
69
+ end
70
+
62
71
  describe ".tokenise" do
63
72
 
64
73
  class Klass2 < Ast::Tokeniser
74
+
75
+ commands = %w(git commit status)
76
+
65
77
  rule :long, /--([a-zA-Z0-9]+)/ do |i|
66
78
  i[1]
67
79
  end
@@ -70,14 +82,21 @@ describe Ast::Tokeniser do
70
82
  i[1].split('')
71
83
  end
72
84
 
73
- rule :word, /[a-zA-Z0-9]+/
85
+ token /[a-zA-Z0-9]+/ do |i|
86
+ if commands.include?(i)
87
+ Ast::Token.new(:command, i)
88
+ else
89
+ Ast::Token.new(:word, i)
90
+ end
91
+ end
92
+
74
93
  end
75
94
 
76
95
  specify { Klass2.tokenise("").should be_kind_of Ast::Tokens }
77
96
 
78
97
  it "returns the correct tokens" do
79
- r = Klass2.tokenise("--along -sh aword")
80
- r.to_a.should == [[:long, "along"], [:short, "s"], [:short, "h"], [:word, "aword"]]
98
+ r = Klass2.tokenise("git --along -sh aword")
99
+ r.to_a.should == [[:command, "git"], [:long, "along"], [:short, "s"], [:short, "h"], [:word, "aword"]]
81
100
  end
82
101
 
83
102
  it "runs example in Readme" do
metadata CHANGED
@@ -4,9 +4,9 @@ version: !ruby/object:Gem::Version
4
4
  prerelease: false
5
5
  segments:
6
6
  - 0
7
- - 1
7
+ - 2
8
8
  - 0
9
- version: 0.1.0
9
+ version: 0.2.0
10
10
  platform: ruby
11
11
  authors:
12
12
  - Joshua Hawxwell
@@ -14,7 +14,7 @@ autorequire:
14
14
  bindir: bin
15
15
  cert_chain: []
16
16
 
17
- date: 2010-11-29 00:00:00 +00:00
17
+ date: 2010-12-07 00:00:00 +00:00
18
18
  default_executable:
19
19
  dependencies:
20
20
  - !ruby/object:Gem::Dependency