ruby-druid 0.1.9 → 0.9.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -0,0 +1,3 @@
+ module Druid
+   VERSION = "0.9.0"
+ end
@@ -0,0 +1,141 @@
+ require 'zk'
+ require 'multi_json'
+ require 'rest_client'
+ 
+ module Druid
+   class ZK
+     def initialize(uri, opts = {})
+       @zk = ::ZK.new(uri, chroot: :check)
+       @registry = Hash.new { |hash, key| hash[key] = Array.new }
+       @discovery_path = opts[:discovery_path] || '/discovery'
+       @watched_services = Hash.new
+       register
+     end
+ 
+     def register
+       $log.info("druid.zk register discovery path") if $log
+       @zk.on_expired_session { register }
+       @zk.register(@discovery_path, only: :child) do |event|
+         $log.info("druid.zk got event on discovery path") if $log
+         check_services
+       end
+       check_services
+     end
+ 
+     def close!
+       $log.info("druid.zk shutting down") if $log
+       @zk.close!
+     end
+ 
+     def register_service(service, brokers)
+       $log.info("druid.zk register", service: service, brokers: brokers) if $log
+       # poor mans load balancing
+       @registry[service] = brokers.shuffle
+     end
+ 
+     def unregister_service(service)
+       $log.info("druid.zk unregister", service: service) if $log
+       @registry.delete(service)
+       unwatch_service(service)
+     end
+ 
+     def watch_service(service)
+       return if @watched_services.include?(service)
+       $log.info("druid.zk watch", service: service) if $log
+       watch = @zk.register(watch_path(service), only: :child) do |event|
+         $log.info("druid.zk got event on watch path for", service: service, event: event) if $log
+         unwatch_service(service)
+         check_service(service)
+       end
+       @watched_services[service] = watch
+     end
+ 
+     def unwatch_service(service)
+       return unless @watched_services.include?(service)
+       $log.info("druid.zk unwatch", service: service) if $log
+       @watched_services.delete(service).unregister
+     end
+ 
+     def check_services
+       $log.info("druid.zk checking services") if $log
+       zk_services = @zk.children(@discovery_path, watch: true)
+ 
+       (services - zk_services).each do |service|
+         unregister_service(service)
+       end
+ 
+       zk_services.each do |service|
+         check_service(service)
+       end
+     end
+ 
+     def verify_broker(service, name)
+       $log.info("druid.zk verify", broker: name, service: service) if $log
+       info = @zk.get("#{watch_path(service)}/#{name}")
+       node = MultiJson.load(info[0])
+       uri = "http://#{node['address']}:#{node['port']}/druid/v2/"
+       check = RestClient::Request.execute({
+         method: :get, url: "#{uri}datasources/",
+         timeout: 5, open_timeout: 5
+       })
+       $log.info("druid.zk verified", uri: uri, sources: check) if $log
+       return [uri, MultiJson.load(check.to_str)] if check.code == 200
+     rescue
+       return false
+     end
+ 
+     def watch_path(service)
+       "#{@discovery_path}/#{service}"
+     end
+ 
+     def check_service(service)
+       return if @watched_services.include?(service)
+ 
+       watch_service(service)
+ 
+       known = @registry[service].map { |node| node[:name] }
+       live = @zk.children(watch_path(service), watch: true)
+       new_list = @registry[service].select { |node| live.include?(node[:name]) }
+       $log.info("druid.zk checking", service: service, known: known, live: live, new_list: new_list) if $log
+ 
+       # verify the new entries to be living brokers
+       (live - known).each do |name|
+         uri, sources = verify_broker(service, name)
+         new_list.push({ name: name, uri: uri, data_sources: sources }) if uri
+       end
+ 
+       if new_list.empty?
+         # don't show services w/o active brokers
+         unregister_service(service)
+       else
+         register_service(service, new_list)
+       end
+     end
+ 
+     def services
+       @registry.keys
+     end
+ 
+     def data_sources
+       result = Hash.new { |hash, key| hash[key] = [] }
+ 
+       @registry.each do |service, brokers|
+         brokers.each do |broker|
+           broker[:data_sources].each do |data_source|
+             result["#{service}/#{data_source}"] << broker[:uri]
+           end
+         end
+       end
+ 
+       result.each do |source, uris|
+         result[source] = uris.sample if uris.respond_to?(:sample)
+       end
+ 
+       result
+     end
+ 
+     def to_s
+       @registry.to_s
+     end
+   end
+ end
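Taken together, the new Druid::ZK class replaces the old ZooKeeper handler: it watches the discovery path for child events, verifies every announced broker with an HTTP request to its datasources/ endpoint before trusting it, and keeps a shuffled per-service broker list as simple load balancing. A minimal usage sketch built only from the methods above; the top-level require and the ZooKeeper address are assumptions for illustration:

    require 'druid'  # assumed gem entry point

    # Connect and start watching the default '/discovery' path.
    zk = Druid::ZK.new('localhost:2181')  # placeholder ensemble address

    # data_sources maps "service/dataSource" keys to a single broker URI,
    # sampled from the verified brokers registered for that service.
    zk.data_sources.each do |source, uri|
      puts "#{source} => #{uri}"
    end

    zk.close!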
@@ -1,20 +1,32 @@
- # coding: utf-8
+ lib = File.expand_path("../lib", __FILE__)
+ $:.unshift(lib) unless $:.include?(lib)
+ 
+ require "druid/version"
  
  Gem::Specification.new do |spec|
    spec.name = "ruby-druid"
-   spec.version = "0.1.9"
-   spec.authors = ["LiquidM, Inc."]
-   spec.email = ["opensource@liquidm.com"]
-   spec.summary = %q{Ruby client for metamx druid}
-   spec.description = %q{Ruby client for metamx druid}
-   spec.homepage = "https://github.com/liquidm/ruby-druid"
+   spec.version = Druid::VERSION
+   spec.authors = ["Ruby Druid Community"]
+   spec.summary = %q{A Ruby client for Druid}
+   spec.description = <<-EOF
+     ruby-druid is a Ruby client for Druid. It includes a Squeel-like query DSL
+     and generates a JSON query that can be sent to Druid directly.
+   EOF
+   spec.homepage = "https://github.com/ruby-druid/ruby-druid"
    spec.license = "MIT"
  
-   spec.files = `git ls-files`.split($/)
-   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
-   spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
+   spec.files = Dir["lib/**/*"] + %w{LICENSE README.md ruby-druid.gemspec}
+   spec.test_files = Dir["spec/**/*"]
    spec.require_paths = ["lib"]
  
-   spec.add_dependency "zk"
-   spec.add_dependency "rest-client"
+   spec.add_dependency "activesupport", "~> 4.2"
+   spec.add_dependency "activemodel", "~> 4.2"
+   spec.add_dependency "iso8601", "~> 0.9"
+   spec.add_dependency "multi_json", "~> 1.12"
+   spec.add_dependency "rest-client", "~> 2.0"
+   spec.add_dependency "zk", "~> 1.9"
+   spec.add_development_dependency "bundler", "~> 1.12"
+   spec.add_development_dependency "rake", "~> 11.2"
+   spec.add_development_dependency "rspec", "~> 3.4"
+   spec.add_development_dependency "webmock", "~> 2.1"
  end
@@ -1,67 +1,20 @@
  describe Druid::Client do
  
    it 'calls zookeeper on intialize' do
-     Druid::ZooHandler.should_receive(:new)
-     Druid::Client.new('test_uri', zk_keepalive: true)
-   end
- 
-   it 'creates a query' do
-     Druid::ZooHandler.stub(:new).and_return(double(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com'}, :close! => true))
-     Druid::Client.new('test_uri', zk_keepalive: true).query('test/test').should be_a Druid::Query
-   end
- 
-   it 'sends query if block is given' do
-     Druid::ZooHandler.stub(:new).and_return(double(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com'}, :close! => true))
-     client = Druid::Client.new('test_uri', zk_keepalive: true)
-     client.should_receive(:send)
-     client.query('test/test') do
-       group(:group1)
-     end
-   end
- 
-   it 'parses response on 200' do
-     stub_request(:post, "http://www.example.com/druid/v2").
-       with(:body => "{\"dataSource\":\"test\",\"granularity\":\"all\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"]}",
-            :headers => {'Accept'=>'*/*', 'Content-Type'=>'application/json', 'User-Agent'=>'Ruby'}).
-       to_return(:status => 200, :body => "[]", :headers => {})
-     Druid::ZooHandler.stub(:new).and_return(double(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com/druid/v2'}, :close! => true))
-     client = Druid::Client.new('test_uri', zk_keepalive: true)
-     JSON.should_receive(:parse).and_return([])
-     client.send(client.query('test/test').interval("2013-04-04", "2013-04-04"))
-   end
- 
-   it 'raises on request failure' do
-     stub_request(:post, "http://www.example.com/druid/v2").
-       with(:body => "{\"dataSource\":\"test\",\"granularity\":\"all\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"]}",
-            :headers => {'Accept'=>'*/*', 'Content-Type'=>'application/json', 'User-Agent'=>'Ruby'}).
-       to_return(:status => 666, :body => "Strange server error", :headers => {})
-     Druid::ZooHandler.stub(:new).and_return(double(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com/druid/v2'}, :close! => true))
-     client = Druid::Client.new('test_uri', zk_keepalive: true)
-     expect { client.send(client.query('test/test').interval("2013-04-04", "2013-04-04")) }.to raise_error(RuntimeError, /Request failed: 666: Strange server error/)
-   end
- 
-   it 'should have a static setup' do
-     client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'mock_uri'})
-     client.data_sources.should == ['madvertise/mock']
-     client.data_source_uri('madvertise/mock').should == URI('mock_uri')
-   end
- 
-   it 'should report dimensions of a data source correctly' do
-     stub_request(:get, "http://www.example.com/druid/v2/datasources/mock").
-       with(:headers =>{'Accept'=>'*/*', 'User-Agent'=>'Ruby'}).
-       to_return(:status => 200, :body => '{"dimensions":["d1","d2","d3"],"metrics":["m1", "m2"]}')
- 
-     client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'http://www.example.com/druid/v2/'})
-     client.data_source('madvertise/mock').dimensions.should == ["d1","d2","d3"]
-   end
- 
-   it 'should report metrics of a data source correctly' do
-     stub_request(:get, "http://www.example.com/druid/v2/datasources/mock").
-       with(:headers =>{'Accept'=>'*/*', 'User-Agent'=>'Ruby'}).
-       to_return(:status => 200, :body => '{"dimensions":["d1","d2","d3"],"metrics":["m1", "m2"]}')
- 
-     client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'http://www.example.com/druid/v2/'})
-     client.data_source('madvertise/mock').metrics.should == ["m1","m2"]
+     expect(Druid::ZK).to receive(:new).with('test_uri', {})
+     Druid::Client.new('test_uri')
+   end
+ 
+   it 'returns the correct data source' do
+     stub_request(:get, "http://www.example.com/druid/v2/datasources/test").
+       with(:headers => { 'Accept'=>'*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'User-Agent' => 'Ruby' }).
+       to_return(:status => 200, :body => "{\"dimensions\":[\"d1\", \"d2\"], \"metrics\":[\"m1\", \"m2\"]}", :headers => {})
+     expect(Druid::ZK).to receive(:new).and_return(double(Druid::ZK, :data_sources => { 'test/test' => 'http://www.example.com/druid/v2/' }, :close! => true))
+     client = Druid::Client.new('test_uri')
+     ds = client.data_source('test/test')
+     expect(ds.name).to eq('test')
+     expect(ds.metrics).to eq(['m1', 'm2'])
+     expect(ds.dimensions).to eq(['d1', 'd2'])
    end
  
  end
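The rewritten client spec doubles as a terse description of the 0.9.0 API: Druid::Client.new now takes just a ZooKeeper URI plus an options hash (the old zk_keepalive and static_setup options are gone from these examples), and data_source returns an object exposing name, dimensions, and metrics fetched from the broker's metadata endpoint. A hedged sketch of that flow, with placeholder host names:

    client = Druid::Client.new('zk-host:2181')  # placeholder ZooKeeper URI
    ds = client.data_source('service/source')   # key format: "service/dataSource"
    ds.name        # => "source"
    ds.dimensions  # dimension names reported by the broker
    ds.metrics     # metric names reported by the broker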
@@ -0,0 +1,65 @@
+ describe Druid::DataSource do
+ 
+   context '#post' do
+     it 'parses response on 200' do
+       # MRI
+       stub_request(:post, "http://www.example.com/druid/v2").
+         with(:body => "{\"context\":{\"queryId\":null},\"queryType\":\"timeseries\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"],\"granularity\":\"all\",\"dataSource\":\"test\"}",
+              :headers => { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Content-Type' => 'application/json', 'User-Agent' => 'Ruby' }).
+         to_return(:status => 200, :body => '[]', :headers => {})
+       # JRuby ... *sigh
+       stub_request(:post, "http://www.example.com/druid/v2").
+         with(:body => "{\"context\":{\"queryId\":null},\"granularity\":\"all\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"],\"queryType\":\"timeseries\",\"dataSource\":\"test\"}",
+              :headers => { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Content-Type' => 'application/json', 'User-Agent' => 'Ruby' }).
+         to_return(:status => 200, :body => '[]', :headers => {})
+       ds = Druid::DataSource.new('test/test', 'http://www.example.com/druid/v2')
+       query = Druid::Query::Builder.new.interval('2013-04-04', '2013-04-04').granularity(:all).query
+       query.context.queryId = nil
+       expect(ds.post(query)).to be_empty
+     end
+ 
+     it 'raises on request failure' do
+       # MRI
+       stub_request(:post, 'http://www.example.com/druid/v2').
+         with(:body => "{\"context\":{\"queryId\":null},\"queryType\":\"timeseries\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"],\"granularity\":\"all\",\"dataSource\":\"test\"}",
+              :headers => { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Content-Type' => 'application/json', 'User-Agent' => 'Ruby' }).
+         to_return(:status => 666, :body => 'Strange server error', :headers => {})
+       # JRuby ... *sigh
+       stub_request(:post, 'http://www.example.com/druid/v2').
+         with(:body => "{\"context\":{\"queryId\":null},\"granularity\":\"all\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"],\"queryType\":\"timeseries\",\"dataSource\":\"test\"}",
+              :headers => { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Content-Type' => 'application/json', 'User-Agent' => 'Ruby' }).
+         to_return(:status => 666, :body => 'Strange server error', :headers => {})
+       ds = Druid::DataSource.new('test/test', 'http://www.example.com/druid/v2')
+       query = Druid::Query::Builder.new.interval('2013-04-04', '2013-04-04').granularity(:all).query
+       query.context.queryId = nil
+       expect { ds.post(query) }.to raise_error(Druid::DataSource::Error)
+     end
+   end
+ 
+   context '#metadata' do
+     it 'parses metrics on 200' do
+       stub_request(:get, 'http://www.example.com/druid/v2/datasources/test').
+         to_return(:status => 200, :body => '{}', :headers => {})
+       ds = Druid::DataSource.new('test/test', 'http://www.example.com/druid/v2/')
+       expect(ds.metrics).to be_nil
+     end
+ 
+     it 'raises on request failure' do
+       stub_request(:get, 'http://www.example.com/druid/v2/datasources/test').
+         to_return(:status => 666, :body => 'Strange server error', :headers => {})
+       ds = Druid::DataSource.new('test/test', 'http://www.example.com/druid/v2/')
+       expect { ds.metrics }.to raise_error(RuntimeError)
+     end
+   end
+ 
+   context '#metadata!' do
+     it 'includes interval in metadata request' do
+       stub = stub_request(:get, 'http://www.example.com/druid/v2/datasources/test?interval=2015-04-10T00:00:00+00:00/2015-04-17T00:00:00+00:00').
+         to_return(:status => 200, :body => '{}', :headers => {})
+       ds = Druid::DataSource.new('test/test', 'http://www.example.com/druid/v2/')
+       ds.metadata!(:interval => ['2015-04-10', '2015-04-17'])
+       expect(stub).to have_been_requested
+     end
+   end
+ 
+ end
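These specs pin down the new Druid::DataSource surface: post serializes a query, POSTs it to the broker, and returns the parsed rows, raising Druid::DataSource::Error on a failed request, while metrics and metadata! read the datasources/<name> metadata endpoint. A minimal sketch of posting a query, assuming a reachable broker at a placeholder URI:

    ds = Druid::DataSource.new('test/test', 'http://broker.example.com/druid/v2')
    query = Druid::Query::Builder.new.interval('2013-04-04', '2013-04-05').granularity(:all).query
    rows = ds.post(query)  # parsed JSON result; raises Druid::DataSource::Error on failure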
@@ -1,142 +1,137 @@
  describe Druid::Query do
  
    before :each do
-     @query = Druid::Query.new('test')
-   end
- 
-   it 'takes a datasource in the constructor' do
-     query = Druid::Query.new('test')
-     JSON.parse(query.to_json)['dataSource'].should == 'test'
+     @query = Druid::Query::Builder.new
    end
  
    it 'takes a query type' do
      @query.query_type('query_type')
-     JSON.parse(@query.to_json)['queryType'].should == 'query_type'
+     expect(JSON.parse(@query.query.to_json)['queryType']).to eq('query_type')
    end
  
    it 'sets query type by group_by' do
-     @query.group_by()
-     JSON.parse(@query.to_json)['queryType'].should == 'groupBy'
+     @query.group_by
+     expect(JSON.parse(@query.query.to_json)['queryType']).to eq('groupBy')
    end
  
    it 'sets query type to timeseries' do
-     @query.time_series()
-     JSON.parse(@query.to_json)['queryType'].should == 'timeseries'
+     @query.timeseries
+     expect(JSON.parse(@query.query.to_json)['queryType']).to eq('timeseries')
    end
  
    it 'takes dimensions from group_by method' do
      @query.group_by(:a, :b, :c)
-     JSON.parse(@query.to_json)['dimensions'].should == ['a', 'b', 'c']
+     expect(JSON.parse(@query.query.to_json)['dimensions']).to eq(['a', 'b', 'c'])
    end
  
    it 'takes dimension, metric and threshold from topn method' do
      @query.topn(:a, :b, 25)
-     result = JSON.parse(@query.to_json)
-     result['dimension'].should == 'a'
-     result['metric'].should == 'b'
-     result['threshold'].should == 25
+     result = JSON.parse(@query.query.to_json)
+     expect(result['dimension']).to eq('a')
+     expect(result['metric']).to eq('b')
+     expect(result['threshold']).to eq(25)
    end
  
    describe '#postagg' do
      it 'build a post aggregation with a constant right' do
        @query.postagg{(a + 1).as ctr }
  
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"+",
          "fields"=>
-           [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
+           [{"type"=>"fieldAccess", "fieldName"=>"a"},
            {"type"=>"constant", "value"=>1}],
-         "name"=>"ctr"}]
+         "name"=>"ctr"}])
      end
  
      it 'build a + post aggregation' do
        @query.postagg{(a + b).as ctr }
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"+",
          "fields"=>
-           [{"type"=>"fieldAccess","name"=>"a", "fieldName"=>"a"},
-           {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
-         "name"=>"ctr"}]
+           [{"type"=>"fieldAccess", "fieldName"=>"a"},
+           {"type"=>"fieldAccess", "fieldName"=>"b"}],
+         "name"=>"ctr"}])
      end
  
      it 'build a - post aggregation' do
        @query.postagg{(a - b).as ctr }
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"-",
          "fields"=>
-           [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
-           {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
-         "name"=>"ctr"}]
+           [{"type"=>"fieldAccess", "fieldName"=>"a"},
+           {"type"=>"fieldAccess", "fieldName"=>"b"}],
+         "name"=>"ctr"}])
      end
  
      it 'build a * post aggregation' do
        @query.postagg{(a * b).as ctr }
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"*",
          "fields"=>
-           [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
-           {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
-         "name"=>"ctr"}]
+           [{"type"=>"fieldAccess", "fieldName"=>"a"},
+           {"type"=>"fieldAccess", "fieldName"=>"b"}],
+         "name"=>"ctr"}])
      end
  
      it 'build a / post aggregation' do
        @query.postagg{(a / b).as ctr }
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"/",
          "fields"=>
-           [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
-           {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
-         "name"=>"ctr"}]
+           [{"type"=>"fieldAccess", "fieldName"=>"a"},
+           {"type"=>"fieldAccess", "fieldName"=>"b"}],
+         "name"=>"ctr"}])
      end
  
      it 'build a complex post aggregation' do
        @query.postagg{((a / b) * 1000).as ctr }
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"*",
          "fields"=>
            [{"type"=>"arithmetic", "fn"=>"/", "fields"=>
-             [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
-             {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}]},
+             [{"type"=>"fieldAccess", "fieldName"=>"a"},
+             {"type"=>"fieldAccess", "fieldName"=>"b"}]},
            {"type"=>"constant", "value"=>1000}],
-         "name"=>"ctr"}]
+         "name"=>"ctr"}])
      end
  
      it 'adds fields required by the postagg operation to longsum' do
        @query.postagg{ (a/b).as c }
-       JSON.parse(@query.to_json)['aggregations'].should == [
+       expect(JSON.parse(@query.query.to_json)['aggregations']).to eq([
          {"type"=>"longSum", "name"=>"a", "fieldName"=>"a"},
          {"type"=>"longSum", "name"=>"b", "fieldName"=>"b"}
-       ]
+       ])
      end
  
      it 'chains aggregations' do
        @query.postagg{(a / b).as ctr }.postagg{(b / a).as rtc }
  
-       JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([{"type"=>"arithmetic",
          "fn"=>"/",
          "fields"=>
-           [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
-           {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
+           [{"type"=>"fieldAccess", "fieldName"=>"a"},
+           {"type"=>"fieldAccess", "fieldName"=>"b"}],
          "name"=>"ctr"},
          {"type"=>"arithmetic",
          "fn"=>"/",
          "fields"=>
-           [{"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"},
-           {"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"}],
+           [{"type"=>"fieldAccess", "fieldName"=>"b"},
+           {"type"=>"fieldAccess", "fieldName"=>"a"}],
          "name"=>"rtc"}
-       ]
+       ])
      end
  
      it 'builds a javascript post aggregation' do
        @query.postagg { js('function(agg1, agg2) { return agg1 + agg2; }').as result }
-       JSON.parse(@query.to_json)['postAggregations'].should == [
+       expect(JSON.parse(@query.query.to_json)['postAggregations']).to eq([
          {
            'type' => 'javascript',
            'name' => 'result',
            'fieldNames' => ['agg1', 'agg2'],
            'function' => 'function(agg1, agg2) { return agg1 + agg2; }'
          }
-       ]
+       ])
      end
  
      it 'raises an error when an invalid javascript function is used' do
@@ -148,18 +143,75 @@ describe Druid::Query do
  
    it 'builds aggregations on long_sum' do
      @query.long_sum(:a, :b, :c)
-     JSON.parse(@query.to_json)['aggregations'].should == [
+     expect(JSON.parse(@query.query.to_json)['aggregations']).to eq([
        { 'type' => 'longSum', 'name' => 'a', 'fieldName' => 'a'},
        { 'type' => 'longSum', 'name' => 'b', 'fieldName' => 'b'},
        { 'type' => 'longSum', 'name' => 'c', 'fieldName' => 'c'}
-     ]
+     ])
+   end
+ 
+   describe '#min' do
+     it 'builds aggregations with "min" type' do
+       @query.min(:a, :b)
+       expect(JSON.parse(@query.query.to_json)['aggregations']).to eq [
+         { 'type' => 'min', 'name' => 'a', 'fieldName' => 'a'},
+         { 'type' => 'min', 'name' => 'b', 'fieldName' => 'b'}
+       ]
+     end
+   end
+ 
+   describe '#max' do
+     it 'builds aggregations with "max" type' do
+       @query.max(:a, :b)
+       expect(JSON.parse(@query.query.to_json)['aggregations']).to eq [
+         { 'type' => 'max', 'name' => 'a', 'fieldName' => 'a'},
+         { 'type' => 'max', 'name' => 'b', 'fieldName' => 'b'}
+       ]
+     end
+   end
+ 
+   describe '#hyper_unique' do
+     it 'builds aggregation with "hyperUnique"' do
+       @query.hyper_unique(:a, :b)
+       expect(JSON.parse(@query.query.to_json)['aggregations']).to eq [
+         { 'type' => 'hyperUnique', 'name' => 'a', 'fieldName' => 'a'},
+         { 'type' => 'hyperUnique', 'name' => 'b', 'fieldName' => 'b'}
+       ]
+     end
+   end
+ 
+   describe '#cardinality' do
+     it 'builds aggregation with "cardinality" type' do
+       @query.cardinality(:a, [:dim1, :dim2], true)
+       expect(JSON.parse(@query.query.to_json)['aggregations']).to eq [
+         { 'type' => 'cardinality', 'name' => 'a', 'fieldNames' => ['dim1', 'dim2'], 'byRow' => true }
+       ]
+     end
+   end
+ 
+   describe '#js_aggregation' do
+     it 'builds aggregation with "javascript" type' do
+       @query.js_aggregation(:aggregate, [:x, :y],
+         aggregate: "function(current, a, b) { return current + (Math.log(a) * b); }",
+         combine: "function(partialA, partialB) { return partialA + partialB; }",
+         reset: "function() { return 10; }"
+       )
+       expect(JSON.parse(@query.query.to_json)['aggregations']).to eq [{
+         'type' => 'javascript',
+         'name' => 'aggregate',
+         'fieldNames' => ['x', 'y'],
+         'fnAggregate' => 'function(current, a, b) { return current + (Math.log(a) * b); }',
+         'fnCombine' => 'function(partialA, partialB) { return partialA + partialB; }',
+         'fnReset' => 'function() { return 10; }'
+       }]
+     end
    end
  
    it 'appends long_sum properties from aggregations on calling long_sum again' do
      @query.long_sum(:a, :b, :c)
      @query.double_sum(:x,:y)
      @query.long_sum(:d, :e, :f)
-     JSON.parse(@query.to_json)['aggregations'].sort{|x,y| x['name'] <=> y['name']}.should == [
+     expect(JSON.parse(@query.query.to_json)['aggregations'].sort{|x,y| x['name'] <=> y['name']}).to eq([
        { 'type' => 'longSum', 'name' => 'a', 'fieldName' => 'a'},
        { 'type' => 'longSum', 'name' => 'b', 'fieldName' => 'b'},
        { 'type' => 'longSum', 'name' => 'c', 'fieldName' => 'c'},
@@ -168,21 +220,21 @@ describe Druid::Query do
        { 'type' => 'longSum', 'name' => 'f', 'fieldName' => 'f'},
        { 'type' => 'doubleSum', 'name' => 'x', 'fieldName' => 'x'},
        { 'type' => 'doubleSum', 'name' => 'y', 'fieldName' => 'y'}
-     ]
+     ])
    end
  
    it 'removes duplicate aggregation fields' do
      @query.long_sum(:a, :b)
      @query.long_sum(:b)
  
-     JSON.parse(@query.to_json)['aggregations'].should == [
+     expect(JSON.parse(@query.query.to_json)['aggregations']).to eq([
        { 'type' => 'longSum', 'name' => 'a', 'fieldName' => 'a'},
        { 'type' => 'longSum', 'name' => 'b', 'fieldName' => 'b'},
-     ]
+     ])
    end
  
    it 'must be chainable' do
-     q = [Druid::Query.new('test')]
+     q = [Druid::Query::Builder.new]
      q.push q[-1].query_type('a')
      q.push q[-1].data_source('b')
      q.push q[-1].group_by('c')
@@ -193,274 +245,331 @@ describe Druid::Query do
      q.push q[-1].granularity(:day)
  
      q.each do |instance|
-       instance.should == q[0]
+       expect(instance).to eq(q[0])
      end
    end
  
    it 'parses intervals from strings' do
-     @query.interval('2013-01-26T0', '2020-01-26T00:15')
-     JSON.parse(@query.to_json)['intervals'].should == ['2013-01-26T00:00:00+00:00/2020-01-26T00:15:00+00:00']
+     @query.interval('2013-01-26T00', '2020-01-26T00:15')
+     expect(JSON.parse(@query.query.to_json)['intervals']).to eq(['2013-01-26T00:00:00+00:00/2020-01-26T00:15:00+00:00'])
    end
  
    it 'takes multiple intervals' do
-     @query.intervals([['2013-01-26T0', '2020-01-26T00:15'],['2013-04-23T0', '2013-04-23T15:00']])
-     JSON.parse(@query.to_json)['intervals'].should == ["2013-01-26T00:00:00+00:00/2020-01-26T00:15:00+00:00", "2013-04-23T00:00:00+00:00/2013-04-23T15:00:00+00:00"]
+     @query.intervals([['2013-01-26T00', '2020-01-26T00:15'],['2013-04-23T00', '2013-04-23T15:00']])
+     expect(JSON.parse(@query.query.to_json)['intervals']).to eq(["2013-01-26T00:00:00+00:00/2020-01-26T00:15:00+00:00", "2013-04-23T00:00:00+00:00/2013-04-23T15:00:00+00:00"])
    end
  
    it 'accepts Time objects for intervals' do
      @query.interval(a = Time.now, b = Time.now + 1)
-     JSON.parse(@query.to_json)['intervals'].should == ["#{a.iso8601}/#{b.iso8601}"]
+     expect(JSON.parse(@query.query.to_json)['intervals']).to eq(["#{a.iso8601}/#{b.iso8601}"])
    end
  
    it 'takes a granularity from string' do
      @query.granularity('all')
-     JSON.parse(@query.to_json)['granularity'].should == 'all'
+     expect(JSON.parse(@query.query.to_json)['granularity']).to eq('all')
    end
  
    it 'should take a period' do
-     @query.granularity(:day, 'CEST')
-     @query.properties[:granularity].should == {
-       :type => "period",
-       :period => "P1D",
-       :timeZone => "Europe/Berlin"
-     }
-   end
+     @query.granularity("P1D", 'Europe/Berlin')
+     expect(@query.query.as_json['granularity']).to eq({
+       'type' => "period",
+       'period' => "P1D",
+       'timeZone' => "Europe/Berlin"
+     })
+   end
+ 
+   describe '#filter' do
+     it 'creates a in_circ filter' do
+       @query.filter{a.in_circ [[52.0,13.0], 10.0]}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({
+         "type" => "spatial",
+         "dimension" => "a",
+         "bound" => {
+           "type" => "radius",
+           "coords" => [52.0, 13.0],
+           "radius" => 10.0
+         }
+       })
+     end
  
-   it 'creates a in_circ filter' do
-     @query.filter{a.in_circ [[52.0,13.0], 10.0]}
-     JSON.parse(@query.to_json)['filter'].should == {
-       "type" => "spatial",
-       "dimension" => "a",
-       "bound" => {
-         "type" => "radius",
-         "coords" => [52.0, 13.0],
-         "radius" => 10.0
-       }
-     }
-   end
+     it 'creates a in_rec filter' do
+       @query.filter{a.in_rec [[10.0, 20.0], [30.0, 40.0]] }
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({
+         "type" => "spatial",
+         "dimension" => "a",
+         "bound" => {
+           "type" => "rectangular",
+           "minCoords" => [10.0, 20.0],
+           "maxCoords" => [30.0, 40.0]
+         }
+       })
+     end
  
-   it 'creates a in_rec filter' do
-     @query.filter{a.in_rec [[10.0, 20.0], [30.0, 40.0]] }
-     JSON.parse(@query.to_json)['filter'].should == {
-       "type" => "spatial",
-       "dimension" => "a",
-       "bound" => {
-         "type" => "rectangular",
-         "minCoords" => [10.0, 20.0],
-         "maxCoords" => [30.0, 40.0]
-       }
-     }
-   end
+     it 'creates an equals filter' do
+       @query.filter{a.eq 1}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({"type"=>"selector", "dimension"=>"a", "value"=>1})
+     end
  
-   it 'creates an equals filter' do
-     @query.filter{a.eq 1}
-     JSON.parse(@query.to_json)['filter'].should == {"type"=>"selector", "dimension"=>"a", "value"=>1}
-   end
+     it 'creates an equals filter with ==' do
+       @query.filter{a == 1}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({"type"=>"selector", "dimension"=>"a", "value"=>1})
+     end
  
-   it 'creates an equals filter with ==' do
-     @query.filter{a == 1}
-     JSON.parse(@query.to_json)['filter'].should == {"type"=>"selector", "dimension"=>"a", "value"=>1}
-   end
+     it 'creates a not filter' do
+       @query.filter{!a.eq 1}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"field" =>
+         {"type"=>"selector", "dimension"=>"a", "value"=>1},
+         "type" => "not"})
+     end
  
+     it 'creates a not filter with neq' do
+       @query.filter{a.neq 1}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"field" =>
+         {"type"=>"selector", "dimension"=>"a", "value"=>1},
+         "type" => "not"})
+     end
  
-   it 'creates a not filter' do
-     @query.filter{!a.eq 1}
-     JSON.parse(@query.to_json)['filter'].should == {"field" =>
-       {"type"=>"selector", "dimension"=>"a", "value"=>1},
-       "type" => "not"}
-   end
+     it 'creates a not filter with !=' do
+       @query.filter{a != 1}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"field" =>
+         {"type"=>"selector", "dimension"=>"a", "value"=>1},
+         "type" => "not"})
+     end
  
-   it 'creates a not filter with neq' do
-     @query.filter{a.neq 1}
-     JSON.parse(@query.to_json)['filter'].should == {"field" =>
-       {"type"=>"selector", "dimension"=>"a", "value"=>1},
-       "type" => "not"}
-   end
+     it 'creates an and filter' do
+       @query.filter{a.neq(1) & b.eq(2) & c.eq('foo')}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"fields" => [
+         {"type"=>"not", "field"=>{"type"=>"selector", "dimension"=>"a", "value"=>1}},
+         {"type"=>"selector", "dimension"=>"b", "value"=>2},
+         {"type"=>"selector", "dimension"=>"c", "value"=>"foo"}
+       ],
+       "type" => "and"})
+     end
  
-   it 'creates a not filter with !=' do
-     @query.filter{a != 1}
-     JSON.parse(@query.to_json)['filter'].should == {"field" =>
-       {"type"=>"selector", "dimension"=>"a", "value"=>1},
-       "type" => "not"}
-   end
+     it 'creates an or filter' do
+       @query.filter{a.neq(1) | b.eq(2) | c.eq('foo')}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"fields" => [
+         {"type"=>"not", "field"=> {"type"=>"selector", "dimension"=>"a", "value"=>1}},
+         {"type"=>"selector", "dimension"=>"b", "value"=>2},
+         {"type"=>"selector", "dimension"=>"c", "value"=>"foo"}
+       ],
+       "type" => "or"})
+     end
  
+     it 'chains filters' do
+       @query.filter{a.eq(1)}.filter{b.eq(2)}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"fields" => [
+         {"type"=>"selector", "dimension"=>"a", "value"=>1},
+         {"type"=>"selector", "dimension"=>"b", "value"=>2}
+       ],
+       "type" => "and"})
+     end
  
-   it 'creates an and filter' do
-     @query.filter{a.neq(1) & b.eq(2) & c.eq('foo')}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type"=>"not", "field"=>{"type"=>"selector", "dimension"=>"a", "value"=>1}},
-       {"type"=>"selector", "dimension"=>"b", "value"=>2},
-       {"type"=>"selector", "dimension"=>"c", "value"=>"foo"}
-     ],
-     "type" => "and"}
-   end
+     it 'creates filter from hash' do
+       @query.filter a:1, b:2
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"fields" => [
+         {"type"=>"selector", "dimension"=>"a", "value"=>1},
+         {"type"=>"selector", "dimension"=>"b", "value"=>2}
+       ],
+       "type" => "and"})
+     end
  
-   it 'creates an or filter' do
-     @query.filter{a.neq(1) | b.eq(2) | c.eq('foo')}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type"=>"not", "field"=> {"type"=>"selector", "dimension"=>"a", "value"=>1}},
-       {"type"=>"selector", "dimension"=>"b", "value"=>2},
-       {"type"=>"selector", "dimension"=>"c", "value"=>"foo"}
-     ],
-     "type" => "or"}
-   end
+     context 'when type argument is :nin' do
+       it 'creates nin filter from hash' do
+         @query.filter({ a: 1, b: 2 }, :nin)
+         expect(JSON.parse(@query.query.to_json)['filter']).to eq({'fields' => [
+           {'type' => 'not', 'field' => { 'dimension' => 'a', 'type' => 'selector', 'value' => 1} },
+           {'type' => 'not', 'field' => { 'dimension' => 'b', 'type' => 'selector', 'value' => 2} }
+         ],
+         'type' => 'and'})
+       end
+     end
  
-   it 'chains filters' do
-     @query.filter{a.eq(1)}.filter{b.eq(2)}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type"=>"selector", "dimension"=>"a", "value"=>1},
-       {"type"=>"selector", "dimension"=>"b", "value"=>2}
-     ],
-     "type" => "and"}
-   end
+     it 'creates an in statement with or filter' do
+       @query.filter{a.in [1,2,3]}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"fields" => [
+         {"type"=>"selector", "dimension"=>"a", "value"=>1},
+         {"type"=>"selector", "dimension"=>"a", "value"=>2},
+         {"type"=>"selector", "dimension"=>"a", "value"=>3}
+       ],
+       "type" => "or"})
+     end
  
-   it 'creates filter from hash' do
-     @query.filter a:1, b:2
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type"=>"selector", "dimension"=>"a", "value"=>1},
-       {"type"=>"selector", "dimension"=>"b", "value"=>2}
-     ],
-     "type" => "and"}
+     it 'creates a nin statement with and filter' do
+       @query.filter{a.nin [1,2,3]}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq( {"fields" => [
+         {"field"=>{"type"=>"selector", "dimension"=>"a", "value"=>1},"type" => "not"},
+         {"field"=>{"type"=>"selector", "dimension"=>"a", "value"=>2},"type" => "not"},
+         {"field"=>{"type"=>"selector", "dimension"=>"a", "value"=>3},"type" => "not"}
+       ],
+       "type" => "and"})
+     end
  
-   end
+     it 'creates a javascript with > filter' do
+       @query.filter{a > 100}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({
+         "type" => "javascript",
+         "dimension" => "a",
+         "function" => "function(a) { return(a > 100); }"
+       })
+     end
  
-   it 'creates an in statement with or filter' do
-     @query.filter{a.in [1,2,3]}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type"=>"selector", "dimension"=>"a", "value"=>1},
-       {"type"=>"selector", "dimension"=>"a", "value"=>2},
-       {"type"=>"selector", "dimension"=>"a", "value"=>3}
-     ],
-     "type" => "or"}
-   end
+     it 'creates a mixed javascript filter' do
+       @query.filter{(a >= 128) & (a != 256)}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({"fields" => [
+         {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a >= 128); }"},
+         {"field" => {"type" => "selector", "dimension" => "a", "value" => 256}, "type" => "not"}
+       ],
+       "type" => "and"})
+     end
  
-   it 'creates a nin statement with and filter' do
-     @query.filter{a.nin [1,2,3]}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"field"=>{"type"=>"selector", "dimension"=>"a", "value"=>1},"type" => "not"},
-       {"field"=>{"type"=>"selector", "dimension"=>"a", "value"=>2},"type" => "not"},
-       {"field"=>{"type"=>"selector", "dimension"=>"a", "value"=>3},"type" => "not"}
-     ],
-     "type" => "and"}
-   end
+     it 'creates a complex javascript filter' do
+       @query.filter{(a >= 4) & (a <= '128')}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({"fields" => [
+         {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a >= 4); }"},
+         {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a <= \"128\"); }"}
+       ],
+       "type" => "and"})
+     end
  
-   it 'creates a javascript with > filter' do
-     @query.filter{a > 100}
-     JSON.parse(@query.to_json)['filter'].should == {
-       "type" => "javascript",
-       "dimension" => "a",
-       "function" => "function(a) { return(a > 100); }"
-     }
-   end
+     it 'creates a custom javascript filter' do
+       @query.filter{a.javascript("function(a) { return true; }")}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({
+         "type" => "javascript",
+         "dimension" => "a",
+         "function" => "function(a) { return true; }"
+       })
+     end
  
-   it 'creates a mixed javascript filter' do
-     @query.filter{(a >= 128) & (a != 256)}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a >= 128); }"},
-       {"field" => {"type" => "selector", "dimension" => "a", "value" => 256}, "type" => "not"}
-     ],
-     "type" => "and"}
+     it 'can chain two in statements' do
+       @query.filter{a.in([1,2,3]) & b.in([1,2,3])}
+       expect(JSON.parse(@query.query.to_json)['filter']).to eq({"type"=>"and", "fields"=>[
+         {"type"=>"or", "fields"=>[
+           {"type"=>"selector", "dimension"=>"a", "value"=>1},
+           {"type"=>"selector", "dimension"=>"a", "value"=>2},
+           {"type"=>"selector", "dimension"=>"a", "value"=>3}
+         ]},
+         {"type"=>"or", "fields"=>[
+           {"type"=>"selector", "dimension"=>"b", "value"=>1},
+           {"type"=>"selector", "dimension"=>"b", "value"=>2},
+           {"type"=>"selector", "dimension"=>"b", "value"=>3}
+         ]}
+       ]})
+     end
    end
  
-   it 'creates a complex javascript filter' do
-     @query.filter{(a >= 4) & (a <= '128')}
-     JSON.parse(@query.to_json)['filter'].should == {"fields" => [
-       {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a >= 4); }"},
-       {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a <= '128'); }"}
-     ],
-     "type" => "and"}
-   end
+   describe '#having' do
+     subject(:having) { JSON.parse(@query.to_json)['having'] }
  
-   it 'can chain two in statements' do
-     @query.filter{a.in([1,2,3]) & b.in([1,2,3])}
-     JSON.parse(@query.to_json)['filter'].should == {"type"=>"and", "fields"=>[
-       {"type"=>"or", "fields"=>[
-         {"type"=>"selector", "dimension"=>"a", "value"=>1},
-         {"type"=>"selector", "dimension"=>"a", "value"=>2},
-         {"type"=>"selector", "dimension"=>"a", "value"=>3}
-       ]},
-       {"type"=>"or", "fields"=>[
-         {"type"=>"selector", "dimension"=>"b", "value"=>1},
-         {"type"=>"selector", "dimension"=>"b", "value"=>2},
-         {"type"=>"selector", "dimension"=>"b", "value"=>3}
-       ]}
-     ]}
-   end
+     it 'creates an equalTo clause using ==' do
+       @query.having { a == 100 }
+       expect(@query.query.as_json['having']).to eq({ 'type' => 'equalTo', 'aggregation' => 'a', 'value' => 100 })
+     end
  
-   describe '#having' do
-     it 'creates a greater than having clause' do
-       @query.having{a > 100}
-       JSON.parse(@query.to_json)['having'].should == {
-         "type"=>"greaterThan", "aggregation"=>"a", "value"=>100
-       }
-     end
- 
-     it 'chains having clauses with and' do
-       @query.having{a > 100}.having{b > 200}.having{c > 300}
-       JSON.parse(@query.to_json)['having'].should == {
-         "type" => "and",
-         "havingSpecs" => [
-           { "type" => "greaterThan", "aggregation" => "a", "value" => 100 },
-           { "type" => "greaterThan", "aggregation" => "b", "value" => 200 },
-           { "type" => "greaterThan", "aggregation" => "c", "value" => 300 }
-         ]
-       }
+     it 'creates a not equalTo clause using !=' do
+       @query.having { a != 100 }
+       expect(@query.query.as_json['having']).to eq({
+         'type' => 'not',
+         'havingSpec' => { 'type' => 'equalTo', 'aggregation' => 'a', 'value' => 100 },
+       })
      end
-   end
  
-   it 'does not accept in with empty array' do
-     expect { @query.filter{a.in []} }.to raise_error "Values cannot be empty"
-   end
+     it 'creates a greaterThan clause using >' do
+       @query.having { a > 100 }
+       expect(@query.query.as_json['having']).to eq({ 'type' => 'greaterThan', 'aggregation' => 'a', 'value' => 100 })
+     end
  
-   it 'does raise on invalid filter statement' do
-     expect { @query.filter{:a} }.to raise_error 'Not a valid filter'
-   end
+     it 'creates a lessThan clause using <' do
+       @query.having { a < 100 }
+       expect(@query.query.as_json['having']).to eq({ 'type' => 'lessThan', 'aggregation' => 'a', 'value' => 100 })
+     end
  
-   it 'raises if no value is passed to a filter operator' do
-     expect { @query.filter{a.eq a}.to_json}.to raise_error 'no value assigned'
-   end
+     it 'creates an add clause using &' do
+       @query.having { (a > 100) & (b > 200) }
+       expect(@query.query.as_json['having']).to eq({
+         'type' => 'and',
+         'havingSpecs' => [
+           { 'type' => 'greaterThan', 'aggregation' => 'a', 'value' => 100 },
+           { 'type' => 'greaterThan', 'aggregation' => 'b', 'value' => 200 },
+         ]
+       })
+     end
+ 
+     it 'creates an or clause using |' do
+       @query.having { (a > 100) | (b > 200) }
+       expect(@query.query.as_json['having']).to eq({
+         'type' => 'or',
+         'havingSpecs' => [
+           { 'type' => 'greaterThan', 'aggregation' => 'a', 'value' => 100 },
+           { 'type' => 'greaterThan', 'aggregation' => 'b', 'value' => 200 },
+         ]
+       })
+     end
+ 
+     it 'creates a not clause using !' do
+       @query.having { !((a == 100) & (b == 200)) }
+       expect(@query.query.as_json['having']).to eq({
+         'type' => 'not',
+         'havingSpec' => {
+           'type' => 'and',
+           'havingSpecs' => [
+             { 'type' => 'equalTo', 'aggregation' => 'a', 'value' => 100 },
+             { 'type' => 'equalTo', 'aggregation' => 'b', 'value' => 200 },
+           ]
+         }
+       })
+     end
  
-   it 'raises wrong number of arguments if filter operator is called without param' do
-     expect { @query.filter{a.eq}.to_json}.to raise_error
+     it 'combines successive calls with and operator' do
+       @query.having { a > 100 }.having { b > 200 }.having { c > 300 }
+       expect(@query.query.as_json['having']).to eq({
+         'type' => 'and',
+         'havingSpecs' => [
+           { 'type' => 'greaterThan', 'aggregation' => 'a', 'value' => 100 },
+           { 'type' => 'greaterThan', 'aggregation' => 'b', 'value' => 200 },
+           { 'type' => 'greaterThan', 'aggregation' => 'c', 'value' => 300 },
+         ]
+       })
+     end
    end
  
    it 'should query regexp using .regexp(string)' do
-     JSON.parse(@query.filter{a.regexp('[1-9].*')}.to_json)['filter'].should == {
+     expect(JSON.parse(@query.filter{a.regexp('[1-9].*')}.query.to_json)['filter']).to eq({
        "dimension"=>"a",
        "type"=>"regex",
        "pattern"=>"[1-9].*"
-     }
+     })
    end
  
    it 'should query regexp using .eq(regexp)' do
-     JSON.parse(@query.filter{a.in(/abc.*/)}.to_json)['filter'].should == {
+     expect(JSON.parse(@query.filter{a.in(/abc.*/)}.query.to_json)['filter']).to eq({
        "dimension"=>"a",
        "type"=>"regex",
        "pattern"=>"abc.*"
-     }
+     })
    end
  
    it 'should query regexp using .in([regexp])' do
-     JSON.parse(@query.filter{ a.in(['b', /[a-z].*/, 'c']) }.to_json)['filter'].should == {
+     expect(JSON.parse(@query.filter{ a.in(['b', /[a-z].*/, 'c']) }.query.to_json)['filter']).to eq({
        "type"=>"or",
        "fields"=>[
          {"dimension"=>"a", "type"=>"selector", "value"=>"b"},
          {"dimension"=>"a", "type"=>"regex", "pattern"=>"[a-z].*"},
          {"dimension"=>"a", "type"=>"selector", "value"=>"c"}
        ]
-     }
+     })
    end
  
    it 'takes type, limit and columns from limit method' do
-     @query.limit_spec(10, :a => 'ASCENDING', :b => 'DESCENDING')
-     result = JSON.parse(@query.to_json)
-     result['limitSpec'].should == {
+     @query.limit(10, :a => 'ASCENDING', :b => 'DESCENDING')
+     result = JSON.parse(@query.query.to_json)
+     expect(result['limitSpec']).to eq({
        'type' => 'default',
        'limit' => 10,
        'columns' => [
          { 'dimension' => 'a', 'direction' => 'ASCENDING'},
          { 'dimension' => 'b', 'direction' => 'DESCENDING'}
        ]
-     }
+     })
    end
  end
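Read together, the updated query spec documents the 0.9.0 DSL: a Druid::Query::Builder accumulates the query type, aggregations (long_sum, min, max, hyper_unique, cardinality, js_aggregation), Squeel-style filter and having blocks, intervals, granularity, and limit, and the built query object serializes to Druid JSON via to_json/as_json. A combined sketch composed only of calls shown above; the dimension and metric names are placeholders, and the exact composition is untested here:

    builder = Druid::Query::Builder.new
    builder.group_by(:page)                          # groupBy over a placeholder dimension
    builder.long_sum(:clicks, :impressions)          # longSum aggregators
    builder.postagg { (clicks / impressions).as ctr }
    builder.filter { country.eq('DE') & bot.neq(1) } # selector + not filters, AND-ed
    builder.having { ctr > 0.01 }                    # greaterThan having clause
    builder.interval('2013-01-26T00', '2013-01-27T00')
    builder.granularity(:all)
    builder.limit(10, :ctr => 'DESCENDING')

    puts builder.query.to_json                       # JSON ready to POST to a broker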