ruby-druid 0.1.1
- checksums.yaml +15 -0
- data/.gitignore +5 -0
- data/Gemfile +21 -0
- data/Guardfile +10 -0
- data/LICENSE +20 -0
- data/README.md +286 -0
- data/Rakefile +1 -0
- data/bin/dripl +40 -0
- data/dot_driplrc_example +12 -0
- data/lib/druid.rb +8 -0
- data/lib/druid/client.rb +95 -0
- data/lib/druid/console.rb +66 -0
- data/lib/druid/filter.rb +216 -0
- data/lib/druid/having.rb +53 -0
- data/lib/druid/post_aggregation.rb +111 -0
- data/lib/druid/query.rb +175 -0
- data/lib/druid/response_row.rb +32 -0
- data/lib/druid/zoo_handler.rb +129 -0
- data/ruby-druid.gemspec +19 -0
- data/spec/lib/client_spec.rb +69 -0
- data/spec/lib/query_spec.rb +377 -0
- data/spec/lib/zoo_handler_spec.rb +200 -0
- data/spec/spec_helper.rb +2 -0
- metadata +96 -0
data/lib/druid/response_row.rb
ADDED
@@ -0,0 +1,32 @@
module Druid

  class ResponseRow
    # turn this class into a blank-slate proxy: undefine every inherited
    # method (except the internals Ruby needs) so method_missing can
    # delegate them to the wrapped row hash
    (instance_methods + private_instance_methods).each do |method|
      unless method.to_s =~ /^(__|object_id|initialize)/
        undef_method method
      end
    end

    attr_reader :timestamp
    attr_reader :row

    def initialize(row)
      @timestamp = row['timestamp']
      # groupBy responses carry their values under 'event', other query
      # types under 'result'
      @row = row['event'] || row['result']
    end

    def method_missing(name, *args, &block)
      @row.send name, *args, &block
    end

    def to_s
      "#{@timestamp.to_s}:#{@row.to_s}"
    end

    def inspect
      "#{@timestamp.inspect}:#{@row.inspect}"
    end

  end

end
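To make the delegation concrete, here is a minimal usage sketch (the timestamp and event values are made up for illustration): a ResponseRow wraps one element of a Druid JSON response and forwards any unknown method to the underlying hash.

    require 'druid'

    raw = {
      'timestamp' => '2013-04-04T00:00:00.000Z',
      'event'     => { 'clicks' => 12, 'impressions' => 3000 }
    }

    row = Druid::ResponseRow.new(raw)
    row.timestamp   # => "2013-04-04T00:00:00.000Z"
    row['clicks']   # forwarded to the event hash => 12
    row.keys        # forwarded as well => ["clicks", "impressions"]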
data/lib/druid/zoo_handler.rb
ADDED
@@ -0,0 +1,129 @@
require 'zk'
require 'json'
require 'rest_client'

module Druid

  class ZooHandler
    def initialize(uri, opts = {})
      @zk = ZK.new uri, :chroot => :check
      @registry = Hash.new { |hash, key| hash[key] = Array.new }
      @discovery_path = opts[:discovery_path] || '/discoveryPath'
      @watched_services = Hash.new

      init_zookeeper
    end

    def init_zookeeper
      @zk.on_expired_session do
        init_zookeeper
      end

      @zk.register(@discovery_path, :only => :child) do |event|
        check_services
      end

      check_services
    end

    def close!
      @zk.close!
    end

    def check_services
      zk_services = @zk.children @discovery_path, :watch => true

      # remove deprecated services
      (services - zk_services).each do |old_service|
        @registry.delete old_service
        if @watched_services.include? old_service
          @watched_services.delete(old_service).unregister
        end
      end

      zk_services.each do |service|
        check_service service unless @watched_services.include? service
      end
    end

    def check_service(service)
      unless @watched_services.include? service
        watchPath = "#{@discovery_path}/#{service}"
        @watched_services[service] = @zk.register(watchPath, :only => :child) do |event|
          old_handler = @watched_services.delete(service)
          if old_handler
            old_handler.unregister
          end
          check_service service
        end

        known = @registry[service].map{ |node| node[:name] } rescue []
        live = @zk.children(watchPath, :watch => true)

        # copy the unchanged entries
        new_list = @registry[service].select{ |node| live.include? node[:name] } rescue []

        # verify the new entries to be living brokers
        (live - known).each do |name|
          info = @zk.get "#{watchPath}/#{name}"
          node = JSON.parse(info[0])
          uri = "http://#{node['address']}:#{node['port']}/druid/v2/"

          begin
            check_uri = "#{uri}datasources/"

            check = RestClient::Request.execute({
              :method => :get,
              :url => check_uri,
              :timeout => 5,
              :open_timeout => 5
            })

            if check.code == 200
              new_list.push({
                :name => name,
                :uri => uri,
                :data_sources => JSON.parse(check.to_str)
              })
            end
          rescue
          end
        end

        if !new_list.empty?
          # poor man's load balancing
          @registry[service] = new_list.shuffle
        else
          # don't show services w/o active brokers
          @registry.delete service
        end
      end
    end

    def services
      @registry.keys
    end

    def data_sources
      result = Hash.new { |hash, key| hash[key] = [] }

      @registry.each do |service, brokers|
        brokers.each do |broker|
          broker[:data_sources].each do |data_source|
            result["#{service}/#{data_source}"] << broker[:uri]
          end
        end
      end
      result.each do |source, uris|
        result[source] = uris.sample if uris.respond_to?(:sample)
      end

      result
    end

    def to_s
      @registry.to_s
    end
  end
end
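A hedged usage sketch of the handler above; the ZooKeeper address and discovery path are placeholders, and a reachable ensemble with Druid brokers announced beneath that path is required for anything to show up:

    require 'druid'

    zk = Druid::ZooHandler.new('localhost:2181', :discovery_path => '/druid/discoveryPath')

    zk.services       # services with at least one live broker, e.g. ["madvertise"]
    zk.data_sources   # maps "service/dataSource" to one randomly sampled broker URI

    zk.close!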
data/ruby-druid.gemspec
ADDED
@@ -0,0 +1,19 @@
require 'rake'

Gem::Specification.new do |gem|
  gem.name = 'ruby-druid'
  gem.version = '0.1.1'
  gem.date = '2013-08-01'
  gem.summary = 'Ruby client for druid'
  gem.description = 'Ruby client for metamx druid'
  gem.authors = 'The LiquidM Team'
  gem.email = 'tech@liquidm.com'
  gem.homepage = 'https://github.com/madvertise/ruby-druid'

  gem.files = `git ls-files`.split("\n")
  gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  gem.require_paths = ['lib']

  gem.add_dependency 'zk'
  gem.add_dependency 'rest-client'
end
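For reference, the spec above can be loaded and inspected with stock RubyGems (run from the gem root, since the file list is produced by `git ls-files`):

    spec = Gem::Specification.load('ruby-druid.gemspec')
    spec.name                      # => "ruby-druid"
    spec.version.to_s              # => "0.1.1"
    spec.dependencies.map(&:name)  # => ["zk", "rest-client"]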
data/spec/lib/client_spec.rb
ADDED
@@ -0,0 +1,69 @@
require 'spec_helper'

describe Druid::Client do

  it 'calls zookeeper on initialize' do
    Druid::ZooHandler.should_receive(:new)
    Druid::Client.new('test_uri', zk_keepalive: true)
  end

  it 'creates a query' do
    Druid::ZooHandler.stub!(:new).and_return(mock(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com'}, :close! => true))
    Druid::Client.new('test_uri', zk_keepalive: true).query('test/test').should be_a Druid::Query
  end

  it 'sends query if block is given' do
    Druid::ZooHandler.stub!(:new).and_return(mock(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com'}, :close! => true))
    client = Druid::Client.new('test_uri', zk_keepalive: true)
    client.should_receive(:send)
    client.query('test/test') do
      group(:group1)
    end
  end

  it 'parses response on 200' do
    stub_request(:post, "http://www.example.com/druid/v2").
      with(:body => "{\"dataSource\":\"test\",\"granularity\":\"all\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"]}",
           :headers => {'Accept'=>'*/*', 'Content-Type'=>'application/json', 'User-Agent'=>'Ruby'}).
      to_return(:status => 200, :body => "[]", :headers => {})
    Druid::ZooHandler.stub!(:new).and_return(mock(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com/druid/v2'}, :close! => true))
    client = Druid::Client.new('test_uri', zk_keepalive: true)
    JSON.should_receive(:parse).and_return([])
    client.send(client.query('test/test').interval("2013-04-04", "2013-04-04"))
  end

  it 'raises on request failure' do
    stub_request(:post, "http://www.example.com/druid/v2").
      with(:body => "{\"dataSource\":\"test\",\"granularity\":\"all\",\"intervals\":[\"2013-04-04T00:00:00+00:00/2013-04-04T00:00:00+00:00\"]}",
           :headers => {'Accept'=>'*/*', 'Content-Type'=>'application/json', 'User-Agent'=>'Ruby'}).
      to_return(:status => 666, :body => "Strange server error", :headers => {})
    Druid::ZooHandler.stub!(:new).and_return(mock(Druid::ZooHandler, :data_sources => {'test/test' => 'http://www.example.com/druid/v2'}, :close! => true))
    client = Druid::Client.new('test_uri', zk_keepalive: true)
    expect { client.send(client.query('test/test').interval("2013-04-04", "2013-04-04")) }.to raise_error(RuntimeError, /Request failed: 666: Strange server error/)
  end

  it 'should have a static setup' do
    client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'mock_uri'})
    client.data_sources.should == ['madvertise/mock']
    client.data_source_uri('madvertise/mock').should == URI('mock_uri')
  end

  it 'should report dimensions of a data source correctly' do
    stub_request(:get, "http://www.example.com/druid/v2/datasources/mock").
      with(:headers => {'Accept'=>'*/*', 'User-Agent'=>'Ruby'}).
      to_return(:status => 200, :body => '{"dimensions":["d1","d2","d3"],"metrics":["m1", "m2"]}')

    client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'http://www.example.com/druid/v2/'})
    client.data_source('madvertise/mock').dimensions.should == ["d1","d2","d3"]
  end

  it 'should report metrics of a data source correctly' do
    stub_request(:get, "http://www.example.com/druid/v2/datasources/mock").
      with(:headers => {'Accept'=>'*/*', 'User-Agent'=>'Ruby'}).
      to_return(:status => 200, :body => '{"dimensions":["d1","d2","d3"],"metrics":["m1", "m2"]}')

    client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'http://www.example.com/druid/v2/'})
    client.data_source('madvertise/mock').metrics.should == ["m1","m2"]
  end

end
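Read together, these examples pin down the client workflow: discover brokers via ZooKeeper (or a :static_setup hash), build a query for a "service/dataSource" pair, and send it. A hedged sketch of that flow outside the test harness, with placeholder URIs and names, assuming the client wraps result rows as response_row.rb suggests:

    require 'druid'

    # :static_setup skips ZooKeeper discovery; both values are placeholders
    client = Druid::Client.new('test_uri', :static_setup => {'madvertise/mock' => 'http://localhost:8082/druid/v2/'})

    query = client.query('madvertise/mock')
                  .group_by(:device)
                  .long_sum(:clicks)
                  .interval('2013-04-04', '2013-04-05')

    rows = client.send(query)   # POSTs the query JSON to the broker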
data/spec/lib/query_spec.rb
ADDED
@@ -0,0 +1,377 @@
require "spec_helper"

describe Druid::Query do

  before :each do
    @query = Druid::Query.new('test')
  end

  it 'takes a datasource in the constructor' do
    query = Druid::Query.new('test')
    JSON.parse(query.to_json)['dataSource'].should == 'test'
  end

  it 'takes a query type' do
    @query.query_type('query_type')
    JSON.parse(@query.to_json)['queryType'].should == 'query_type'
  end

  it 'sets query type by group_by' do
    @query.group_by()
    JSON.parse(@query.to_json)['queryType'].should == 'groupBy'
  end

  it 'sets query type to timeseries' do
    @query.time_series()
    JSON.parse(@query.to_json)['queryType'].should == 'timeseries'
  end

  it 'takes dimensions from group_by method' do
    @query.group_by(:a, :b, :c)
    JSON.parse(@query.to_json)['dimensions'].should == ['a', 'b', 'c']
  end

  it 'builds a post aggregation with a constant right' do
    @query.postagg{(a + 1).as ctr }

    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"+",
      "fields"=>
        [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
         {"type"=>"constant", "value"=>1}],
      "name"=>"ctr"}]
  end

  it 'builds a + post aggregation' do
    @query.postagg{(a + b).as ctr }
    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"+",
      "fields"=>
        [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
         {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
      "name"=>"ctr"}]
  end

  it 'builds a - post aggregation' do
    @query.postagg{(a - b).as ctr }
    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"-",
      "fields"=>
        [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
         {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
      "name"=>"ctr"}]
  end

  it 'builds a * post aggregation' do
    @query.postagg{(a * b).as ctr }
    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"*",
      "fields"=>
        [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
         {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
      "name"=>"ctr"}]
  end

  it 'builds a / post aggregation' do
    @query.postagg{(a / b).as ctr }
    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"/",
      "fields"=>
        [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
         {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
      "name"=>"ctr"}]
  end

  it 'builds a complex post aggregation' do
    @query.postagg{((a / b) * 1000).as ctr }
    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"*",
      "fields"=>
        [{"type"=>"arithmetic", "fn"=>"/", "fields"=>
          [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
           {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}]},
         {"type"=>"constant", "value"=>1000}],
      "name"=>"ctr"}]
  end

  it 'adds fields required by the postagg operation to longsum' do
    @query.postagg{ (a/b).as c }
    JSON.parse(@query.to_json)['aggregations'].should == [{"type"=>"longSum", "name"=>"a", "fieldName"=>"a"},
      {"type"=>"longSum", "name"=>"b", "fieldName"=>"b"}]
  end

  it 'chains aggregations' do
    @query.postagg{(a / b).as ctr }.postagg{(b / a).as rtc }

    JSON.parse(@query.to_json)['postAggregations'].should == [{"type"=>"arithmetic",
      "fn"=>"/",
      "fields"=>
        [{"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"},
         {"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"}],
      "name"=>"ctr"},
      {"type"=>"arithmetic",
       "fn"=>"/",
       "fields"=>
         [{"type"=>"fieldAccess", "name"=>"b", "fieldName"=>"b"},
          {"type"=>"fieldAccess", "name"=>"a", "fieldName"=>"a"}],
       "name"=>"rtc"}
    ]
  end

  it 'builds aggregations on long_sum' do
    @query.long_sum(:a, :b, :c)
    JSON.parse(@query.to_json)['aggregations'].should == [
      { 'type' => 'longSum', 'name' => 'a', 'fieldName' => 'a'},
      { 'type' => 'longSum', 'name' => 'b', 'fieldName' => 'b'},
      { 'type' => 'longSum', 'name' => 'c', 'fieldName' => 'c'}
    ]
  end

  it 'appends long_sum properties to the aggregations on calling long_sum again' do
    @query.long_sum(:a, :b, :c)
    @query.double_sum(:x, :y)
    @query.long_sum(:d, :e, :f)
    JSON.parse(@query.to_json)['aggregations'].sort{|x,y| x['name'] <=> y['name']}.should == [
      { 'type' => 'longSum', 'name' => 'a', 'fieldName' => 'a'},
      { 'type' => 'longSum', 'name' => 'b', 'fieldName' => 'b'},
      { 'type' => 'longSum', 'name' => 'c', 'fieldName' => 'c'},
      { 'type' => 'longSum', 'name' => 'd', 'fieldName' => 'd'},
      { 'type' => 'longSum', 'name' => 'e', 'fieldName' => 'e'},
      { 'type' => 'longSum', 'name' => 'f', 'fieldName' => 'f'},
      { 'type' => 'doubleSum', 'name' => 'x', 'fieldName' => 'x'},
      { 'type' => 'doubleSum', 'name' => 'y', 'fieldName' => 'y'}
    ]
  end

  it 'removes duplicate aggregation fields' do
    @query.long_sum(:a, :b)
    @query.long_sum(:b)

    JSON.parse(@query.to_json)['aggregations'].should == [
      { 'type' => 'longSum', 'name' => 'a', 'fieldName' => 'a'},
      { 'type' => 'longSum', 'name' => 'b', 'fieldName' => 'b'},
    ]
  end

  it 'must be chainable' do
    q = [Druid::Query.new('test')]
    q.push q[-1].query_type('a')
    q.push q[-1].data_source('b')
    q.push q[-1].group_by('c')
    q.push q[-1].long_sum('d')
    q.push q[-1].double_sum('e')
    q.push q[-1].filter{a.eq 1}
    q.push q[-1].interval("2013-01-26T00", "2020-01-26T00:15")
    q.push q[-1].granularity(:day)

    q.each do |instance|
      instance.should == q[0]
    end
  end

  it 'parses intervals from strings' do
    @query.interval('2013-01-26T0', '2020-01-26T00:15')
    JSON.parse(@query.to_json)['intervals'].should == ['2013-01-26T00:00:00+00:00/2020-01-26T00:15:00+00:00']
  end

  it 'takes multiple intervals' do
    @query.intervals([['2013-01-26T0', '2020-01-26T00:15'],['2013-04-23T0', '2013-04-23T15:00']])
    JSON.parse(@query.to_json)['intervals'].should == ["2013-01-26T00:00:00+00:00/2020-01-26T00:15:00+00:00", "2013-04-23T00:00:00+00:00/2013-04-23T15:00:00+00:00"]
  end

  it 'accepts Time objects for intervals' do
    @query.interval(a = Time.now, b = Time.now + 1)
    JSON.parse(@query.to_json)['intervals'].should == ["#{a.iso8601}/#{b.iso8601}"]
  end

  it 'takes a granularity from string' do
    @query.granularity('all')
    JSON.parse(@query.to_json)['granularity'].should == 'all'
  end

  it 'should take a period' do
    @query.granularity(:day, 'CEST')
    @query.properties[:granularity].should == {
      :type => "period",
      :period => "P1D",
      :timeZone => "Europe/Berlin"
    }
  end

  it 'creates an equals filter' do
    @query.filter{a.eq 1}
    JSON.parse(@query.to_json)['filter'].should == {"type"=>"selector", "dimension"=>"a", "value"=>1}
  end

  it 'creates an equals filter with ==' do
    @query.filter{a == 1}
    JSON.parse(@query.to_json)['filter'].should == {"type"=>"selector", "dimension"=>"a", "value"=>1}
  end

  it 'creates a not filter' do
    @query.filter{!a.eq 1}
    JSON.parse(@query.to_json)['filter'].should == {"field" =>
      {"type"=>"selector", "dimension"=>"a", "value"=>1},
      "type" => "not"}
  end

  it 'creates a not filter with neq' do
    @query.filter{a.neq 1}
    JSON.parse(@query.to_json)['filter'].should == {"field" =>
      {"type"=>"selector", "dimension"=>"a", "value"=>1},
      "type" => "not"}
  end

  it 'creates a not filter with !=' do
    @query.filter{a != 1}
    JSON.parse(@query.to_json)['filter'].should == {"field" =>
      {"type"=>"selector", "dimension"=>"a", "value"=>1},
      "type" => "not"}
  end

  it 'creates an and filter' do
    @query.filter{a.neq(1) & b.eq(2) & c.eq('foo')}
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type"=>"not", "field"=>{"type"=>"selector", "dimension"=>"a", "value"=>1}},
      {"type"=>"selector", "dimension"=>"b", "value"=>2},
      {"type"=>"selector", "dimension"=>"c", "value"=>"foo"}
    ],
    "type" => "and"}
  end

  it 'creates an or filter' do
    @query.filter{a.neq(1) | b.eq(2) | c.eq('foo')}
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type"=>"not", "field"=> {"type"=>"selector", "dimension"=>"a", "value"=>1}},
      {"type"=>"selector", "dimension"=>"b", "value"=>2},
      {"type"=>"selector", "dimension"=>"c", "value"=>"foo"}
    ],
    "type" => "or"}
  end

  it 'chains filters' do
    @query.filter{a.eq(1)}.filter{b.eq(2)}
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type"=>"selector", "dimension"=>"a", "value"=>1},
      {"type"=>"selector", "dimension"=>"b", "value"=>2}
    ],
    "type" => "and"}
  end

  it 'creates a filter from a hash' do
    @query.filter a:1, b:2
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type"=>"selector", "dimension"=>"a", "value"=>1},
      {"type"=>"selector", "dimension"=>"b", "value"=>2}
    ],
    "type" => "and"}
  end

  it 'creates an in statement with an or filter' do
    @query.filter{a.in [1,2,3]}
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type"=>"selector", "dimension"=>"a", "value"=>1},
      {"type"=>"selector", "dimension"=>"a", "value"=>2},
      {"type"=>"selector", "dimension"=>"a", "value"=>3}
    ],
    "type" => "or"}
  end

  it 'creates a javascript filter with >' do
    @query.filter{a > 100}
    JSON.parse(@query.to_json)['filter'].should == {
      "type" => "javascript",
      "dimension" => "a",
      "function" => "function(a) { return(a > 100); }"
    }
  end

  it 'creates a mixed javascript filter' do
    @query.filter{(a >= 128) & (a != 256)}
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a >= 128); }"},
      {"field" => {"type" => "selector", "dimension" => "a", "value" => 256}, "type" => "not"}
    ],
    "type" => "and"}
  end

  it 'creates a complex javascript filter' do
    @query.filter{(a >= 4) & (a <= '128')}
    JSON.parse(@query.to_json)['filter'].should == {"fields" => [
      {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a >= 4); }"},
      {"type" => "javascript", "dimension" => "a", "function" => "function(a) { return(a <= '128'); }"}
    ],
    "type" => "and"}
  end

  it 'can chain two in statements' do
    @query.filter{a.in([1,2,3]) & b.in([1,2,3])}
    JSON.parse(@query.to_json)['filter'].should == {"type"=>"and", "fields"=>[
      {"type"=>"or", "fields"=>[
        {"type"=>"selector", "dimension"=>"a", "value"=>1},
        {"type"=>"selector", "dimension"=>"a", "value"=>2},
        {"type"=>"selector", "dimension"=>"a", "value"=>3}
      ]},
      {"type"=>"or", "fields"=>[
        {"type"=>"selector", "dimension"=>"b", "value"=>1},
        {"type"=>"selector", "dimension"=>"b", "value"=>2},
        {"type"=>"selector", "dimension"=>"b", "value"=>3}
      ]}
    ]}
  end

  it 'creates a greater than having clause' do
    @query.having{a > 100}
    JSON.parse(@query.to_json)['having'].should == {
      "type"=>"greaterThan", "aggregation"=>"a", "value"=>100
    }
  end

  it 'does not accept in with an empty array' do
    expect { @query.filter{a.in []} }.to raise_error "Must provide non-empty array in in()"
  end

  it 'raises on an invalid filter statement' do
    expect { @query.filter{:a} }.to raise_error 'Not a valid filter'
  end

  it 'raises if no value is passed to a filter operator' do
    expect { @query.filter{a.eq a}.to_json }.to raise_error 'no value assigned'
  end

  it 'raises wrong number of arguments if a filter operator is called without a param' do
    expect { @query.filter{a.eq}.to_json }.to raise_error 'wrong number of arguments (0 for 1)'
  end

  it 'should query regexp using .regexp(string)' do
    JSON.parse(@query.filter{a.regexp('[1-9].*')}.to_json)['filter'].should == {
      "dimension"=>"a",
      "type"=>"regex",
      "pattern"=>"[1-9].*"
    }
  end

  it 'should query regexp using .in(regexp)' do
    JSON.parse(@query.filter{a.in(/abc.*/)}.to_json)['filter'].should == {
      "dimension"=>"a",
      "type"=>"regex",
      "pattern"=>"abc.*"
    }
  end

  it 'should query regexp using .in([regexp])' do
    JSON.parse(@query.filter{ a.in(['b', /[a-z].*/, 'c']) }.to_json)['filter'].should == {
      "type"=>"or",
      "fields"=>[
        {"dimension"=>"a", "type"=>"selector", "value"=>"b"},
        {"dimension"=>"a", "type"=>"regex", "pattern"=>"[a-z].*"},
        {"dimension"=>"a", "type"=>"selector", "value"=>"c"}
      ]
    }
  end

end
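Putting the DSL pieces together, here is a hedged end-to-end sketch of a query built the way these specs exercise it (the dimension and metric names are illustrative):

    require 'druid'

    query = Druid::Query.new('test')
    query.group_by(:device)
         .long_sum(:clicks, :impressions)
         .postagg { ((clicks / impressions) * 1000).as ctr }
         .filter { device.neq 'bot' }
         .granularity(:day)
         .interval('2013-04-04', '2013-04-05')

    puts query.to_json   # the groupBy JSON that would be POSTed to a broker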