sensor 0.0.1
This diff shows the content of a publicly released package version as it appears in its public registry. It is provided for informational purposes only.
- checksums.yaml +15 -0
- data/.env.example +17 -0
- data/.gitignore +19 -0
- data/.rspec +1 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +29 -0
- data/Rakefile +10 -0
- data/certs/cacert.pem +3554 -0
- data/lib/sensor/actuator/analytics_retrieval.rb +68 -0
- data/lib/sensor/actuator/facebook_retrieval.rb +82 -0
- data/lib/sensor/actuator/twitter_retrieval.rb +59 -0
- data/lib/sensor/output_distribution/flow_dock.rb +54 -0
- data/lib/sensor/payload.rb +35 -0
- data/lib/sensor/rake_task.rb +38 -0
- data/lib/sensor/time_range.rb +36 -0
- data/lib/sensor/version.rb +3 -0
- data/lib/sensor.rb +14 -0
- data/sensor.gemspec +36 -0
- data/spec/payload_spec.rb +13 -0
- data/spec/sensor/actuator/analytics_retrieval_spec.rb +16 -0
- data/spec/sensor/actuator/facebook_retrieval_spec.rb +14 -0
- data/spec/sensor/actuator/twitter_retrieval_spec.rb +16 -0
- data/spec/sensor/time_range_spec.rb +60 -0
- data/spec/spec_helper.rb +22 -0
- metadata +258 -0

data/lib/sensor/actuator/analytics_retrieval.rb
ADDED
@@ -0,0 +1,68 @@
+require 'garb'
+
+CA_CERT_FILE = File.join(File.dirname(__FILE__),
+                         '../../../certs/cacert.pem')
+
+module Sensor
+  module Actuator
+    class AnalyticsRetrieval
+      extend Garb::Model
+
+      metrics :visitors,
+        :visit_bounce_rate,
+        :avg_time_on_site,
+        :pageviews
+
+      def initialize(time_range)
+        @time_range = time_range
+      end
+
+      def acquire
+        results = self.class.results(profile,
+          start_date: @time_range.start_date,
+          end_date: @time_range.end_date)
+        if results
+          map = results.to_a[0].marshal_dump.inject({}) do |map, key_values|
+            map[key_values[0]] = key_values[1].to_f
+            map
+          end
+          {
+            analytics: map
+          }
+        else
+          {}
+        end
+      end
+
+      protected
+      def client
+        if !@client
+          session = Garb::Session.login(analytics_user, analytics_password)
+        end
+
+      end
+
+      def analytics_user
+        ENV['GOOGLE_ANALYTICS_USER']
+      end
+
+      def analytics_password
+        ENV['GOOGLE_ANALYTICS_PASSWORD']
+      end
+
+      def analytics_profile_id
+        ENV['GOOGLE_ANALYTICS_WEB_PROPERTY_ID']
+      end
+
+      def profile
+        if !@profile
+          client
+          @profile = Garb::Management::Profile.all.detect do |p|
+            p.web_property_id == analytics_profile_id
+          end
+        end
+        @profile
+      end
+    end
+  end
+end
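
The actuator above can also be exercised on its own, outside of Sensor::Payload. Below is a minimal sketch (not taken from the gem's docs), assuming the gem and its dependencies are installed and the GOOGLE_ANALYTICS_* variables are set, for example via dotenv; the require lines mirror the gem's own spec for this class:

```ruby
require 'sensor'
Sensor.require("analytics_retrieval")

range   = Sensor::TimeRange.new('yesterday', 'today')
metrics = Sensor::Actuator::AnalyticsRetrieval.new(range).acquire

# acquire returns {} when Analytics reports nothing; otherwise every metric
# value has been coerced with to_f in the code above.
metrics.fetch(:analytics, {})[:visitors]
```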

data/lib/sensor/actuator/facebook_retrieval.rb
ADDED
@@ -0,0 +1,82 @@
+require 'koala'
+module Sensor
+  module Actuator
+    class FacebookRetrieval
+      def initialize(time_range)
+        @time_range = time_range
+      end
+
+      def acquire
+        results = graph.get_object("TheLaunchAcademy")
+        likes = results["likes"]
+        talking = results["talking_about_count"]
+        posts = graph.get_object("TheLaunchAcademy/posts",
+          since: format_date(@time_range.start_date),
+          until: format_date(@time_range.end_date),
+          limit: 200
+        )
+
+        post_likes = posts.inject(0) do |post_likes, post|
+          if post["likes"] && post["likes"]["data"]
+            post_likes += post["likes"]["data"].size
+          else
+            post_likes
+          end
+        end
+
+        {
+          facebook: {
+            likes: likes,
+            talking: talking,
+            posts: posts.size,
+            post_likes: post_likes
+          }
+        }
+      end
+
+      protected
+      def format_date(date)
+        date.strftime("%Y-%m-%d")
+      end
+
+      def graph
+        @graph ||= Koala::Facebook::API.new(access_token)
+      end
+
+      def oauth_url
+        "https://www.facebook.com/dialog/oauth?" +
+          "client_id=#{api_key}&client_secret=#{secret_key}" +
+          "scope=manage_pages,read_insights&redirect_uri=#{redirect_uri}&" +
+          "response_type=token"
+      end
+
+      def token_exchange_url
+        "https://www.facebook.com/oauth/access_token?" +
+          "grant_type=fb_exchange_token&" +
+          "client_id=#{api_key}&" +
+          "client_secret=#{secret_key}&" +
+          "fb_exchange_token=#{short_lived_token}"
+      end
+
+      def redirect_uri
+        ENV['FACEBOOK_REDIRECT_URI']
+      end
+
+      def short_lived_token
+        ENV['FACEBOOK_SHORT_LIVED_TOKEN']
+      end
+
+      def access_token
+        ENV['FACEBOOK_TOKEN']
+      end
+
+      def api_key
+        ENV['FACEBOOK_API_KEY']
+      end
+
+      def secret_key
+        ENV['FACEBOOK_SECRET_KEY']
+      end
+    end
+  end
+end

data/lib/sensor/actuator/twitter_retrieval.rb
ADDED
@@ -0,0 +1,59 @@
+require 'twitter'
+
+module Sensor
+  module Actuator
+    class TwitterRetrieval
+      def initialize(time_range)
+        @time_range = time_range
+      end
+
+      def acquire
+        map = {}
+        map.tap do |map|
+          map[:twitter] = {}
+          map[:twitter][:followers] = client.user.followers_count
+          map[:twitter][:tweets] = tweet_count
+          map[:twitter][:mentions] = mention_count
+        end
+      end
+
+      protected
+      def format_date(date)
+        date.strftime("%Y-%m-%d")
+      end
+
+      def search_options
+        {
+          count: 100,
+          since: format_date(@time_range.start_date),
+          until: format_date(@time_range.end_date)
+        }
+      end
+
+      def tweet_count
+        search = client.search("from:launchacademy_",
+          search_options
+        )
+        search.entries.count
+      end
+
+      def mention_count
+        search = client.search("to:launchacademy_",
+          search_options)
+        search.entries.count
+      end
+
+      def client
+        if !@client
+          @client = Twitter::REST::Client.new do |config|
+            config.consumer_key = ENV["TWITTER_CONSUMER_KEY"]
+            config.consumer_secret = ENV["TWITTER_CONSUMER_SECRET"]
+            config.access_token = ENV["TWITTER_ACCESS_TOKEN"]
+            config.access_token_secret = ENV["TWITTER_ACCESS_TOKEN_SECRET"]
+          end
+        end
+        @client
+      end
+    end
+  end
+end

data/lib/sensor/output_distribution/flow_dock.rb
ADDED
@@ -0,0 +1,54 @@
+require 'flowdock'
+
+module Sensor
+  module OutputDistribution
+    class FlowDock
+      def initialize(payload)
+        @payload = payload
+        @time_range = payload.time_range
+      end
+
+      def distribute
+        flow.push_to_team_inbox({
+          subject: "Sensor Output: " +
+            "#{format_date(@payload.start_date)} - " +
+            "#{format_date(@payload.end_date)}",
+          content: content
+        })
+      end
+
+      protected
+      def format_date(date)
+        date.strftime("%m/%d/%Y")
+      end
+
+      def content
+        @payload.data.inject("") do |str, key_value|
+          str += "<h3>#{key_value[0]}</h3>"
+
+          str += "<ul>"
+          key_value[1].each do |attribute, value|
+            str += "<li><strong>#{attribute}</strong>: #{value}</li>"
+          end
+          str += "</ul>"
+        end
+      end
+
+      def flow
+        @flow ||= ::Flowdock::Flow.new(
+          api_token: token,
+          source: 'Sensor',
+          from: {
+            name: 'Sensor',
+            address: 'hello@sensor.com'
+          }
+        )
+      end
+
+      def token
+        ENV['FLOWDOCK_TOKEN']
+      end
+    end
+  end
+end
+
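
To see what the protected `content` helper above actually renders, here is an illustrative snippet that is not part of the gem; FakePayload is a stand-in for Sensor::Payload (only `data` and `time_range` are needed here), and `send` is used purely to reach the protected method:

```ruby
require 'sensor'
Sensor.require("flow_dock")

# Stand-in for Sensor::Payload; FlowDock#initialize also reads #time_range.
FakePayload = Struct.new(:data, :time_range)
payload = FakePayload.new({ twitter: { followers: 100, tweets: 5 } }, nil)

Sensor::OutputDistribution::FlowDock.new(payload).send(:content)
# => "<h3>twitter</h3><ul><li><strong>followers</strong>: 100</li><li><strong>tweets</strong>: 5</li></ul>"
```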

data/lib/sensor/payload.rb
ADDED
@@ -0,0 +1,35 @@
+require 'sensor'
+
+Sensor.require("analytics_retrieval")
+Sensor.require("twitter_retrieval")
+Sensor.require("facebook_retrieval")
+Sensor.require("flow_dock")
+
+module Sensor
+  class Payload
+    attr_reader :data, :time_range
+    delegate :start_date, to: :time_range
+    delegate :end_date, to: :time_range
+
+    def initialize(time_range)
+      @time_range = time_range
+      @data = {}
+    end
+
+    def acquire
+      [
+        Sensor::Actuator::AnalyticsRetrieval,
+        Sensor::Actuator::TwitterRetrieval,
+        Sensor::Actuator::FacebookRetrieval
+      ].each do |actuator|
+        @data.merge!(actuator.new(@time_range).acquire)
+      end
+
+      @data
+    end
+
+    def distribute
+      Sensor::OutputDistribution::FlowDock.new(self).distribute
+    end
+  end
+end
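
Taken together, a likely end-to-end use of the payload above looks something like the sketch below. This is an assumption based on the classes shown in this diff, not on the gem's README (which is not included here), and it presumes the credentials listed in data/.env.example are present in ENV:

```ruby
require 'sensor'

time_range = Sensor::TimeRange.new('yesterday', 'yesterday')
payload    = Sensor::Payload.new(time_range)

payload.acquire    # => { analytics: {...}, twitter: {...}, facebook: {...} }
payload.distribute # pushes the summary to the configured Flowdock team inbox
```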

data/lib/sensor/rake_task.rb
ADDED
@@ -0,0 +1,38 @@
+require 'sensor/payload'
+
+module Sensor
+  class RakeTask < Rake::TaskLib
+    def initialize(name)
+      desc "run sensors for yesterday"
+      namespace name do
+        task :yesterday do
+          yesterday = Date.today - 1
+          time_range = Sensor::TimeRange.new(yesterday, yesterday)
+          payload = Sensor::Payload.new(time_range)
+          payload.acquire
+          payload.distribute
+        end
+
+        desc "run sensors for last week (Mon-Sun)"
+        task :last_week do
+          today = Date.today
+          week_start = today - today.wday - 6
+          week_end = week_start + 6
+
+          time_range = Sensor::TimeRange.new(week_start, week_end)
+          payload = Sensor::Payload.new(time_range)
+          payload.acquire
+          payload.distribute
+        end
+
+        desc "run sensors for an arbitrary range of dates"
+        task :custom do
+          time_range = Sensor::TimeRange.new(ENV['START'], ENV['END'])
+          payload = Sensor::Payload.new(time_range)
+          payload.acquire
+          payload.distribute
+        end
+      end
+    end
+  end
+end
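
A host project would presumably wire this task library into its Rakefile along these lines (a sketch, not shown in the diff; the task names follow from the `namespace name` call above):

```ruby
# Rakefile of a consuming project
require 'rake/tasklib'
require 'sensor/rake_task'

Sensor::RakeTask.new(:sensor)

# Defines:
#   rake sensor:yesterday   # yesterday only
#   rake sensor:last_week   # Monday through Sunday of last week
#   rake sensor:custom      # reads START and END from the environment,
#                           # e.g. START='2014-01-06' END='2014-01-12'
```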

data/lib/sensor/time_range.rb
ADDED
@@ -0,0 +1,36 @@
+require 'chronic'
+
+module Sensor
+  class TimeRange
+    attr_reader :start, :end
+    def initialize(start_date, end_date)
+      @start = timify(start_date)
+      @end = timify(end_date, true)
+    end
+
+    def start_date
+      datify(@start)
+    end
+
+    def end_date
+      datify(@end)
+    end
+
+    protected
+    def datify(time)
+      Date.new(time.year, time.month, time.day)
+    end
+
+    def timify(date, as_end = false)
+      if date.kind_of?(String)
+        Chronic.parse(date)
+      elsif date.kind_of?(Date)
+        as_end ? (date + 1).to_time - 1 : date.to_time
+      elsif date.kind_of?(Time)
+        date
+      else
+        raise "Unknown date parameter: #{date}"
+      end
+    end
+  end
+end
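
A short illustration of the coercion rules in `timify` and `datify` above (my example, assuming the chronic gem is installed):

```ruby
require 'date'
require 'sensor/time_range'

# Strings are handed to Chronic.parse
Sensor::TimeRange.new('yesterday', 'today')

# Dates are widened: the start becomes midnight, the end becomes 23:59:59
range = Sensor::TimeRange.new(Date.today, Date.today)
range.start      # => today at 00:00:00 (a Time)
range.end        # => today at 23:59:59 (a Time)
range.start_date # => Date.today

# Anything else raises
Sensor::TimeRange.new(12345, nil) rescue $! # => #<RuntimeError: Unknown date parameter: 12345>
```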

data/lib/sensor.rb
ADDED
@@ -0,0 +1,14 @@
+module Sensor
+  def self.require(path)
+    begin
+      super("sensor/actuator/#{path}")
+    rescue LoadError
+      super("sensor/output_distribution/#{path}")
+    end
+  end
+end
+
+require "sensor/version"
+require "sensor/time_range"
+
+require "sensor/payload"
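
The `require` override above first tries the actuator directory and falls back to output_distribution on LoadError; this is how payload.rb and the specs load classes by short name. For illustration:

```ruby
require 'sensor'

Sensor.require("twitter_retrieval") # resolves to sensor/actuator/twitter_retrieval
Sensor.require("flow_dock")         # actuator lookup raises LoadError, so it
                                    # falls back to sensor/output_distribution/flow_dock
```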

data/sensor.gemspec
ADDED
@@ -0,0 +1,36 @@
+# coding: utf-8
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+require 'sensor/version'
+
+Gem::Specification.new do |spec|
+  spec.name          = "sensor"
+  spec.version       = Sensor::VERSION
+  spec.authors       = ["Dan Pickett"]
+  spec.email         = ["dan.pickett@launchware.com"]
+  spec.description   = %q{Sensor provides a set of marketing metrics}
+  spec.summary       = %q{Marketing Metrics Acquisition Tool}
+  spec.homepage      = ""
+  spec.license       = "MIT"
+
+  spec.files         = `git ls-files`.split($/)
+  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
+  spec.require_paths = ["lib"]
+
+  spec.add_dependency "chronic"
+  spec.add_dependency "garb", "~> 0.9.1"
+  spec.add_dependency "twitter", "~> 5.0.0"
+  spec.add_dependency "koala"
+  spec.add_dependency "flowdock"
+
+  spec.add_development_dependency "bundler", "~> 1.3"
+  spec.add_development_dependency "rake"
+
+  spec.add_development_dependency "rspec"
+  spec.add_development_dependency "vcr"
+  spec.add_development_dependency "webmock"
+  spec.add_development_dependency "pry"
+  spec.add_development_dependency "timecop"
+  spec.add_development_dependency "dotenv"
+end

data/spec/payload_spec.rb
ADDED
@@ -0,0 +1,13 @@
+require 'spec_helper'
+
+describe Sensor::Payload, :vcr do
+
+  it 'fetches relevant data' do
+    date_range = Sensor::TimeRange.new('yesterday', 'today')
+    act = Sensor::Payload.new(date_range)
+    results = act.acquire
+    expect(results).to_not be_nil
+
+    act.distribute
+  end
+end

data/spec/sensor/actuator/analytics_retrieval_spec.rb
ADDED
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+Sensor.require("analytics_retrieval")
+
+describe Sensor::Actuator::AnalyticsRetrieval, vcr: true do
+  it 'returns the count of visits' do
+    Timecop.freeze(Chronic.parse('01/01/13')) do
+      date_range = Sensor::TimeRange.new('yesterday', 'today')
+      act = Sensor::Actuator::AnalyticsRetrieval.new(date_range)
+      results = act.acquire
+      expect(results).to_not be_nil
+      expect(results[:analytics]).to_not be_nil
+      expect(results[:analytics][:visitors]).to_not be_nil
+    end
+  end
+end

data/spec/sensor/actuator/facebook_retrieval_spec.rb
ADDED
@@ -0,0 +1,14 @@
+require 'spec_helper'
+
+Sensor.require("facebook_retrieval")
+
+describe Sensor::Actuator::FacebookRetrieval, :vcr do
+  it 'fetches relevant stats' do
+    date_range = Sensor::TimeRange.new('yesterday', 'today')
+    act = Sensor::Actuator::FacebookRetrieval.new(date_range)
+    results = act.acquire
+    expect(results).to_not be_nil
+    expect(results[:facebook]).to_not be_nil
+    expect(results[:facebook][:likes]).to_not be_nil
+  end
+end

data/spec/sensor/actuator/twitter_retrieval_spec.rb
ADDED
@@ -0,0 +1,16 @@
+require 'spec_helper'
+
+Sensor.require("twitter_retrieval")
+
+describe Sensor::Actuator::TwitterRetrieval, :vcr do
+  it 'fetches relevant stats' do
+    date_range = Sensor::TimeRange.new('yesterday', 'today')
+    act = Sensor::Actuator::TwitterRetrieval.new(date_range)
+    results = act.acquire
+    expect(results).to_not be_nil
+    expect(results[:twitter]).to_not be_nil
+    expect(results[:twitter][:followers]).to_not be_nil
+    expect(results[:twitter][:tweets]).to_not be_nil
+    expect(results[:twitter][:mentions]).to_not be_nil
+  end
+end

data/spec/sensor/time_range_spec.rb
ADDED
@@ -0,0 +1,60 @@
+require 'spec_helper'
+
+describe Sensor::TimeRange, :vcr do
+  before do
+    Timecop.freeze
+  end
+
+  after do
+    Timecop.return
+  end
+
+  let(:day) { 24 * 60 * 60 }
+
+  let(:start_time) do
+    twenty_four_hours_ago = Time.now - day
+    Date.new(twenty_four_hours_ago.year,
+      twenty_four_hours_ago.month,
+      twenty_four_hours_ago.day).to_time
+  end
+
+  let(:end_time) do
+    now = Time.now
+    (Date.new(now.year, now.month, now.day) + 1).to_time - 1
+  end
+
+  let(:date_range) { Sensor::TimeRange.new(start_time, end_time) }
+
+  context 'instantiated with Time objects' do
+    it 'has a start correlated to the constructor argument' do
+      expect(date_range.start).to eql(start_time)
+    end
+
+    it 'has an end correlated to the constructor argument' do
+      expect(date_range.end).to eql(end_time)
+    end
+
+    it 'has a start date' do
+      date = Date.new(start_time.year, start_time.month, start_time.day)
+      expect(date_range.start_date).to eql(date)
+    end
+
+    it 'has an end date' do
+      date = Date.new(end_time.year, end_time.month, end_time.day)
+      expect(date_range.end_date).to eql(date)
+    end
+  end
+
+  context 'instantiated with strings' do
+    let(:string_range) { Sensor::TimeRange.new('yesterday at 0', 'today at 11:59:59PM') }
+
+    it 'parses a string for the start' do
+      expect(string_range.start).to eql(start_time)
+    end
+
+    it 'parses a string for the end' do
+      expect(string_range.end).to eql(end_time)
+    end
+  end
+
+end

data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,22 @@
+require 'sensor'
+
+require 'rspec'
+
+require 'timecop'
+
+require 'vcr'
+require 'webmock'
+
+require 'dotenv'
+Dotenv.load
+
+VCR.configure do |c|
+  c.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
+  c.hook_into :webmock # or :fakeweb
+  c.default_cassette_options = { :record => :new_episodes }
+  c.configure_rspec_metadata!
+end
+
+RSpec.configure do |c|
+  c.treat_symbols_as_metadata_keys_with_true_values = true
+end