data_store 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +3 -0
- data/.rvmrc +1 -0
- data/.travis.yml +27 -0
- data/CHANGELOG.md +10 -0
- data/Gemfile +30 -0
- data/Guardfile +10 -0
- data/LICENSE +1 -1
- data/README.md +91 -2
- data/REMINDERS +8 -0
- data/Rakefile +13 -1
- data/config/database.yml +16 -0
- data/data_store.gemspec +1 -4
- data/db/data_store.db +0 -0
- data/lib/data_store/average_calculator.rb +93 -0
- data/lib/data_store/base.rb +108 -0
- data/lib/data_store/configuration.rb +66 -0
- data/lib/data_store/connector.rb +85 -0
- data/lib/data_store/definitions.rb +42 -0
- data/lib/data_store/table.rb +119 -0
- data/lib/data_store/version.rb +1 -1
- data/lib/data_store.rb +75 -1
- data/test/average_calculator_test.rb +196 -0
- data/test/configuration_test.rb +48 -0
- data/test/connector_test.rb +32 -0
- data/test/data_store_test.rb +117 -0
- data/test/integration_test.rb +66 -0
- data/test/table_test.rb +181 -0
- data/test/test_helper.rb +32 -0
- metadata +56 -66

data/lib/data_store/table.rb ADDED

```diff
@@ -0,0 +1,119 @@
+module DataStore
+
+  class Table
+
+    include Celluloid
+
+    attr_reader :identifier, :table_index, :original_value
+
+    # Initialize the table by passing an identifier
+    def initialize(identifier, table_index = 0)
+      @identifier  = identifier
+      @table_index = table_index
+    end
+
+    # Return the corresponding parent class, i.e. the settings from the data_stores table
+    def parent
+      @parent ||= DataStore::Base.find(identifier: identifier)
+    end
+
+    # Return a table object enriched with Sequel::Model behaviour
+    def model
+      @model ||= Class.new(Sequel::Model(dataset))
+    end
+
+    # Add a new datapoint to the table
+    # In case of a counter type, store the difference between current and last value
+    # and calculate average values on the fly according to the compression schema
+    #
+    # Options (hash):
+    # * created: timestamp
+    # * type: gauge or counter
+    # * table_index: in which compressed table
+    def add(value, options = {})
+      created = options[:created] || Time.now.utc.to_f
+      type    = options[:type] || parent.type
+      @table_index = options[:table_index] if options[:table_index]
+      push(value, type, created)
+    end
+
+    # Return the most recent datapoint added
+    def last
+      model.order(:created).last
+    end
+
+    # Return the total number of datapoints in the table
+    def count
+      dataset.count
+    end
+
+    # Return the corresponding dataset with the datapoints
+    def dataset
+      database[table_name]
+    end
+
+    # Fetch the corresponding datapoints
+    #
+    # Options:
+    # * :from
+    # * :till
+    #
+    def fetch(options)
+      datapoints = []
+      query = parent.db[timeslot(options)].where{created >= options[:from]}.where{created <= options[:till]}.order(:created)
+      query.all.map{|record| datapoints << [record[:value], record[:created]]}
+      datapoints
+    end
+
+    # Import original datapoints, mostly to recreate compression tables
+    def import(datapoints)
+      datapoints.each do |data|
+        add!(data[0], table_index: 0, created: data[1])
+      end
+    end
+
+    private
+
+    def timeslot(options)
+      distance = options[:till] - options[:from]
+      index = 0
+      parent.time_borders.each_with_index do |value, idx|
+        index = idx
+        break if value >= distance
+      end
+      parent.table_names[index]
+    end
+
+    def push(value, type, created)
+      value = difference_with_previous(value) if type.to_s == 'counter'
+      datapoint = { value: value, created: created }
+      datapoint[:original_value] = original_value if original_value
+      dataset << datapoint
+      calculate_average_values
+    end
+
+    def calculate_average_values
+      calculator = AverageCalculator.new(self)
+      calculator.perform
+    end
+
+    def difference_with_previous(value)
+      @original_value = value
+      unless last.nil?
+        value = value - last[:original_value]
+        last.delete if last[:value] == last[:original_value]
+      end
+      value
+    end
+
+    def database
+      @database ||= DataStore::Base.db
+    end
+
+    def table_name
+      parent.table_names[table_index]
+    end
+
+  end
+
+end
```
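
For orientation, a minimal usage sketch of the new `DataStore::Table` API shown above (assuming `DataStore.configure` has already run; the identifier, names and values are purely illustrative):

```ruby
# Illustrative only: the calls mirror the methods added in table.rb above
# (add, last, count, fetch); they are not lifted from the gem's own docs.
DataStore::Base.create(identifier: 1,
                       type: 'gauge',
                       name: 'Electra',
                       compression_schema: [2, 2, 2])

table = DataStore::Table.new(1)        # wraps the ds_1 datapoint table

table.add(230.5)                       # timestamped with Time.now.utc.to_f
table.add(231.0, created: Time.now.utc.to_f)

table.count                            # => 2
table.last[:value]                     # => 231.0

now = Time.now.utc.to_f
table.fetch(from: now - 3600, till: now)
# => [[value, created], ...] read from the table whose time border fits the range
```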
data/lib/data_store/version.rb CHANGED
data/lib/data_store.rb CHANGED

```diff
@@ -1,5 +1,79 @@
+# encoding: UTF-8
+
+require 'sequel'
+require 'yaml'
+require 'logger'
+require 'celluloid'
+
+$: << File.expand_path('../', __FILE__)
+$: << File.expand_path('../data_store/', __FILE__)
+
+Sequel.extension :migration
+Sequel::Model.plugin :timestamps, :force=>true, :update_on_create=>true
+
 require 'data_store/version'
+require 'data_store/connector'
+require 'data_store/configuration'
+require 'data_store/definitions'
+require 'data_store/table'
+require 'data_store/average_calculator'
+
+module Kernel
+  def suppress_warnings
+    original_verbosity = $VERBOSE
+    $VERBOSE = nil
+    result = yield
+    $VERBOSE = original_verbosity
+    return result
+  end
+end

 module DataStore
-
+
+  # Base class will be redefined during configure
+  # in order to assign Sequel::Model behaviour to it
+  # with the correctly defined (or configured) database connector
+  class Base
+  end
+
+  class << self
+
+    # Configure DataStore
+    #
+    # Example
+    #   DataStore.configure do |config|
+    #     config.prefix   = 'data_store_'
+    #     config.database = :postgres
+    #   end
+    def configure
+      yield(configuration)
+      define_base_class
+    end
+
+    # The configuration object. See {Configuration}
+    def configuration
+      @configuration ||= Configuration.new
+    end
+
+    private
+
+    def define_base_class
+      connector = DataStore::Connector.new
+      set_logger(connector.database)
+      connector.create_table!
+      suppress_warnings { self.const_set(:Base, Class.new(Sequel::Model(connector.dataset)))}
+      load 'base.rb'
+      connector.database.disconnect
+    end
+
+    def set_logger(db)
+      if configuration.enable_logging
+        logger = Logger.new(configuration.log_file)
+        logger.level = configuration.log_level
+        db.logger = logger
+      end
+    end
+
+  end
+
 end
```
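
The comment block above is the public entry point; written out as a runnable call (the option names come from `Configuration` as exercised by the tests below, while the values here are illustrative):

```ruby
require 'data_store'

DataStore.configure do |config|
  config.prefix         = 'ds_'       # prefix for the generated datapoint tables
  config.database       = :postgres   # adapter key looked up in config/database.yml
  config.enable_logging = true
  config.log_file       = $stdout
  config.log_level      = Logger::ERROR
end

# configure yields the Configuration object and then rebuilds DataStore::Base as a
# Sequel::Model bound to the data_stores table (see define_base_class above).
```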
data/test/average_calculator_test.rb ADDED

```diff
@@ -0,0 +1,196 @@
+require File.expand_path '../test_helper', __FILE__
+
+class AverageCalculatorTest < Test::Unit::TestCase
+
+  context 'AverageCalculator for a gauge type' do
+
+    setup do
+      DataStore::Base.db.tables.each do |table|
+        DataStore::Base.db.drop_table(table)
+      end
+
+      DataStore::Connector.new.reset!
+      @record = DataStore::Base.create(identifier: 1,
+                                       type: 'gauge',
+                                       name: 'Electra',
+                                       frequency: 10,
+                                       description: 'Actual usage of electra in the home',
+                                       compression_schema: [2,2,2])
+
+      @table = DataStore::Table.new(1)
+      @calculator = DataStore::AverageCalculator.new(@table)
+    end
+
+    should 'be valid' do
+      assert @calculator
+    end
+
+    should 'return the identifier' do
+      assert_equal 1, @calculator.identifier
+    end
+
+    should 'calculate the average value for the first' do
+      @table.model.insert(value: 10, created: 0)
+      @table.model.insert(value: 11, created: 10)
+
+      @calculator.perform
+      assert_equal 10.5, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+    end
+
+    context 'Scenario: adding values according expected frequency' do
+      should 'calculate the average values' do
+        time_now_utc_returns(10)
+
+        @table.model.insert(value: 10, created: 0)
+        @table.model.insert(value: 11, created: 10)
+
+        @calculator.perform
+
+        assert_equal 10.5, DataStore::Base.db[:ds_1_2].order(:created).first[:value]
+
+        @table.model.insert(value: 12, created: 20)
+        @table.model.insert(value: 13, created: 30)
+
+        time_now_utc_returns(30)
+
+        @calculator.perform
+
+        assert_equal 12.5, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+        assert_equal 11.5, DataStore::Base.db[:ds_1_4].order(:created).last[:value]
+
+        @table.model.insert(value: 14, created: 40)
+        @table.model.insert(value: 15, created: 50)
+
+        time_now_utc_returns(50)
+
+        @calculator.perform
+
+        assert_equal 14.5, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+
+        @table.model.insert(value: 16, created: 60)
+        @table.model.insert(value: 17, created: 70)
+
+        time_now_utc_returns(70)
+
+        @calculator.perform
+
+        assert_equal 16.5, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+        assert_equal 15.5, DataStore::Base.db[:ds_1_4].order(:created).last[:value]
+        assert_equal 13.5, DataStore::Base.db[:ds_1_8].order(:created).last[:value]
+
+        assert_equal [:data_stores, :ds_1, :ds_1_2, :ds_1_4, :ds_1_8], DataStore::Base.db.tables.sort
+      end
+    end
+
+    context 'Scenario: adding values with an unexpected failure' do
+      should 'calculate the average values' do
+        time_now_utc_returns(10)
+
+        @table.model.insert(value: 10, created: 0)
+        @table.model.insert(value: 11, created: 10)
+
+        @calculator.perform
+
+        assert_equal 10.5, DataStore::Base.db[:ds_1_2].order(:created).first[:value]
+
+        @table.model.insert(value: 12, created: 20)
+
+        # No value at timestamp 30!
+        @table.model.insert(value: 14, created: 40)
+
+        time_now_utc_returns(40)
+
+        @calculator.perform
+
+        assert_equal 13.0, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+        assert_equal 11.75, DataStore::Base.db[:ds_1_4].order(:created).last[:value]
+
+        @table.model.insert(value: 15, created: 50)
+        @table.model.insert(value: 16, created: 60)
+
+        time_now_utc_returns(60)
+
+        @calculator.perform
+
+        assert_equal 15.5, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+
+        @table.model.insert(value: 17, created: 70)
+        @table.model.insert(value: 18, created: 80)
+
+        time_now_utc_returns(80)
+
+        @calculator.perform
+
+        assert_equal 17.5, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+        assert_equal 16.5, DataStore::Base.db[:ds_1_4].order(:created).last[:value]
+        assert_equal 14.125, DataStore::Base.db[:ds_1_8].order(:created).last[:value]
+      end
+    end
+  end
+
+  context 'AverageCalculator for a counter type' do
+
+    setup do
+      DataStore::Base.db.tables.each do |table|
+        DataStore::Base.db.drop_table(table)
+      end
+
+      DataStore::Connector.new.reset!
+      @record = DataStore::Base.create(identifier: 1,
+                                       type: 'counter',
+                                       name: 'Electra',
+                                       frequency: 10,
+                                       description: 'Actual usage of gas in the home',
+                                       compression_schema: [2])
+
+      @table = DataStore::Table.new(1)
+      @calculator = DataStore::AverageCalculator.new(@table)
+    end
+
+    should 'be valid' do
+      assert @calculator
+    end
+
+    should 'return the identifier' do
+      assert_equal 1, @calculator.identifier
+    end
+
+    should 'calculate the average value' do
+      @table.model.insert(value: 10, original_value: 1010, created: 10)
+      @table.model.insert(value: 10, original_value: 1020, created: 20)
+
+      @calculator.perform
+
+      assert_equal 10.0, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+    end
+
+    should 'calculate the average values according to compression_schema' do
+      @table.model.insert(value: 10, original_value: 1010, created: 10)
+      @table.model.insert(value: 10, original_value: 1020, created: 20)
+
+      time_now_utc_returns(20)
+      @calculator.perform
+
+      @table.model.insert(value: 20, original_value: 1040, created: 30)
+      @table.model.insert(value: 30, original_value: 1070, created: 40)
+
+      time_now_utc_returns(40)
+      @calculator.perform
+
+      assert_equal 25.0, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+
+      assert_equal [:data_stores, :ds_1, :ds_1_2], DataStore::Base.db.tables.sort
+    end
+
+    should 'calculate the average value by ignoring the original values' do
+      @table.model.insert(value: 20, original_value: 12345, created: 10)
+      @table.model.insert(value: 30, original_value: 67890, created: 20)
+
+      @calculator.perform
+
+      assert_equal 25.0, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+    end
+
+  end
+
+end
```
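
The gauge assertions above all follow one arithmetic rule: with `compression_schema: [2,2,2]` every 2 datapoints in `ds_1` are averaged into one row of `ds_1_2`, every 2 of those into `ds_1_4`, and every 2 of those into `ds_1_8`. A short sketch of that cascade with the same numbers (illustrative, not part of the gem):

```ruby
raw = [10, 11, 12, 13, 14, 15, 16, 17]       # values inserted into ds_1

avg     = ->(pair) { pair.sum / pair.size.to_f }
level_2 = raw.each_slice(2).map(&avg)        # ds_1_2 => [10.5, 12.5, 14.5, 16.5]
level_4 = level_2.each_slice(2).map(&avg)    # ds_1_4 => [11.5, 15.5]
level_8 = level_4.each_slice(2).map(&avg)    # ds_1_8 => [13.5]
```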
data/test/configuration_test.rb ADDED

```diff
@@ -0,0 +1,48 @@
+require File.expand_path '../test_helper', __FILE__
+
+class ConfigurationTest < Test::Unit::TestCase
+
+  context 'Configuration' do
+
+    should "provide default values" do
+      assert_config_default :prefix, 'ds_'
+      assert_config_default :database, :postgres
+      assert_config_default :compression_schema, [6,5,3,4,4,3]
+      assert_config_default :frequency, 10
+      assert_config_default :frequency_tolerance, 0.05
+      assert_config_default :maximum_datapoints, 800
+      assert_config_default :data_type, :double
+      assert_config_default :database_config_file, File.expand_path('../../config/database.yml', __FILE__)
+      assert_config_default :enable_logging, true
+      assert_config_default :log_file, $stdout
+      assert_config_default :log_level, Logger::ERROR
+    end
+
+    should "allow values to be overwritten" do
+      assert_config_overridable :prefix
+      assert_config_overridable :database
+      assert_config_overridable :compression_schema
+      assert_config_overridable :frequency
+      assert_config_overridable :frequency_tolerance
+      assert_config_overridable :maximum_datapoints
+      assert_config_overridable :data_type
+      assert_config_overridable :database_config_file
+      assert_config_overridable :enable_logging
+      assert_config_overridable :log_file
+      assert_config_overridable :log_level
+    end
+
+  end
+
+  def assert_config_default(option, default_value, config = nil)
+    config ||= DataStore::Configuration.new
+    assert_equal default_value, config.send(option)
+  end
+
+  def assert_config_overridable(option, value = 'a value')
+    config = DataStore::Configuration.new
+    config.send(:"#{option}=", value)
+    assert_equal value, config.send(option)
+  end
+
+end
```
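
The same defaults can be read back or overwritten on a `Configuration` instance directly, which is exactly what the two helpers above do via `send`; for example (values illustrative):

```ruby
config = DataStore::Configuration.new

config.frequency            # => 10
config.compression_schema   # => [6, 5, 3, 4, 4, 3]
config.prefix               # => 'ds_'

config.compression_schema = [2, 2]   # override, as assert_config_overridable does
config.compression_schema            # => [2, 2]
```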
data/test/connector_test.rb ADDED

```diff
@@ -0,0 +1,32 @@
+require File.expand_path '../test_helper', __FILE__
+
+class ConnectorTest < Test::Unit::TestCase
+
+  context 'DataStore::Connector connection with database' do
+
+    setup do
+      @connector = DataStore::Connector.new
+    end
+
+    should 'trigger the migration to create the database table' do
+      migration = mock
+      DataStore.expects(:create_data_stores).returns(migration)
+      migration.expects(:apply)
+      @connector.create_table!
+    end
+
+    should 'reset by dropping and recreating the database table' do
+      migration = mock
+      @connector.expects(:drop_table!)
+      DataStore.expects(:create_data_stores).returns(migration)
+      migration.expects(:apply)
+      @connector.reset!
+    end
+
+    teardown do
+      @connector.database.disconnect
+    end
+
+  end
+
+end
```
data/test/data_store_test.rb ADDED

```diff
@@ -0,0 +1,117 @@
+require File.expand_path '../test_helper', __FILE__
+
+class DataStoreTest < Test::Unit::TestCase
+
+  context 'DataStore configuration' do
+
+    should 'have a configuration object' do
+      assert_equal true, DataStore.configuration.is_a?(DataStore::Configuration)
+    end
+
+    should 'be able to define the configuration' do
+      assert_equal ENV['DB'] || :postgres, DataStore.configuration.database
+    end
+
+  end
+
+  context 'DataStore::Base general' do
+
+    setup do
+      DataStore::Connector.new.reset!
+    end
+
+    context 'with added behaviour through Sequel::Model' do
+
+      setup do
+        @record = DataStore::Base.create(identifier: 1,
+                                         type: 'gauge',
+                                         name: 'Electra',
+                                         description: 'Actual usage of electra in the home',
+                                         compression_schema: [5,4,3])
+      end
+
+      should 'be valid' do
+        assert @record
+      end
+
+      should 'have added a record to the database' do
+        assert_equal 1, DataStore::Base.count
+      end
+
+      should 'have created the necessary tables' do
+        assert_equal 0, DataStore::Base.db[:ds_1].count
+        assert_equal 0, DataStore::Base.db[:ds_1_5].count
+        assert_equal 0, DataStore::Base.db[:ds_1_20].count
+        assert_equal 0, DataStore::Base.db[:ds_1_60].count
+      end
+
+      should 'return all table_names' do
+        assert_equal [:ds_1, :ds_1_5, :ds_1_20, :ds_1_60], @record.table_names
+      end
+
+      should 'return its time_borders' do
+        assert_equal [8000, 40000, 160000, 480000], @record.time_borders
+      end
+
+      should 'return its attributes' do
+        record = DataStore::Base.order(:created_at).last
+        assert_equal 1, record.identifier
+        assert_equal 'gauge', record.type
+        assert_equal 'Electra', record.name
+        assert_equal 'Actual usage of electra in the home', record.description
+        assert_equal [5,4,3], @record.compression_schema
+      end
+
+      should 'return default values if not set' do
+        assert_equal 10, @record.frequency
+        assert_equal 'double', @record.data_type
+        assert_equal 800, @record.maximum_datapoints
+      end
+
+      should 'have timestamps' do
+        assert @record.created_at
+        assert @record.updated_at
+      end
+
+      should 'create a record with a uniq identifier' do
+        assert_raise 'Sequel::DatabaseError(<SQLite3::ConstraintException: column identifier is not unique>)' do
+          DataStore::Base.create(identifier: 1, type: 'gauge', name: 'Electra')
+        end
+      end
+
+      should 'be able to update a record' do
+        @record.name = 'Gas'
+        @record.save
+        assert_equal 'Gas', DataStore::Base.order(:created_at).last.name
+      end
+
+    end
+
+    should 'create with the correct data type for value' do
+      record = DataStore::Base.create(identifier: 2, type: 'gauge', name: 'Electra', data_type: 'integer')
+      assert_equal :integer, Sequel::Model(DataStore::Base.db[:ds_2]).db_schema[:value][:type]
+      record.destroy
+    end
+
+    context 'handling of database tables for the datapoints' do
+
+      should 'create the necessary datapoint tables on create' do
+        DataStore::Base.any_instance.expects(:drop_tables!)
+        DataStore::Base.any_instance.expects(:create_tables!)
+        DataStore::Base.create(identifier: 1, type: 'gauge', name: 'Electra')
+      end
+
+      should 'destroy the corresponding datapoint tables on destroy' do
+        record = DataStore::Base.create(identifier: 1, type: 'gauge', name: 'Electra')
+        record.destroy
+        assert_raise { DataStore::Base.db[:ds_1].count }
+        assert_raise { DataStore::Base.db[:ds_5].count }
+        assert_raise { DataStore::Base.db[:ds_20].count }
+        assert_raise { DataStore::Base.db[:ds_60].count }
+      end
+
+    end
+
+  end
+
+end
```
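
The `table_names` and `time_borders` expectations are derived from the record's settings: the cumulative products of `compression_schema: [5,4,3]` are 5, 20 and 60, naming the tables `ds_1`, `ds_1_5`, `ds_1_20` and `ds_1_60`, and each border equals that factor times `frequency` (10) times `maximum_datapoints` (800). The gem's own computation lives in `definitions.rb`, which this diff lists but does not display; the sketch below only reproduces the arithmetic implied by the assertions:

```ruby
frequency          = 10
maximum_datapoints = 800
schema             = [5, 4, 3]

factors = schema.inject([1]) { |acc, step| acc << acc.last * step }  # => [1, 5, 20, 60]

table_names = factors.map { |f| f == 1 ? :ds_1 : :"ds_1_#{f}" }
# => [:ds_1, :ds_1_5, :ds_1_20, :ds_1_60]

time_borders = factors.map { |f| f * frequency * maximum_datapoints }
# => [8000, 40000, 160000, 480000]
```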
data/test/integration_test.rb ADDED

```diff
@@ -0,0 +1,66 @@
+require File.expand_path '../test_helper', __FILE__
+
+class IntegrationTest < Test::Unit::TestCase
+
+  context 'Integration test by adding datapoints through table object' do
+
+    setup do
+      DataStore::Connector.new.reset!
+      @record = DataStore::Base.create(identifier: 1,
+                                       type: 'counter',
+                                       name: 'Electra',
+                                       frequency: 10,
+                                       description: 'Actual usage of gas in the home',
+                                       compression_schema: [2,2])
+
+      @table = DataStore::Table.new(1)
+      @calculator = DataStore::AverageCalculator.new(@table)
+    end
+
+    should 'also calculate the average value' do
+      time_now_utc_returns(0)
+      @table.add(1000)
+
+      time_now_utc_returns(10)
+      @table.add(1010)
+
+      time_now_utc_returns(20)
+      @table.add(1020)
+
+      assert_equal 10.0, DataStore::Base.db[:ds_1_2].order(:created).last[:value]
+    end
+
+  end
+
+  context 'Import datapoints (gauge type)' do
+    setup do
+      start_time = 1349042407.00000
+      values = [2380.0, 2370.0, 2380.0, 2380.0, 2390.0, 2390.0, 2390.0, 2380.0, 2380.0, 2380.0, 2380.0, 2370.0, 2370.0, 2370.0,
+                2380.0, 2380.0, 2380.0, 2380.0, 230.0, 230.0, 230.0, 230.0, 230.0, 230.0]
+      @datapoints = []
+      values.each do |value|
+        @datapoints << [value, start_time]
+        start_time += rand(9.95..10.05)
+      end
+      DataStore::Connector.new.reset!
+      @record = DataStore::Base.create(identifier: 1,
+                                       type: 'gauge',
+                                       name: 'Electra',
+                                       description: 'Actual usage of electra in the home',
+                                       compression_schema: [2,3])
+      @table = DataStore::Table.new(1)
+    end
+
+    should 'store the data and calculate all averages' do
+      @table.import(@datapoints)
+      assert_equal 24, @table.model.db[:ds_1].count
+      assert_equal 12, @table.model.db[:ds_1_2].count
+      assert_equal 4, @table.model.db[:ds_1_6].count
+
+      assert_equal 1842, @table.model.db[:ds_1].avg(:value).round
+      assert_equal 1842, @table.model.db[:ds_1_2].avg(:value).round
+      assert_equal 1842, @table.model.db[:ds_1_6].avg(:value).round
+    end
+  end
+
+end
```
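
The counter context above exercises the `difference_with_previous` logic from `table.rb`: `add` receives raw meter readings, but what gets stored and averaged are the deltas between consecutive readings, with the raw reading preserved in `original_value`. In short (values taken from the test, comments are an informal reading of the code above):

```ruby
table = DataStore::Table.new(1)   # identifier 1 is registered as type 'counter'

table.add(1000)   # seed reading, stored as-is until a delta can be computed
table.add(1010)   # stores 10 (1010 - 1000); the seed row is removed by difference_with_previous
table.add(1020)   # stores 10 (1020 - 1010)

# With compression_schema [2,2] the first level averages pairs of stored deltas,
# so ds_1_2 ends at (10 + 10) / 2.0 => 10.0, matching the assertion above.
```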