plantwatchdog 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/History.txt +4 -0
- data/License.txt +674 -0
- data/Manifest.txt +42 -0
- data/README.txt +91 -0
- data/Rakefile +22 -0
- data/bin/plantwatchdog +8 -0
- data/bin/upload_measurements +2 -0
- data/config.ru +35 -0
- data/config/app_config.yaml +10 -0
- data/lib/plantwatchdog/aggregation.rb +220 -0
- data/lib/plantwatchdog/aggregation_methods.rb +90 -0
- data/lib/plantwatchdog/data.rb +126 -0
- data/lib/plantwatchdog/db.rb +37 -0
- data/lib/plantwatchdog/gems.rb +5 -0
- data/lib/plantwatchdog/main.rb +76 -0
- data/lib/plantwatchdog/model.rb +442 -0
- data/lib/plantwatchdog/sinatra.rb +206 -0
- data/public/images/arrow-down.gif +0 -0
- data/public/images/arrow-left.gif +0 -0
- data/public/images/arrow-right.gif +0 -0
- data/public/images/arrow-up.gif +0 -0
- data/public/images/spinner.gif +0 -0
- data/public/images/tabs.png +0 -0
- data/public/js/customflot.js +120 -0
- data/public/js/jquery-1.3.2.min.js +19 -0
- data/public/js/jquery.flot.crosshair.js +157 -0
- data/public/js/jquery.flot.js +2119 -0
- data/public/js/jquery.flot.navigate.js +272 -0
- data/public/js/jquery.flot.selection.js +299 -0
- data/public/js/select-chain.js +71 -0
- data/public/js/tools.tabs-1.0.4.js +285 -0
- data/public/tabs.css +87 -0
- data/sample/solar/create_solar.rb +31 -0
- data/sample/solar/measurements/client.sqlite3 +0 -0
- data/sample/solar/static/devices.yml +17 -0
- data/sample/solar/static/metadata.yml +30 -0
- data/sample/solar/static/plants.yml +3 -0
- data/sample/solar/static/users.yml +4 -0
- data/sample/solar/upload_measurements +26 -0
- data/templates/graph.erb +134 -0
- data/templates/index.erb +24 -0
- data/templates/monthly_graph.erb +41 -0
- data/test/test_aggregation.rb +161 -0
- data/test/test_aggregation_methods.rb +50 -0
- data/test/test_base.rb +83 -0
- data/test/test_data.rb +118 -0
- data/test/test_model.rb +142 -0
- data/test/test_sync.rb +71 -0
- data/test/test_web.rb +87 -0
- metadata +167 -0
data/test/test_aggregation_methods.rb
ADDED
@@ -0,0 +1,50 @@
+$:.unshift File.join(File.dirname(__FILE__),"..","lib")
+$:.unshift File.join(File.dirname(__FILE__),"..")
+require 'rubygems'
+require 'plantwatchdog/model'
+require 'plantwatchdog/aggregation_methods'
+require 'test/unit'
+require 'test/test_base'
+
+module PlantWatchdog
+  class AggregationMethodsTest < Test::Unit::TestCase
+    include TestUtil
+    include Aggregation::Methods
+    def test_methods
+      assert_equal(true, Aggregation::Methods.call(:avg, [] ).nan?)
+      assert_equal(1, Aggregation::Methods.call("avg", [1] ))
+      assert_equal(1.5, Aggregation::Methods.call("avg", [1,2] ))
+      assert_equal(2.0, Aggregation::Methods.call("avg", [1.0,3.0] ))
+      assert_nil(Aggregation::Methods.call("unknown", [1.0,3.0] ))
+    end
+
+    def convert a
+      f = s = []
+      a.each {|p| f << p.first; s << p.last}
+      [f, s]
+    end
+
+    def test_each_prior
+      block = Proc.new {|x,y| [x,y]}
+      assert_equal([], [].each_prior(&block))
+      assert_equal([], [1].each_prior( &block))
+      assert_equal([[1,2]], [1,2].each_prior(&block))
+      assert_equal([[1,2],[2,3]], [1,2,3].each_prior(&block))
+    end
+
+    def test_integrate
+      a=[]
+      integrate = Proc.new { Aggregation::Methods.integrate(a.transpose.first, a.transpose.last) }
+      assert_equal(0, Aggregation::Methods.integrate([], []) )
+      a << [0,0]
+      assert_equal(0, integrate.call )
+      a << [1,1]
+      assert_equal(0.5, integrate.call )
+      a << [1.5,1.5]
+      assert_equal(1.125, integrate.call )
+      a << [5,5]
+      assert_equal(12.5, integrate.call )
+    end
+
+  end
+end
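The test_integrate assertions above are consistent with trapezoidal-rule integration over (x, y) pairs, and test_each_prior implies an Array extension that yields each element together with its predecessor. The following is only an illustrative sketch of behaviour matching those assertions; the actual implementations live in data/lib/plantwatchdog/aggregation_methods.rb and may differ.

# Illustrative sketch, not the gem's code: behaviour implied by the assertions.
class Array
  # Yield each (previous, current) pair; empty and one-element arrays give [].
  def each_prior
    (1...size).map { |i| yield(self[i - 1], self[i]) }
  end
end

# Trapezoidal rule over parallel arrays of x and y values:
#   trapezoid([0, 1], [0, 1])                  #=> 0.5
#   trapezoid([0, 1, 1.5], [0, 1, 1.5])        #=> 1.125
#   trapezoid([0, 1, 1.5, 5], [0, 1, 1.5, 5])  #=> 12.5
def trapezoid(xs, ys)
  return 0 if xs.nil? || xs.size < 2
  xs.zip(ys).each_cons(2).inject(0.0) do |area, ((x0, y0), (x1, y1))|
    area + (x1 - x0) * (y0 + y1) / 2.0
  end
end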
data/test/test_base.rb
ADDED
@@ -0,0 +1,83 @@
+$:.unshift File.join(File.dirname(__FILE__),"..","lib")
+require 'rubygems'
+
+module PlantWatchdog
+  module TestUtil
+    def setup type=:sqlite
+      type == :mysql ? setup_mysql : setup_sqlite
+      #setup_sqlite
+      #Model::Schema.migrate(:down)
+      Model::Schema.migrate(:up)
+    end
+
+    def setup_sqlite
+      ActiveRecord::Base.establish_connection(:adapter => "sqlite3",
+                                              :database => ":memory:")
+      #require 'ar-extensions/import/sqlite'
+    end
+
+    def setup_mysql
+      ActiveRecord::Base.establish_connection(:adapter=>"mysql",
+                                              :database => "solar",
+                                              :username=>"root",
+                                              :password=>"root"
+                                              )
+
+      #require 'ar-extensions/import/mysql'
+
+    end
+
+    def user
+      user = Model::User.find(:first, :conditions => [ "name = 'markus'" ] )
+      return user if user
+      user = Model::User.new
+      user.name = 'markus'
+      user.password = 'markus'
+      user.save()
+      return user
+    end
+
+    def create_inverter(plant = nil)
+
+      if (user.plant.nil?)
+        plant = Model::Plant.new
+        plant.user = user
+        plant.save();
+      end
+
+      inverter = Model::Device.new
+      inverter.plant = user.plant
+      inverter.unique_id = "123"
+      inverter.meta = [["time", "integer"], ["pac", "float"], ["etotal", "integer"]]
+      inverter.save
+
+      return inverter
+    end
+
+    def create_measurement(time, pac, etotal)
+      metadata = user.plant.devices.first.metadata
+      m = metadata.dataclass.new;
+      m.time = time
+      m.pac = pac
+      m.etotal = etotal
+      m
+    end
+
+    def sync(csv, inverter=user.plant.devices.first)
+      Model::SyncManager.new.sync(user, inverter.unique_id, StringIO.new(csv))
+    end
+
+    def write_file(name, content)
+      f = File.new("#{data_dir}#{name}", "w")
+      f.write(content)
+      f.close
+    end
+
+    def data_dir
+      dirname = "#{File.dirname(__FILE__)}/unit_test_generated/"
+      FileUtils.mkdir(dirname) unless File.exists?(dirname)
+      dirname
+    end
+
+  end
+end
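Taken together, these helpers give each test an in-memory SQLite schema (via setup), a single user "markus" with one plant, a device whose metadata declares time/pac/etotal columns, and a shortcut for pushing CSV data through the SyncManager. A minimal sketch of a test built on TestUtil, mirroring how the other test files in this diff use the helpers; the class and test names here are made up for illustration.

# Hypothetical example test; every call is a TestUtil helper or a pattern
# taken from the other test files in this diff.
$:.unshift File.join(File.dirname(__FILE__),"..","lib")
$:.unshift File.join(File.dirname(__FILE__),"..")
require 'rubygems'
require 'plantwatchdog/model'
require 'test/unit'
require 'test/test_base'

module PlantWatchdog
  class ExampleTest < Test::Unit::TestCase
    include TestUtil  # provides setup, user, create_inverter, create_measurement, sync

    def test_measurement_roundtrip
      inverter = create_inverter
      t = Time.utc(2006, "jan", 2, 8, 0, 0)
      Model::MeasurementChunk.save_measurements(inverter, [create_measurement(t.tv_sec, 25.0, 10000)])
      chunk = Model::MeasurementChunk.find(:first)
      assert_equal(2006, chunk.time_year)
    end
  end
end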
data/test/test_data.rb
ADDED
@@ -0,0 +1,118 @@
+$:.unshift File.join(File.dirname(__FILE__),"..","lib")
+$:.unshift File.join(File.dirname(__FILE__),"..")
+require 'rubygems'
+require 'plantwatchdog/data'
+require 'test/unit'
+require 'test/test_base'
+
+module PlantWatchdog
+  class DataTest < Test::Unit::TestCase
+    include TestUtil
+    include Datadetection
+    def test_raw_data_years
+      inverter1 = create_inverter
+
+      year = 2006
+      date = Time.utc(year, "jan", 2, 5, 0, 0)
+
+      # add a measurement for inverter1
+      Model::MeasurementChunk.save_measurements(inverter1, [create_measurement(date.tv_sec, 25.0, 10000)])
+
+      assert_equal [year], years_with_data(user)
+      assert_equal [2], days_with_data(user, 2006)
+
+      # add inverter2 and a measurement for another day in the same year
+      date = Time.utc(year, "feb", 1, 5, 0, 0)
+      inverter2 = create_inverter
+      Model::MeasurementChunk.save_measurements(inverter1, [create_measurement(date.tv_sec, 25.0, 10000)])
+      Model::MeasurementChunk.save_measurements(inverter2, [create_measurement(date.tv_sec, 25.0, 10000)])
+      assert_equal [year], years_with_data(user)
+      assert_equal [2,32], days_with_data(user, 2006)
+
+      # add a measurement for the next year
+      date = Time.utc(year+1, "feb", 2, 5, 0, 0)
+      Model::MeasurementChunk.save_measurements(inverter2, [create_measurement(date.tv_sec, 25.0, 10000)])
+      assert_equal [year, year+1], years_with_data(user)
+      assert_equal [33], days_with_data(user, 2007)
+    end
+
+    def test_day_of_year_converter
+      # 2000 is a switch year
+      dayc = DayOfYearConverter.new(2000, [1,60])
+      assert_equal [1,2], dayc.months
+      assert_equal [1], dayc.days(1)
+      assert_equal [29], dayc.days(2)
+      assert_equal [], dayc.days(3)
+      dayc = DayOfYearConverter.new(2001, [1,60])
+      assert_equal [1,3], dayc.months
+      assert_equal [1], dayc.days(1)
+      assert_equal [], dayc.days(2)
+      assert_equal [1], dayc.days(3)
+    end
+
+    def test_series
+      inverter = create_inverter
+      date = Time.utc(2001, "jan", 2, 12, 0, 0)
+
+      # add a measurement for inverter1
+      Model::MeasurementChunk.save_measurements(inverter, [create_measurement(date.tv_sec, 25.1, 101)])
+      ENV["TZ"] = "Europe/Berlin" # set timezone
+      ts = time_series(inverter, 2001, 2)
+      ts_pac = ts["pac"]
+      utc_offset = 3600 # Berlin is GMT+1 in January
+      # the time in timeseries is milliseconds and faked local time:
+      # the secs_since_epoch is adjusted to fake local time, see flot documentation
+      assert_equal([(date.tv_sec + utc_offset)*1000, 25.1], ts_pac[0])
+
+      ts_etotal=ts["etotal"]
+      assert_equal([(date.tv_sec + utc_offset)*1000, 101], ts_etotal[0])
+
+    end
+
+    def test_plant_agg
+      create_inverter
+      plant = user.plant
+      plant.aggrules = { "eday" => [:sum, "eday"] }
+      plant.save
+      ma1 = Model::MeasurementAggregate.new
+      ma1.plant = plant
+      ma1.data = { "eday" => 1.2 }
+      ma1.time_year = 2010
+      ma1.time_day_of_year = 20
+      ma1.save!
+
+      # 2010-21 missing
+
+      ma2 = Model::MeasurementAggregate.new
+      ma2.plant = plant
+      ma2.data = { "eday" => 1.4 , "another" => 1 }
+      ma2.time_year = 2010
+      ma2.time_day_of_year = 22
+      ma2.save!
+
+      ma3 = Model::MeasurementAggregate.new
+      ma3.plant = plant
+      ma3.data = { "another" => 1 }
+      ma3.time_year = 2010
+      ma3.time_day_of_year = 23
+      ma3.save!
+
+      aggs = plant_aggregates(plant, [[2010,19],[2010,20],[2010,21],[2010,22],[2010,23]])
+      ENV["TZ"] = "Europe/Berlin" # set timezone
+      millis = Time.utc(2010, "jan", 19, 12, 0, 0).tv_sec*1000
+      day_millis = 3600*24*1000
+      # TODO NaN instead of 0
+      assert_equal( [[millis, 0], [millis+day_millis, 1.2], [millis+day_millis*2, 0], [millis+day_millis*3, 1.4], [millis+day_millis*4, 0]], aggs["eday"])
+    end
+
+    include Monthhelper
+
+    def test_monthhelper
+      assert_equal(31, days_of_month(2000, 1).size)
+      assert_equal(29, days_of_month(2000, 2).size)
+      assert_equal(30, days_of_month(2000, 4).size)
+      assert_equal(28, days_of_month(2001, 2).size)
+    end
+
+  end
+end
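test_series and test_plant_agg both depend on the convention spelled out in the comments above: flot receives x values in milliseconds, pre-shifted by the zone's UTC offset so the chart reads as local time. A small sketch of that conversion, using only what the assertions state; the helper name is illustrative, the real code lives in data/lib/plantwatchdog/data.rb.

# Illustrative helper: epoch seconds shifted into "fake local time" and
# scaled to milliseconds, as the assertions above expect for flot.
def to_flot_millis(secs_since_epoch, utc_offset_seconds)
  (secs_since_epoch + utc_offset_seconds) * 1000
end

date = Time.utc(2001, "jan", 2, 12, 0, 0)
to_flot_millis(date.tv_sec, 3600)  # the expected x value of ts_pac[0] in test_series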
data/test/test_model.rb
ADDED
@@ -0,0 +1,142 @@
+$:.unshift File.join(File.dirname(__FILE__),"..","lib")
+$:.unshift File.join(File.dirname(__FILE__),"..")
+require 'rubygems'
+require 'plantwatchdog/model'
+require 'test/unit'
+require 'test/test_base'
+
+module PlantWatchdog
+  class ModelTest < Test::Unit::TestCase
+    include TestUtil
+    def test_measurement_parse_csv
+      inverter = create_inverter
+
+      year = 2006
+      date = Time.utc(year, "jan", 2, 8, 0, 0)
+
+      assert_equal([], Model::Measurement.parse_csv(inverter.metadata, nil))
+      assert_equal([], Model::Measurement.parse_csv(inverter.metadata, ""))
+
+      csv = "#{date.tv_sec},25.00,10000"
+      actual = Model::Measurement.parse_csv inverter.metadata, csv
+      expected = [ create_measurement(date.tv_sec, 25.0, 10000) ]
+      assert_equal(expected, actual)
+
+      # to_csv: a measurement instance must preserver the original line
+      assert_equal(csv, actual.first.line)
+
+      # expected.first has not been created from a csv line, the line
+      # must be created according to the metadata, therefore 25.0 will
+      # be serialized as "25.0"
+      assert_equal("#{date.tv_sec},25.0,10000", expected.first.line)
+    end
+
+    def test_save_measurement
+      inverter = create_inverter
+      ENV["TZ"] = "Europe/Berlin"
+      year = 2006
+      date = Time.utc(year, "jan", 1, 23, 0, 0) # this is January 2, 00:00:00 in Berlin
+
+      m = create_measurement(date.tv_sec, 25.0, 10000)
+      Model::MeasurementChunk.save_measurements(inverter, [m])
+
+      # re-read the chunk and therefore go through the complete de-serialization
+      c = Model::MeasurementChunk.find(:first)
+      assert_equal(year, c.time_year)
+      assert_equal(2, c.time_day_of_year)
+      assert_equal(m, c.measurements.first)
+
+      # create a new chunk for another inverter on the same day
+      inverter2 = create_inverter
+      Model::MeasurementChunk.save_measurements(inverter2, [m])
+      c = Model::MeasurementChunk.find(:first, :conditions => [ "device_id=?", inverter2.id ])
+      assert_equal(year, c.time_year)
+      assert_equal(2, c.time_day_of_year)
+
+    end
+
+    def test_measurement_partition
+      create_inverter()
+      year = 2006
+      date1 = Time.utc(year, "jan", 2, 8, 0, 0)
+      date2 = Time.utc(year, "jan", 3, 8, 0, 0)
+
+      measurements = []
+      dict = Model::Measurement.partition_by_day measurements
+      assert(dict.empty?)
+
+      # one measurement, one day
+      measurements << create_measurement(date1.tv_sec, 25.0, 10000)
+      dict = Model::Measurement.partition_by_day measurements
+      assert_equal(measurements, dict[[year,2]])
+
+      # two measurements, one day
+      measurements << create_measurement(date1.tv_sec + 1000, 25.0, 10000)
+      dict = Model::Measurement.partition_by_day measurements
+      assert_equal(measurements, dict[[year,2]])
+
+      # three measurements, two days
+      measurements << create_measurement(date2.tv_sec, 25.0, 10000)
+      dict = Model::Measurement.partition_by_day measurements
+      assert_equal(measurements[0,2], dict[[year,2]])
+      assert_equal([measurements.last], dict[[year,3]])
+    end
+
+    def test_metadata_and_aggrules_conversion
+      device = Model::Device.new
+      # set metadata and aggrules via conveniance setters ...
+      aggrules = { "k" => [ "m", "p1"] } # TODO: Hash keys should be symbols, but default encoding/decoding to json only uses strings
+      device.aggrules = aggrules
+      device.meta = ["col1", "col2"]
+      device.save!
+
+      act_device = Model::Device.find(:first)
+      # and check that the dict and array was transformed to JSON
+      # access active record attributes directly since getter is overwritten
+      assert_equal( '["col1","col2"]' , device.metadata["description"])
+      assert_equal( '{"k":["m","p1"]}', device.aggregationrule["description"])
+
+      # now check that the JSON from the descriptions has been transformed to ruby array and dict
+      assert_equal( act_device.meta, device.meta)
+      assert_equal( aggrules, act_device.aggrules)
+
+      # check that updating meta and saving again creates a new
+      # metadata row and the old one is preserved
+      existing_metadata_id = device.metadata.id
+      device.meta = ["col1", "col2", "col3"]
+      device.save!
+      assert(existing_metadata_id < device.metadata.id)
+      act_device = Model::Device.find(:first)
+      assert_equal( '["col1","col2","col3"]' , device.metadata["description"])
+      old_metadata = Model::Metadata.find(:first, :conditions => ["id=?", existing_metadata_id])
+
+      assert_equal('["col1","col2"]' , old_metadata["description"])
+
+      # assigning the same metadata again must not create a new metadata instance
+      current_metadata_id = device.metadata.id
+      device.meta = ["col1", "col2", "col3"]
+      device.save!
+      assert( device.metadata.id == current_metadata_id)
+
+    end
+
+    def test_metadata
+      # every metadata instance dynamically creates one subclass of BaseWithoutTable
+      meta = Model::Metadata.new
+      meta.description = '["default",["time","integer"],["value","float"]]'
+      assert_equal(["default",["time","integer"],["value","float"]], meta.description)
+      cols = meta.dataclass.columns
+      assert_equal("default", cols.first.name)
+      assert_equal(nil, cols.first.type)
+      assert_equal("time", cols[1].name)
+      assert_equal(:integer, cols[1].type)
+      assert_equal("value", cols.last.name)
+      assert_equal(:float, cols.last.type)
+
+      # ensure there is exactly one class created for every metadata row
+      meta.save!
+      loadedMeta = Model::Metadata.find(:first)
+      assert_same(meta.dataclass, loadedMeta.dataclass)
+    end
+  end
+end
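test_measurement_partition shows that partition_by_day returns a hash keyed by [year, day_of_year], and test_save_measurement shows the day boundary is taken in the process timezone (with TZ=Europe/Berlin, January 1 23:00 UTC lands on day 2). A grouping consistent with those assertions could look like the sketch below; it is illustrative only, not the gem's implementation in data/lib/plantwatchdog/model.rb.

# Illustrative sketch: group measurements by local [year, day-of-year],
# matching the dict keys asserted in test_measurement_partition.
def partition_by_day(measurements)
  measurements.group_by do |m|
    t = Time.at(m.time)  # interpreted in the local timezone (ENV["TZ"])
    [t.year, t.yday]
  end
end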
data/test/test_sync.rb
ADDED
@@ -0,0 +1,71 @@
+$:.unshift File.join(File.dirname(__FILE__),"..","lib")
+$:.unshift File.join(File.dirname(__FILE__),"..")
+require 'rubygems'
+require 'plantwatchdog/model'
+require 'test/unit'
+require 'test/test_base'
+
+module PlantWatchdog
+  class SyncTest < Test::Unit::TestCase
+    include TestUtil
+    def test_sync_inverter
+      # sync inverter
+      inverter = create_inverter()
+      inverter.save
+      year = 2005
+      day = 5
+      syncTime = Time.utc(year, "jan", day, 15, 0, 0)
+
+      syncManager = Model::SyncManager.new
+      # latest is 0 if there has not been a synchronization yet
+      assert_equal(0, syncManager.latest(user, inverter.unique_id))
+      csv_line1 = "#{syncTime.tv_sec},20,1000"
+      sync(csv_line1)
+
+      # check that there is an entry in the sync table after the first sync
+      assert_equal(syncTime.tv_sec, syncManager.latest(user, inverter.unique_id))
+
+      ims = Model::MeasurementChunk.find(:first, :conditions => ["device_id=?", inverter.id]);
+      assert_equal(csv_line1, ims.data)
+
+      # has the user's start_year been updated, too?
+      assert_equal(Time.at(syncTime.tv_sec).utc.year, user.start_year)
+
+      # we are only allowed to upload more recent data
+      assert_raise(Model::SyncError) do
+        sync("#{syncTime.tv_sec - 5},20,1000")
+      end
+
+      syncTime += 3600 # one hour later
+
+      # sync the next chunk on the same day
+      csv_line2 = "#{syncTime.tv_sec},0,1200"
+      sync(csv_line2)
+
+      assert_equal(syncTime.tv_sec, syncManager.latest(user, inverter.unique_id))
+
+      ims = Model::MeasurementChunk.find(:first, :conditions => ["device_id=?", inverter.id]);
+      assert_equal("#{csv_line1}\n#{csv_line2}", ims.data)
+
+      # sync another chunk, this time containing data spanning two days
+      syncTime = Time.utc(year, "jan", 6, 15, 0, 0)
+      csv_line_day_6 = "#{syncTime.tv_sec},0,1300"
+      syncTime = Time.utc(year, "jan", 7, 14, 0, 0)
+      csv_line_day_7 = "#{syncTime.tv_sec},0,1400"
+      sync("#{csv_line_day_6}\n#{csv_line_day_7}")
+
+      # check data of day 6
+      ims = Model::MeasurementChunk.find(:first, :conditions => ["time_year=? and time_day_of_year=?", year, 6]);
+      assert_equal("#{csv_line_day_6}", ims.data)
+      # check data of day 7
+      ims = Model::MeasurementChunk.find(:first, :conditions => ["time_year=? and time_day_of_year=?", year, 7]);
+      assert_equal("#{csv_line_day_7}", ims.data)
+    end
+
+    def test_sync_invalid_unique_id
+      assert_raise(Model::SyncError) do
+        Model::SyncManager.new.sync(user, "unkown", StringIO.new("1234,20,1000"))
+      end
+    end
+  end
+end
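The sync tests document the upload contract: each CSV line starts with epoch seconds followed by the remaining metadata columns in order (pac and etotal for the test device), every line must be newer than the last synchronized timestamp, and the server splits incoming data into per-day MeasurementChunk rows. The sketch below shows a client-side upload through the same entry point the tests use; user and inverter are assumed to come from the TestUtil helpers above.

# Illustrative upload, mirroring the tests: two readings one hour apart,
# in metadata column order time,pac,etotal. Lines older than
# SyncManager#latest would raise Model::SyncError.
require 'stringio'

t1 = Time.utc(2005, "jan", 5, 15, 0, 0)
csv = ["#{t1.tv_sec},20,1000", "#{t1.tv_sec + 3600},0,1200"].join("\n")
Model::SyncManager.new.sync(user, inverter.unique_id, StringIO.new(csv))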