barnyard_harvester 0.0.5 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +1 -0
- data/barnyard_harvester.gemspec +3 -0
- data/lib/barnyard_harvester.rb +29 -7
- data/lib/barnyard_harvester/dynamodb.rb +88 -0
- data/lib/barnyard_harvester/mongodb.rb +1 -5
- data/lib/barnyard_harvester/rabbitmq_queue.rb +106 -0
- data/lib/barnyard_harvester/redis.rb +1 -15
- data/lib/barnyard_harvester/{mongodb_queue.rb → resque_queue.rb} +23 -6
- data/lib/barnyard_harvester/sqs_queue.rb +112 -0
- data/lib/barnyard_harvester/version.rb +1 -1
- data/spec/dynamodb_sqs_spec.rb +180 -0
- data/spec/hash_spec.rb +6 -1
- data/spec/mongo_spec.rb +16 -3
- data/spec/redis_rabbitmq_spec.rb +162 -0
- data/spec/redis_sqs_spec.rb +170 -0
- metadata +48 -18
- data/lib/barnyard_harvester/redis_queue.rb +0 -84
data/.gitignore
CHANGED
data/barnyard_harvester.gemspec
CHANGED
data/lib/barnyard_harvester.rb
CHANGED
@@ -4,6 +4,7 @@ require "logger"
 require "barnyard_harvester/version"

 module BarnyardHarvester
+
   ADD = "add"
   CHANGE = "change"
   DELETE = "delete"
@@ -24,6 +25,16 @@ module BarnyardHarvester
     @debug = args.fetch(:debug) { false }
     @log = args.fetch(:logger) { Logger.new(STDOUT) }

+    @queueing = args[:queueing]
+
+    case @queueing
+      when :rabbitmq
+        @rabbitmq_settings = args.fetch(:rabbitmq_settings) { raise "You must provide :rabbitmq_settings" }
+      when :sqs
+        @sqs_settings = args.fetch(:sqs_settings) { raise "You must provide :sqs_settings" }
+      else
+        @queueing = :resque
+    end

     @backend = args.fetch(:backend) { :redis }

@@ -32,7 +43,7 @@ module BarnyardHarvester
     end

     require "barnyard_harvester/#{@backend.to_s}_helper" if File.exist? "barnyard_harvester/#{@backend.to_s}_helper"
-    require "barnyard_harvester/#{@
+    require "barnyard_harvester/#{@queueing.to_s}_queue"
     require "barnyard_harvester/#{@backend.to_s}"

     # YAML::ENGINE.yamler = 'syck'
@@ -114,12 +125,12 @@ module BarnyardHarvester
         end
       else
         # We got add!
-        begin
+        #begin
           @my_add_queue.push(@harvester_uuid, crop_change_uuid, @crop_number, primary_key, BarnyardHarvester::ADD, value)
-        rescue Exception => e
-
-
-        end
+        #rescue Exception => e
+        #  @log.fatal "FATAL error pushing add #{primary_key} to queue. #{e}"
+        #  exit 1
+        #end

         @my_barn[primary_key] = value
         @add_count += 1
@@ -130,6 +141,15 @@ module BarnyardHarvester
     "(#{@add_count}) adds, (#{@delete_count}) deletes, (#{@change_count}) changes, (#{@source_count}) source records, (#{@cache_count}) cache records"
   end

+  #def log_run(harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
+  #
+  #  #begin
+  #  #rescue Exception => e
+  #  #  @log.fatal "#{self.class} Fail in Resque.enqueue of HarvesterLogs. #{e.backtrace}"
+  #  #end
+  #
+  #end
+
   def run

     @began_at = Time.now
@@ -141,13 +161,15 @@ module BarnyardHarvester

     @ended_at = Time.now

+
+    @my_add_queue.log_run(@harvester_uuid, @crop_number, @began_at, @ended_at, @source_count, @change_count, @add_count, @delete_count)
+
     # Let Farmer know I'm done and to flush the updates
     @my_barn.flush
     @my_add_queue.flush
     @my_change_queue.flush
     @my_delete_queue.flush

-    @my_barn.log_run(@harvester_uuid, @crop_number, @began_at, @ended_at, @source_count, @change_count, @add_count, @delete_count)

   end

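The constructor now selects a queueing transport from :queueing and falls back to Resque, requiring the matching *_queue file at runtime. The specs further down exercise the same options; as a quick orientation, a minimal sketch of wiring up the RabbitMQ transport might look like the following — the host names, crop number and sample record are illustrative assumptions, not values from the package:

  require "barnyard_harvester"
  require "logger"

  h = BarnyardHarvester::Sync.new(
    :queueing          => :rabbitmq,                 # :rabbitmq, :sqs, anything else becomes :resque
    :rabbitmq_settings => { :host => "localhost" },  # assumed local broker
    :backend           => :redis,
    :crop_number       => 1,
    :redis_settings    => { :host => "localhost", :port => 6379, :db => 1 },
    :logger            => Logger.new(STDOUT)
  )

  # Feed one hypothetical source record through the change detector.
  h.run do
    { "user-42" => { "name" => "example" } }.each do |primary_key, value|
      h.process primary_key, value
    end
  end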
data/lib/barnyard_harvester/dynamodb.rb
ADDED
@@ -0,0 +1,88 @@
+require "crack"
+require "json"
+require "aws-sdk"
+
+module BarnyardHarvester
+
+  DEFAULT_DYNAMODB_SETTINGS = {
+      :dynamo_db_endpoint => "dynamodb.us-west-1.amazonaws.com",
+      :access_key_id => ENV["AWS_ACCESS_KEY_ID"],
+      :secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
+  }
+
+  class Barn
+
+    def initialize(args)
+
+      @crop_number = args.fetch(:crop_number) { raise "You must provide :crop_number" }
+      @dynamodb_settings = args.fetch(:dynamodb_settings) { DEFAULT_DYNAMODB_SETTINGS }
+      @debug = args.fetch(:debug) { false }
+      @log = args.fetch(:logger) { Logger.new(STDOUT) }
+
+      @db = AWS::DynamoDB.new(@dynamodb_settings)
+
+      table_name = "barnyard_crop_id-#{@crop_number}"
+
+      begin
+        @table = @db.tables.create(table_name, 10, 5)
+        sleep 1 while @table.status == :creating
+        @table.hash_key = [:id, :string]
+      rescue AWS::DynamoDB::Errors::ResourceInUseException
+        @table = @db.tables[table_name]
+        @table.hash_key = [:id, :string]
+      end
+
+    end
+
+    def delete(primary_key)
+      check_key primary_key
+
+      item = @table.items.where("id" => primary_key).first # Save the value
+      value = item.attributes['value']
+      item.delete # Delete the key
+      Crack::JSON.parse(value) # Return the object
+    end
+
+    def []= primary_key, object
+      check_key primary_key
+      check_object object
+      @table.items.create('id' => primary_key, 'value' => object.to_json)
+    end
+
+    def [] primary_key
+      check_key primary_key
+
+      Crack::JSON.parse(@table.items.where("id" => primary_key).first.attributes['value'])
+    end
+
+    def has_key?(primary_key)
+      check_key primary_key
+
+      @table.items.where("id" => primary_key).count == 1
+
+    end
+
+    def each
+
+      @table.items.each do |i|
+        yield i.attributes['id'], i.attributes['value']
+      end
+
+    end
+
+    def flush
+    end
+
+    private
+
+    def check_key(primary_key)
+      # Raise an exception here if the key must conform to a specific format
+      # Example: raise "key must be a string object" unless key.is_a? String
+    end
+
+    def check_object(object)
+      raise "#{object.class} must implement the to_json method" unless object.respond_to? :to_json
+    end
+  end
+
+end
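The DynamoDB Barn exposes the same hash-style cache interface as the other backends ([]=, [], delete, has_key?, each, flush), storing each value as JSON under an "id" hash key. A minimal usage sketch, assuming AWS credentials are present in the environment — the key and value below are illustrative, not taken from the package:

  require "logger"
  require "barnyard_harvester/dynamodb"

  barn = BarnyardHarvester::Barn.new(:crop_number => 1)

  barn["user-42"] = { "name" => "example" }  # serialized with to_json into the item's 'value' attribute
  barn.has_key?("user-42")                   # => true, via a count on the "id" key
  barn["user-42"]                            # parsed back into a Hash with Crack::JSON
  barn.delete("user-42")                     # removes the item and returns the parsed value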
data/lib/barnyard_harvester/mongodb.rb
CHANGED
@@ -9,10 +9,6 @@ require "barnyard_harvester/mongodb_helper"

 module BarnyardHarvester

-  class HarvesterLogs
-    @queue = :logs_harvester
-  end
-
   DEFAULT_MONGO_SETTINGS = {
       :host_list => "localhost:27017",
       :collection => "test_collection",
@@ -33,7 +29,7 @@ module BarnyardHarvester
       @mongodb_settings.fetch(:collection) { raise "You must provide :collection" }

       @redis_settings.delete(:db)
-      Resque.redis = Redis.new(@redis_settings)
+      #Resque.redis = Redis.new(@redis_settings)

       @mongodb_settings[:debug] = @debug
       @mongodb_settings[:logger] = @log
data/lib/barnyard_harvester/rabbitmq_queue.rb
ADDED
@@ -0,0 +1,106 @@
+module BarnyardHarvester
+
+  require "bunny"
+
+  QUEUE_FARMER = "barnyard-farmer"
+  QUEUE_HARVESTER = "barnyard-harvests"
+  QUEUE_TRANSACTION = "barnyard-transactions"
+  QUEUE_CHANGE = "barnyard-changes"
+
+  class Queue
+
+    def enqueue(queue, queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
+
+      payload = Hash.new
+      payload[:queued_at] = queued_at
+      payload[:harvester_uuid] = harvester_uuid
+      payload[:crop_change_uuid] = crop_change_uuid
+      payload[:crop_number] = crop_number
+      payload[:primary_key] = primary_key
+      payload[:transaction_type] = transaction_type
+      payload[:value] = value
+      payload[:old_value] = old_value
+
+      json_payload = payload
+
+      @direct_exchange.publish(json_payload, key: queue)
+      @direct_exchange.publish(json_payload, key: QUEUE_CHANGE)
+
+    end
+
+    def log_run(harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
+
+      payload = Hash.new
+      payload[:time] = Time.now
+      payload[:harvester_uuid] = harvester_uuid
+      payload[:crop_number] = crop_number
+      payload[:began_at] = began_at
+      payload[:ended_at] = ended_at
+      payload[:source_count] = source_count
+      payload[:change_count] = change_count
+      payload[:add_count] = add_count
+      payload[:delete_count] = delete_count
+
+      @direct_exchange.publish(payload.to_json, key: QUEUE_HARVESTER)
+
+    end
+
+    def initialize(args)
+
+      @debug = args.fetch(:debug) { false }
+      @log = args.fetch(:logger) { Logger.new(STDOUT) }
+      @crop_number = args.fetch(:crop_number) { raise "You must provide :crop_number" }
+      @rabbitmq_settings = args.fetch(:rabbitmq_settings) { raise "You must provide :rabbitmq_settings" }
+
+      @bunny = Bunny.new(@rabbitmq_settings)
+      @bunny.start
+
+      @direct_exchange = @bunny.exchange('');
+
+    end
+
+    def push(harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value=Hash.new)
+      check_key primary_key
+
+      enqueue(QUEUE_FARMER, DateTime.now, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value.to_json, old_value.to_json)
+
+      message = "RabbitQueue: #{QUEUE_FARMER}, Now: #{DateTime.now}, Harvester:#{harvester_uuid}, Change:#{crop_change_uuid} crop_number: #{crop_number}, key: #{primary_key}, transaction_type: #{transaction_type})"
+
+      if @log.level == Logger::DEBUG
+        message += ", value: #{value.to_json}, old_value: #{old_value.to_json}"
+        @log.debug message
+      end
+    end
+
+    # Flush any data if needed.
+    #
+    def flush
+    end
+
+    private
+
+    # Raise an exception here if the key must conform to a specific format
+    #
+    def check_key(primary_key)
+      # Example: raise "key must be a string object" unless key.is_a? String
+      primary_key
+    end
+
+  end
+
+  # AddQueue
+  #
+  class AddQueue < Queue
+  end
+
+  # ChangeQueue
+  #
+  class ChangeQueue < Queue
+  end
+
+  # DeleteQueue
+  #
+  class DeleteQueue < Queue
+  end
+
+end
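Each transport file defines the same Queue base class plus AddQueue, ChangeQueue and DeleteQueue subclasses, so the harvester core only ever calls push, log_run and flush. A rough sketch of driving the RabbitMQ variant directly — the broker host, UUIDs and the sample record are placeholder assumptions:

  require "barnyard_harvester/rabbitmq_queue"

  add_queue = BarnyardHarvester::AddQueue.new(
    :crop_number       => 1,
    :rabbitmq_settings => { :host => "localhost" }  # assumed local broker
  )

  # "add" is the value of BarnyardHarvester::ADD; key and record are illustrative.
  add_queue.push("harvester-uuid", "change-uuid", 1, "user-42", "add", { "name" => "example" })
  add_queue.flush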
data/lib/barnyard_harvester/redis.rb
CHANGED
@@ -5,10 +5,6 @@ require "resque"

 module BarnyardHarvester

-  class HarvesterLogs
-    @queue = :logs_harvester
-  end
-
   class Barn

     def initialize(args)
@@ -20,7 +16,7 @@ module BarnyardHarvester

       @redis_settings.delete(:db)

-      Resque.redis = Redis.new(@redis_settings)
+      #Resque.redis = Redis.new(@redis_settings)

       # This sets the database number for redis to store the cached data
       @redis_settings[:db] = args[:crop_number]
@@ -30,16 +26,6 @@ module BarnyardHarvester

     end

-    def log_run(harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
-
-      begin
-        Resque.enqueue(HarvesterLogs, Time.now, harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
-      rescue Exception => e
-        logger.fatal "#{self.class} Fail in Resque.enqueue of HarvesterLogs. #{e.backtrace}"
-      end
-
-    end
-
     def delete(primary_key)
       check_key primary_key

data/lib/barnyard_harvester/{mongodb_queue.rb → resque_queue.rb}
RENAMED
@@ -4,21 +4,37 @@ module BarnyardHarvester
     @queue = :logs_change
   end

+  class HarvesterLogs
+    @queue = :logs_harvester
+  end
+
+  class DeliveryLogs
+    @queue = :logs_delivery
+  end
+
+  class TransactionLogs
+    @queue = :logs_transaction
+  end
   class Queue

-    class
+    class Enqueue
       def initialize(queue, queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
         Resque.enqueue(queue, queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
-        Resque.enqueue(ChangeLogs,queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
+        Resque.enqueue(ChangeLogs, queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
       end
     end

+    def log_run(harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
+
+      Resque.enqueue(HarvesterLogs, harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
+
+    end
+
     def initialize(args)

       @debug = args.fetch(:debug) { false }
       @log = args.fetch(:logger) { Logger.new(STDOUT) }
-
-      raise "arguments must contain :crop_number => some_integer" if args[:crop_number].nil?
+      @crop_number = args.fetch(:crop_number) { raise "You must provide :crop_number" }

       resque_class_name = "Distribute"

@@ -30,17 +46,18 @@ module BarnyardHarvester
       rescue
         # Set the queue name to this apol_harvester's id prefixed with a Q_
         #Object.const_set(resque_class_name, Class.new { @queue = "Q_#{args[:crop_number]}"})
-        Object.const_set(resque_class_name, Class.new { @queue =
+        Object.const_set(resque_class_name, Class.new { @queue = "Farmer" })
       end

       @resque_queue = Object.const_get(resque_class_name)

     end

+
     def push(harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value=Hash.new)
       check_key primary_key

-
+      Enqueue.new(@resque_queue, DateTime.now, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value.to_json, old_value.to_json)

       message = "RedisQueue: #{@resque_queue}, Now: #{DateTime.now}, Harvester:#{harvester_uuid}, Change:#{crop_change_uuid} crop_number: #{crop_number}, key: #{primary_key}, transaction_type: #{transaction_type})"

data/lib/barnyard_harvester/sqs_queue.rb
ADDED
@@ -0,0 +1,112 @@
+module BarnyardHarvester
+
+  require "aws-sdk"
+
+  QUEUE_FARMER = "barnyard-farmer"
+  QUEUE_HARVESTER = "barnyard-harvests"
+  QUEUE_TRANSACTION = "barnyard-transactions"
+  QUEUE_CHANGE = "barnyard-changes"
+
+  class Queue
+
+    def enqueue(queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
+
+      payload = Hash.new
+      payload[:queued_at] = queued_at
+      payload[:harvester_uuid] = harvester_uuid
+      payload[:crop_change_uuid] = crop_change_uuid
+      payload[:crop_number] = crop_number
+      payload[:primary_key] = primary_key
+      payload[:transaction_type] = transaction_type
+      payload[:value] = value
+      payload[:old_value] = old_value
+
+      json_payload = payload.to_json
+
+      @log.debug "Starting send_message to farmer..."
+      @farmer_queue.send_message(json_payload)
+      @log.debug "Done send_message to farmer..."
+
+      @log.debug "Starting send_message to changes..."
+      @change_queue.send_message(json_payload)
+      @log.debug "Done send_message to changes..."
+
+    end
+
+    def log_run(harvester_uuid, crop_number, began_at, ended_at, source_count, change_count, add_count, delete_count)
+
+      payload = Hash.new
+      payload[:time] = Time.now
+      payload[:harvester_uuid] = harvester_uuid
+      payload[:crop_number] = crop_number
+      payload[:began_at] = began_at
+      payload[:ended_at] = ended_at
+      payload[:source_count] = source_count
+      payload[:change_count] = change_count
+      payload[:add_count] = add_count
+      payload[:delete_count] = delete_count
+
+      @harvester_queue.send_message(payload.to_json)
+
+    end
+
+    def initialize(args)
+
+      @debug = args.fetch(:debug) { false }
+      @log = args.fetch(:logger) { Logger.new(STDOUT) }
+      @crop_number = args.fetch(:crop_number) { raise "You must provide :crop_number" }
+      @sqs_settings = args.fetch(:sqs_settings) { raise "You must provide :sqs_settings" }
+
+      @sqs = AWS::SQS.new(@sqs_settings)
+
+      @farmer_queue = @sqs.queues.create(QUEUE_FARMER)
+      @harvester_queue = @sqs.queues.create(QUEUE_HARVESTER)
+      @change_queue = @sqs.queues.create(QUEUE_CHANGE)
+
+    end
+
+    def push(harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value=Hash.new)
+      check_key primary_key
+
+      enqueue(DateTime.now, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value.to_json, old_value.to_json)
+
+      message = "SQS: #{QUEUE_FARMER}, Now: #{DateTime.now}, Harvester:#{harvester_uuid}, Change:#{crop_change_uuid} crop_number: #{crop_number}, key: #{primary_key}, transaction_type: #{transaction_type})"
+
+      if @log.level == Logger::DEBUG
+        message += ", value: #{value.to_json}, old_value: #{old_value.to_json}"
+        @log.debug message
+      end
+    end
+
+    # Flush any data if needed.
+    #
+    def flush
+    end
+
+    private
+
+    # Raise an exception here if the key must conform to a specific format
+    #
+    def check_key(primary_key)
+      # Example: raise "key must be a string object" unless key.is_a? String
+      primary_key
+    end
+
+  end
+
+  # AddQueue
+  #
+  class AddQueue < Queue
+  end
+
+  # ChangeQueue
+  #
+  class ChangeQueue < Queue
+  end
+
+  # DeleteQueue
+  #
+  class DeleteQueue < Queue
+  end
+
+end
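The SQS queue mirrors the RabbitMQ interface; the notable differences are that the three queues are created up front through AWS::SQS and every payload is serialized with to_json before send_message. A minimal sketch under the same assumptions as the spec settings further down (endpoint and credentials come from the environment; key and record are illustrative):

  require "barnyard_harvester/sqs_queue"

  add_queue = BarnyardHarvester::AddQueue.new(
    :crop_number  => 1,
    :sqs_settings => {
      :sqs_endpoint      => "sqs.us-west-1.amazonaws.com",
      :access_key_id     => ENV["AWS_ACCESS_KEY_ID"],
      :secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
    }
  )

  # "add" is the value of BarnyardHarvester::ADD.
  add_queue.push("harvester-uuid", "change-uuid", 1, "user-42", "add", { "name" => "example" })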
data/spec/dynamodb_sqs_spec.rb
ADDED
@@ -0,0 +1,180 @@
+require "barnyard_harvester"
+require "yaml"
+require "logger"
+require "json"
+require "aws-sdk"
+
+CROP_NUMBER = 1
+
+DEFAULT_DYNAMODB_SETTINGS = {
+    :dynamo_db_endpoint => "dynamodb.us-west-1.amazonaws.com",
+    :access_key_id => ENV["AWS_ACCESS_KEY_ID"],
+    :secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
+}
+
+SQS_SETTINGS = {
+    :sqs_endpoint => "sqs.us-west-1.amazonaws.com",
+    :access_key_id => ENV["AWS_ACCESS_KEY_ID"],
+    :secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
+}
+
+describe BarnyardHarvester do
+
+  def load_and_process_file(file, backend)
+
+    data = YAML::load_file file
+
+    my_logger = Logger.new(STDOUT)
+    my_logger.level = Logger::DEBUG
+
+    h = BarnyardHarvester::Sync.new(:queueing => :sqs,
+                                    :sqs_settings => SQS_SETTINGS,
+                                    :backend => backend,
+                                    :debug => true,
+                                    :crop_number => CROP_NUMBER,
+                                    :dynamodb_settings => DEFAULT_DYNAMODB_SETTINGS,
+                                    :logger => my_logger)
+
+    h.run do
+      data.each do |primary_key, value|
+        h.process primary_key, value
+      end
+    end
+
+    h
+  end
+
+  def get_table
+    table_name = "barnyard_crop_id-#{CROP_NUMBER}"
+    db = AWS::DynamoDB.new(DEFAULT_DYNAMODB_SETTINGS)
+
+    begin
+      table = db.tables.create(table_name, 10, 5)
+      sleep 1 while table.status == :creating
+      table.hash_key = [:id, :string]
+      puts table.status
+    rescue AWS::DynamoDB::Errors::ResourceInUseException
+      table = db.tables[table_name]
+      table.hash_key = [:id, :string]
+    end
+    table
+  end
+
+  def flush
+
+    get_table.items.each do |i|
+      i.delete
+    end
+
+  end
+
+  before(:each) do
+
+    flush
+
+    @crop_number = 1
+
+    file = "spec/fixtures/data-init.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :dynamodb)
+
+    h.add_count.should eq(data.count)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+  end
+
+  it "test initial load of records" do
+
+    data = YAML::load_file "spec/fixtures/data-init.yml"
+
+    data.each do |primary_key, value|
+      value.to_json.should eq(get_table.items.where("id" => primary_key).first.attributes['value'])
+    end
+
+  end
+
+  it "test add one record" do
+
+    file = "spec/fixtures/data-add.yml"
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :dynamodb)
+
+    h.add_count.should eq(1)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+    # h.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test change one record" do
+
+    file = "spec/fixtures/data-change.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :dynamodb)
+
+    h.add_count.should eq(0)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(1)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test delete one record" do
+
+    file = "spec/fixtures/data-delete.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :dynamodb)
+
+
+    h.add_count.should eq(0)
+    h.delete_count.should eq(1)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count + 1)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test delete all records and add one" do
+
+    init_file = "spec/fixtures/data-init.yml"
+    init_data = YAML::load_file init_file
+
+    file = "spec/fixtures/data-delete-all-records-add-one.yml"
+    #data = YAML::load_file file
+
+    h = load_and_process_file(file, :dynamodb)
+
+    h.add_count.should eq(1)
+    h.delete_count.should eq(5)
+    h.change_count.should eq(0)
+    h.source_count.should eq(1)
+    h.cache_count.should eq(init_data.count + 1)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+
+  after(:each) do
+  end
+
+
+end
data/spec/hash_spec.rb
CHANGED
@@ -22,7 +22,12 @@ describe BarnyardHarvester do
     my_logger = Logger.new(STDOUT)
     my_logger.level = Logger::INFO

-    h = BarnyardHarvester::Sync.new(:
+    h = BarnyardHarvester::Sync.new(:queueing => :resque,
+                                    :backend => backend,
+                                    :debug => false,
+                                    :crop_number => 1,
+                                    :redis_settings => redis_settings,
+                                    :logger => my_logger)

     h.run do
       data.each do |primary_key, value|
data/spec/mongo_spec.rb
CHANGED
@@ -29,7 +29,7 @@ MONGODB_REPLICA_SET_SETTINGS = {
     :collection => "test_collection"
 }

-$mongo_settings =
+$mongo_settings = MONGODB_SETTINGS

 describe BarnyardHarvester do

@@ -40,7 +40,13 @@ describe BarnyardHarvester do
     my_logger = Logger.new(STDOUT)
     my_logger.level = Logger::INFO

-    h = BarnyardHarvester::Sync.new(:backend => backend,
+    h = BarnyardHarvester::Sync.new(:backend => backend,
+                                    :queueing => :resque,
+                                    :debug => false,
+                                    :mongodb_settings => $mongo_settings,
+                                    :crop_number => CROP_NUMBER,
+                                    :redis_settings => REDIS_SETTINGS,
+                                    :logger => my_logger)

     h.run do
       data.each do |primary_key, value|
@@ -119,6 +125,8 @@ describe BarnyardHarvester do
     h.source_count.should eq(data.count)
     h.cache_count.should eq(data.count)

+    h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
   end

   it "test change one record" do
@@ -135,6 +143,8 @@ describe BarnyardHarvester do
     h.source_count.should eq(data.count)
     h.cache_count.should eq(data.count)

+    h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
   end

   it "test delete one record" do
@@ -145,13 +155,14 @@ describe BarnyardHarvester do

     h = load_and_process_file(file, :mongodb)

-
     h.add_count.should eq(0)
     h.delete_count.should eq(1)
     h.change_count.should eq(0)
     h.source_count.should eq(data.count)
     h.cache_count.should eq(data.count + 1)

+    h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
   end

   it "test delete all records and add one" do
@@ -170,6 +181,8 @@ describe BarnyardHarvester do
     h.source_count.should eq(1)
     h.cache_count.should eq(init_data.count + 1)

+    h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
   end


data/spec/redis_rabbitmq_spec.rb
ADDED
@@ -0,0 +1,162 @@
+require "barnyard_harvester"
+require "yaml"
+require "redis"
+require "logger"
+require "json"
+
+CROP_NUMBER = 1
+
+REDIS_SETTINGS = {
+    :host => "localhost",
+    :port => 6379,
+    :db => CROP_NUMBER
+}
+
+RABBITMQ_SETTINGS = {
+    :host => "localhost"
+    # :port => 6163
+}
+
+describe BarnyardHarvester do
+
+  def load_and_process_file(file, backend)
+
+    data = YAML::load_file file
+
+    my_logger = Logger.new(STDOUT)
+    my_logger.level = Logger::INFO
+
+    h = BarnyardHarvester::Sync.new(:queueing => :rabbitmq, :rabbitmq_settings => RABBITMQ_SETTINGS, :backend => backend, :debug => false, :crop_number => CROP_NUMBER, :redis_settings => REDIS_SETTINGS, :logger => my_logger)
+
+    h.run do
+      data.each do |primary_key, value|
+        h.process primary_key, value
+      end
+    end
+
+    h
+  end
+
+  def flush
+
+    r = Redis.new(REDIS_SETTINGS)
+
+    r.keys.each do |k|
+      r.del k
+      #puts "deleted #{k}"
+    end
+
+  end
+
+  before(:each) do
+
+    flush
+
+    @crop_number = 1
+
+    file = "spec/fixtures/data-init.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(data.count)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+  end
+
+  it "test initial load of records" do
+
+    data = YAML::load_file "spec/fixtures/data-init.yml"
+
+    redis = Redis.new(REDIS_SETTINGS)
+
+    data.each do |primary_key, value|
+      value.to_json.should eq(redis.get(primary_key))
+    end
+
+  end
+
+  it "test add one record" do
+
+    file = "spec/fixtures/data-add.yml"
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(1)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+    # h.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test change one record" do
+
+    file = "spec/fixtures/data-change.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(0)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(1)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test delete one record" do
+
+    file = "spec/fixtures/data-delete.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+
+    h.add_count.should eq(0)
+    h.delete_count.should eq(1)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count + 1)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test delete all records and add one" do
+
+    init_file = "spec/fixtures/data-init.yml"
+    init_data = YAML::load_file init_file
+
+    file = "spec/fixtures/data-delete-all-records-add-one.yml"
+    #data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(1)
+    h.delete_count.should eq(5)
+    h.change_count.should eq(0)
+    h.source_count.should eq(1)
+    h.cache_count.should eq(init_data.count + 1)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+
+  after(:each) do
+  end
+
+
+end
data/spec/redis_sqs_spec.rb
ADDED
@@ -0,0 +1,170 @@
+require "barnyard_harvester"
+require "yaml"
+require "redis"
+require "logger"
+require "json"
+
+CROP_NUMBER = 1
+
+REDIS_SETTINGS = {
+    :host => "localhost",
+    :port => 6379,
+    :db => CROP_NUMBER
+}
+
+
+SQS_SETTINGS = {
+    :sqs_endpoint => "sqs.us-west-1.amazonaws.com",
+    :access_key_id => ENV["AWS_ACCESS_KEY_ID"],
+    :secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
+}
+
+describe BarnyardHarvester do
+
+  def load_and_process_file(file, backend)
+
+    data = YAML::load_file file
+
+    my_logger = Logger.new(STDOUT)
+    my_logger.level = Logger::DEBUG
+
+    h = BarnyardHarvester::Sync.new(:queueing => :sqs,
+                                    :sqs_settings => SQS_SETTINGS,
+                                    :backend => backend,
+                                    :debug => true,
+                                    :crop_number => CROP_NUMBER,
+                                    :redis_settings => REDIS_SETTINGS,
+                                    :logger => my_logger)
+
+    h.run do
+      data.each do |primary_key, value|
+        h.process primary_key, value
+      end
+    end
+
+    h
+  end
+
+  def flush
+
+    r = Redis.new(REDIS_SETTINGS)
+
+    r.keys.each do |k|
+      r.del k
+      #puts "deleted #{k}"
+    end
+
+  end
+
+  before(:each) do
+
+    flush
+
+    @crop_number = 1
+
+    file = "spec/fixtures/data-init.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(data.count)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+  end
+
+  it "test initial load of records" do
+
+    data = YAML::load_file "spec/fixtures/data-init.yml"
+
+    redis = Redis.new(REDIS_SETTINGS)
+
+    data.each do |primary_key, value|
+      value.to_json.should eq(redis.get(primary_key))
+    end
+
+  end
+
+  it "test add one record" do
+
+    file = "spec/fixtures/data-add.yml"
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(1)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+    # h.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test change one record" do
+
+    file = "spec/fixtures/data-change.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(0)
+    h.delete_count.should eq(0)
+    h.change_count.should eq(1)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test delete one record" do
+
+    file = "spec/fixtures/data-delete.yml"
+
+    data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+
+    h.add_count.should eq(0)
+    h.delete_count.should eq(1)
+    h.change_count.should eq(0)
+    h.source_count.should eq(data.count)
+    h.cache_count.should eq(data.count + 1)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+  it "test delete all records and add one" do
+
+    init_file = "spec/fixtures/data-init.yml"
+    init_data = YAML::load_file init_file
+
+    file = "spec/fixtures/data-delete-all-records-add-one.yml"
+    #data = YAML::load_file file
+
+    h = load_and_process_file(file, :redis)
+
+    h.add_count.should eq(1)
+    h.delete_count.should eq(5)
+    h.change_count.should eq(0)
+    h.source_count.should eq(1)
+    h.cache_count.should eq(init_data.count + 1)
+
+    # h.my_barn.log_run("#{file}-#{Random.rand(100)}", @crop_number, Time.now, Time.now, h.source_count, h.change_count, h.add_count, h.delete_count)
+
+  end
+
+
+  after(:each) do
+  end
+
+
+end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: barnyard_harvester
 version: !ruby/object:Gem::Version
-  version: 0.0.
+  version: 0.0.7
 prerelease:
 platform: ruby
 authors:
@@ -9,11 +9,11 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2012-11-
+date: 2012-11-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rspec
-  requirement: &
+  requirement: &70277157472920 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -21,10 +21,10 @@ dependencies:
         version: '0'
   type: :development
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157472920
 - !ruby/object:Gem::Dependency
   name: resque
-  requirement: &
+  requirement: &70277157468160 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -32,10 +32,10 @@ dependencies:
         version: '0'
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157468160
 - !ruby/object:Gem::Dependency
   name: crack
-  requirement: &
+  requirement: &70277157454620 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -43,10 +43,10 @@ dependencies:
         version: '0'
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157454620
 - !ruby/object:Gem::Dependency
   name: json
-  requirement: &
+  requirement: &70277157449620 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -54,10 +54,10 @@ dependencies:
         version: '0'
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157449620
 - !ruby/object:Gem::Dependency
   name: uuid
-  requirement: &
+  requirement: &70277157443200 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -65,10 +65,10 @@ dependencies:
         version: '0'
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157443200
 - !ruby/object:Gem::Dependency
   name: bson_ext
-  requirement: &
+  requirement: &70277157439700 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - =
@@ -76,10 +76,10 @@ dependencies:
       version: 1.6.0
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157439700
 - !ruby/object:Gem::Dependency
   name: mongo
-  requirement: &
+  requirement: &70277157436860 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - =
@@ -87,7 +87,29 @@ dependencies:
       version: 1.6.0
   type: :runtime
   prerelease: false
-  version_requirements: *
+  version_requirements: *70277157436860
+- !ruby/object:Gem::Dependency
+  name: amqp
+  requirement: &70277157435420 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: *70277157435420
+- !ruby/object:Gem::Dependency
+  name: aws-sdk
+  requirement: &70277157431160 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: *70277157431160
 description: Performs harvests on data sources and detects adds, changes and deletes.
 email:
 - supercoder@gmail.com
@@ -105,15 +127,18 @@ files:
 - barnyard_harvester.gemspec
 - lib/.DS_Store
 - lib/barnyard_harvester.rb
+- lib/barnyard_harvester/dynamodb.rb
 - lib/barnyard_harvester/hash.rb
 - lib/barnyard_harvester/hash_queue.rb
 - lib/barnyard_harvester/mongodb.rb
 - lib/barnyard_harvester/mongodb_helper.rb
-- lib/barnyard_harvester/
+- lib/barnyard_harvester/rabbitmq_queue.rb
 - lib/barnyard_harvester/redis.rb
-- lib/barnyard_harvester/
+- lib/barnyard_harvester/resque_queue.rb
+- lib/barnyard_harvester/sqs_queue.rb
 - lib/barnyard_harvester/version.rb
 - lib/test.yml
+- spec/dynamodb_sqs_spec.rb
 - spec/fixtures/data-add.yml
 - spec/fixtures/data-change.yml
 - spec/fixtures/data-delete-all-records-add-one.yml
@@ -123,7 +148,9 @@ files:
 - spec/loader_spec.rb
 - spec/mongo_helper_spec.rb
 - spec/mongo_spec.rb
+- spec/redis_rabbitmq_spec.rb
 - spec/redis_spec.rb
+- spec/redis_sqs_spec.rb
 - spec/spec_helper.rb
 homepage: https://github.com/jongillies/barnyard/tree/master/barnyard_harvester
 licenses: []
@@ -150,6 +177,7 @@ signing_key:
 specification_version: 3
 summary: Please check the README.md for more information.
 test_files:
+- spec/dynamodb_sqs_spec.rb
 - spec/fixtures/data-add.yml
 - spec/fixtures/data-change.yml
 - spec/fixtures/data-delete-all-records-add-one.yml
@@ -159,5 +187,7 @@ test_files:
 - spec/loader_spec.rb
 - spec/mongo_helper_spec.rb
 - spec/mongo_spec.rb
+- spec/redis_rabbitmq_spec.rb
 - spec/redis_spec.rb
+- spec/redis_sqs_spec.rb
 - spec/spec_helper.rb
data/lib/barnyard_harvester/redis_queue.rb
REMOVED
@@ -1,84 +0,0 @@
-module BarnyardHarvester
-
-  class ChangeLogs
-    @queue = :logs_change
-  end
-
-  class Queue
-
-    class ResqueQueue
-      def initialize(queue, queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
-        Resque.enqueue(queue, queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
-        Resque.enqueue(ChangeLogs,queued_at, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value)
-      end
-    end
-
-    def initialize(args)
-
-      @debug = args.fetch(:debug) { false }
-      @log = args.fetch(:logger) { Logger.new(STDOUT) }
-
-      raise "arguments must contain :crop_number => some_integer" if args[:crop_number].nil?
-
-      resque_class_name = "Distribute"
-
-      # If the class does not exist, the rescue block will create it.
-      # The Class Queue is inherited by the AddQueue, ChangeQueue and DeleteQueue, but
-      # we only want to create one "resque" queue for this instantiation
-      begin
-        Object.const_get(resque_class_name)
-      rescue
-        # Set the queue name to this apol_harvester's id prefixed with a Q_
-        #Object.const_set(resque_class_name, Class.new { @queue = "Q_#{args[:crop_number]}"})
-        Object.const_set(resque_class_name, Class.new { @queue = "Farmer"})
-      end
-
-      @resque_queue = Object.const_get(resque_class_name)
-
-    end
-
-    def push(harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value, old_value=Hash.new)
-      check_key primary_key
-
-      ResqueQueue.new(@resque_queue, DateTime.now, harvester_uuid, crop_change_uuid, crop_number, primary_key, transaction_type, value.to_json, old_value.to_json)
-
-      message = "RedisQueue: #{@resque_queue}, Now: #{DateTime.now}, Harvester:#{harvester_uuid}, Change:#{crop_change_uuid} crop_number: #{crop_number}, key: #{primary_key}, transaction_type: #{transaction_type})"
-
-      if @log.level == Logger::DEBUG
-        message += ", value: #{value.to_json}, old_value: #{old_value.to_json}"
-        @log.debug message
-      end
-    end
-
-    # Flush any data if needed.
-    #
-    def flush
-    end
-
-    private
-
-    # Raise an exception here if the key must conform to a specific format
-    #
-    def check_key(primary_key)
-      # Example: raise "key must be a string object" unless key.is_a? String
-      primary_key
-    end
-
-  end
-
-  # AddQueue
-  #
-  class AddQueue < Queue
-  end
-
-  # ChangeQueue
-  #
-  class ChangeQueue < Queue
-  end
-
-  # DeleteQueue
-  #
-  class DeleteQueue < Queue
-  end
-
-end