pokan 0.1.0rc1
Sign up to get free protection for your applications and to get access to all the features.
- data/.document +5 -0
- data/.rspec +1 -0
- data/Gemfile +12 -0
- data/Gemfile.lock +37 -0
- data/Guardfile +5 -0
- data/LICENSE.txt +20 -0
- data/README.rdoc +19 -0
- data/Rakefile +60 -0
- data/lib/pokan.rb +19 -0
- data/lib/pokan/collective_peer_operations.rb +88 -0
- data/lib/pokan/connection.rb +27 -0
- data/lib/pokan/entity.rb +165 -0
- data/lib/pokan/event_handler.rb +113 -0
- data/lib/pokan/network.rb +43 -0
- data/lib/pokan/peer.rb +136 -0
- data/lib/pokan/query.rb +39 -0
- data/lib/pokan/request_handler.rb +90 -0
- data/lib/pokan/server.rb +230 -0
- data/lib/pokan/server_messages.rb +27 -0
- data/lib/pokan/version.rb +3 -0
- data/pokan.gemspec +53 -0
- data/spec/pokan/connection_spec.rb +10 -0
- data/spec/pokan/entity_spec.rb +147 -0
- data/spec/pokan/event_handler_spec.rb +84 -0
- data/spec/pokan/network_spec.rb +32 -0
- data/spec/pokan/peer_spec.rb +94 -0
- data/spec/pokan/query_spec.rb +67 -0
- data/spec/pokan/request_handler_spec.rb +253 -0
- data/spec/pokan/server_messages_spec.rb +81 -0
- data/spec/pokan/server_spec.rb +46 -0
- data/spec/spec_helper.rb +4 -0
- metadata +196 -0
data/.document
ADDED
data/.rspec
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
--color --format doc
|
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,37 @@
|
|
1
|
+
GEM
|
2
|
+
remote: http://rubygems.org/
|
3
|
+
specs:
|
4
|
+
diff-lcs (1.1.3)
|
5
|
+
eventmachine (0.12.10)
|
6
|
+
git (1.2.5)
|
7
|
+
guard (0.6.3)
|
8
|
+
thor (~> 0.14.6)
|
9
|
+
guard-rspec (0.4.4)
|
10
|
+
guard (>= 0.4.0)
|
11
|
+
jeweler (1.6.4)
|
12
|
+
bundler (~> 1.0)
|
13
|
+
git (>= 1.2.5)
|
14
|
+
rake
|
15
|
+
rake (0.9.2)
|
16
|
+
redis (2.2.2)
|
17
|
+
rspec (2.6.0)
|
18
|
+
rspec-core (~> 2.6.0)
|
19
|
+
rspec-expectations (~> 2.6.0)
|
20
|
+
rspec-mocks (~> 2.6.0)
|
21
|
+
rspec-core (2.6.4)
|
22
|
+
rspec-expectations (2.6.0)
|
23
|
+
diff-lcs (~> 1.1.2)
|
24
|
+
rspec-mocks (2.6.0)
|
25
|
+
thor (0.14.6)
|
26
|
+
|
27
|
+
PLATFORMS
|
28
|
+
ruby
|
29
|
+
|
30
|
+
DEPENDENCIES
|
31
|
+
bundler
|
32
|
+
eventmachine
|
33
|
+
guard
|
34
|
+
guard-rspec
|
35
|
+
jeweler
|
36
|
+
redis
|
37
|
+
rspec
|
data/Guardfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,20 @@
|
|
1
|
+
Copyright (c) 2011 Renato Mascarenhas
|
2
|
+
|
3
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
4
|
+
a copy of this software and associated documentation files (the
|
5
|
+
"Software"), to deal in the Software without restriction, including
|
6
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
7
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
8
|
+
permit persons to whom the Software is furnished to do so, subject to
|
9
|
+
the following conditions:
|
10
|
+
|
11
|
+
The above copyright notice and this permission notice shall be
|
12
|
+
included in all copies or substantial portions of the Software.
|
13
|
+
|
14
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
15
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
16
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
17
|
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
18
|
+
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
19
|
+
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
20
|
+
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.rdoc
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
= pokan
|
2
|
+
|
3
|
+
Description goes here.
|
4
|
+
|
5
|
+
== Contributing to pokan
|
6
|
+
|
7
|
+
* Check out the latest master to make sure the feature hasn't been implemented or the bug hasn't been fixed yet
|
8
|
+
* Check out the issue tracker to make sure someone hasn't already requested and/or contributed it
|
9
|
+
* Fork the project
|
10
|
+
* Start a feature/bugfix branch
|
11
|
+
* Commit and push until you are happy with your contribution
|
12
|
+
* Make sure to add tests for it. This is important so I don't break it in a future version unintentionally.
|
13
|
+
* Please try not to mess with the Rakefile, version, or history. If you want to have your own version, or it is otherwise necessary, that is fine, but please isolate the change in its own commit so I can cherry-pick around it.
|
14
|
+
|
15
|
+
== Copyright
|
16
|
+
|
17
|
+
Copyright (c) 2011 Renato Mascarenhas. See LICENSE.txt for
|
18
|
+
further details.
|
19
|
+
|
data/Rakefile
ADDED
@@ -0,0 +1,60 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
|
3
|
+
require File.expand_path('../lib/pokan/version.rb', __FILE__)
|
4
|
+
|
5
|
+
require 'rubygems'
|
6
|
+
require 'bundler'
|
7
|
+
begin
|
8
|
+
Bundler.setup(:default, :development)
|
9
|
+
rescue Bundler::BundlerError => e
|
10
|
+
$stderr.puts e.message
|
11
|
+
$stderr.puts "Run `bundle install` to install missing gems"
|
12
|
+
exit e.status_code
|
13
|
+
end
|
14
|
+
require 'rake'
|
15
|
+
|
16
|
+
require 'jeweler'
|
17
|
+
Jeweler::Tasks.new do |gem|
|
18
|
+
gem.name = "pokan"
|
19
|
+
gem.version = Pokan::VERSION
|
20
|
+
|
21
|
+
gem.homepage = "http://github.com/haze/pokan"
|
22
|
+
gem.license = "MIT"
|
23
|
+
|
24
|
+
gem.summary = %Q{Gossip-protocol implementation with an event based API}
|
25
|
+
|
26
|
+
gem.description = %Q{pokan is an implementation of the Gossip protocol
|
27
|
+
(best described in http://www.cs.cornell.edu/home/rvr/papers/flowgossip.pdf),
|
28
|
+
which aims to have a simple, event based API capable of being used in large,
|
29
|
+
not previously known networks.}
|
30
|
+
|
31
|
+
gem.email = "haze-gem@googlegroups.com"
|
32
|
+
gem.authors = ["Renato Mascarenhas", "Rafael Regis do Prado", "Fabio Lima Pereira"]
|
33
|
+
|
34
|
+
gem.add_dependency 'json'
|
35
|
+
gem.add_dependency 'eventmachine'
|
36
|
+
gem.add_dependency 'redis'
|
37
|
+
end
|
38
|
+
Jeweler::RubygemsDotOrgTasks.new
|
39
|
+
|
40
|
+
require 'rspec/core'
|
41
|
+
require 'rspec/core/rake_task'
|
42
|
+
RSpec::Core::RakeTask.new(:spec) do |spec|
|
43
|
+
spec.pattern = FileList['spec/**/*_spec.rb']
|
44
|
+
end
|
45
|
+
|
46
|
+
RSpec::Core::RakeTask.new(:rcov) do |spec|
|
47
|
+
spec.pattern = 'spec/**/*_spec.rb'
|
48
|
+
end
|
49
|
+
|
50
|
+
task :default => :spec
|
51
|
+
|
52
|
+
require 'rake/rdoctask'
|
53
|
+
Rake::RDocTask.new do |rdoc|
|
54
|
+
version = Pokan::VERSION
|
55
|
+
|
56
|
+
rdoc.rdoc_dir = 'rdoc'
|
57
|
+
rdoc.title = "pokan #{version}"
|
58
|
+
rdoc.rdoc_files.include('README*')
|
59
|
+
rdoc.rdoc_files.include('lib/**/*.rb')
|
60
|
+
end
|
data/lib/pokan.rb
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
module Pokan
|
2
|
+
require_relative 'pokan/connection'
|
3
|
+
require_relative 'pokan/entity'
|
4
|
+
require_relative 'pokan/peer'
|
5
|
+
require_relative 'pokan/query'
|
6
|
+
require_relative 'pokan/event_handler'
|
7
|
+
require_relative 'pokan/collective_peer_operations'
|
8
|
+
require_relative 'pokan/request_handler'
|
9
|
+
require_relative 'pokan/network'
|
10
|
+
require_relative 'pokan/version'
|
11
|
+
require_relative 'pokan/server_messages'
|
12
|
+
require_relative 'pokan/server'
|
13
|
+
|
14
|
+
class << self
|
15
|
+
def root
|
16
|
+
@@root = File.expand_path File.dirname(__FILE__) + '/..'
|
17
|
+
end
|
18
|
+
end
|
19
|
+
end
|
@@ -0,0 +1,88 @@
|
|
1
|
+
# lib/pokan/collective_peer_operations.rb
|
2
|
+
|
3
|
+
module Pokan
|
4
|
+
##
|
5
|
+
# The CollectivePeerOperations should be used when it is necessary to apply a
|
6
|
+
# operation over a undefined amount of peers
|
7
|
+
module CollectivePeerOperations
|
8
|
+
|
9
|
+
##
|
10
|
+
# Stores the keys-values-timestamps with timestamp greater than the
|
11
|
+
# peers' timestamp
|
12
|
+
# === Usage
|
13
|
+
# class Collective; include CollectivePeerOperations; end
|
14
|
+
# col = Collective.new
|
15
|
+
# col.merge({ id1: { key: { value: 'FAIL', timestamp: => 223412 } } }) # pair stored
|
16
|
+
def merge(peers)
|
17
|
+
query = Query.new(Peer)
|
18
|
+
peers.each do |id, key_set|
|
19
|
+
peer = query.where(id: id.to_s)[0]
|
20
|
+
unless peer
|
21
|
+
peer = Peer.new
|
22
|
+
peer.id = id
|
23
|
+
end
|
24
|
+
key_set.keys.each do |key|
|
25
|
+
value = key_set[key]
|
26
|
+
value.keys.each { |k| value[k.to_sym] = value.delete(k) }
|
27
|
+
key_set[key.to_sym] = key_set.delete(key)
|
28
|
+
end
|
29
|
+
peer.merge(key_set)
|
30
|
+
peer.save
|
31
|
+
end
|
32
|
+
end
|
33
|
+
|
34
|
+
##
|
35
|
+
# Returns all peers' updated keys-values-timestamps in relation
|
36
|
+
# to a given digest in a Complete Structure
|
37
|
+
#
|
38
|
+
# Given structure +{'id1' => {'status' => 3234124325.234323}}+
|
39
|
+
#
|
40
|
+
# Return structure +{ peer_id => { key => {"value" => value, "timestamp" => timestamp}, ...},...}+
|
41
|
+
def newer(digest)
|
42
|
+
query = Query.new(Peer)
|
43
|
+
newer_keys = Hash.new
|
44
|
+
digest.each do |id, keys|
|
45
|
+
peer = query.where(id: id.to_s)[0]
|
46
|
+
newer_keys[id] = peer ? peer.newer(keys) : {}
|
47
|
+
end
|
48
|
+
|
49
|
+
newer_keys
|
50
|
+
end
|
51
|
+
|
52
|
+
##
|
53
|
+
# Returns all peers' outdated keys in relation to a given digest in a Old Key Structure
|
54
|
+
#
|
55
|
+
# Given structure +{'id1' => {'status' => 3234124325.234323}}+
|
56
|
+
#
|
57
|
+
# Return structure: +{peer_id => [old_key, ...], ...}+
|
58
|
+
def older(data)
|
59
|
+
query = Query.new(Peer)
|
60
|
+
older_keys = Hash.new
|
61
|
+
|
62
|
+
data.each do |id, dig|
|
63
|
+
peer = query.where(id: id.to_s)[0]
|
64
|
+
older_keys[id] = peer ? peer.older(dig) : dig.keys
|
65
|
+
end
|
66
|
+
|
67
|
+
older_keys
|
68
|
+
end
|
69
|
+
|
70
|
+
##
|
71
|
+
# Returns all the ids' keys/values/timestamps for the keys in the given hash
|
72
|
+
#
|
73
|
+
# Given structure +{id1: [:k1, :k2, :foo]}+
|
74
|
+
#
|
75
|
+
# Return structure +{peer_id => { key => {"value" => value, "timestamp" => timestamp}, ...},...}+
|
76
|
+
def retrieve(req)
|
77
|
+
query = Query.new(Peer)
|
78
|
+
data = Hash.new
|
79
|
+
|
80
|
+
req.each do |id, keys|
|
81
|
+
peer = query.where(id: id.to_s, status:'alive')[0]
|
82
|
+
data[id] = peer.values(keys) if peer
|
83
|
+
end
|
84
|
+
|
85
|
+
data
|
86
|
+
end
|
87
|
+
end
|
88
|
+
end
|
@@ -0,0 +1,27 @@
|
|
1
|
+
# lib/pokan/connection.rb
|
2
|
+
require 'singleton'
|
3
|
+
require 'redis'
|
4
|
+
|
5
|
+
module Pokan
|
6
|
+
|
7
|
+
# Connection is a singleton for redis connection
|
8
|
+
class Connection
|
9
|
+
##
|
10
|
+
# Redis connection
|
11
|
+
attr_reader :redis
|
12
|
+
include Singleton
|
13
|
+
|
14
|
+
# creates a new instance of Redis
|
15
|
+
def initialize
|
16
|
+
@redis = Redis.new
|
17
|
+
end
|
18
|
+
|
19
|
+
# Get the instance of the class Redis
|
20
|
+
# === Usage
|
21
|
+
# redis = Pokan::Connection.redis
|
22
|
+
# redis.flushall
|
23
|
+
def self.redis
|
24
|
+
instance.redis
|
25
|
+
end
|
26
|
+
end
|
27
|
+
end
|
data/lib/pokan/entity.rb
ADDED
@@ -0,0 +1,165 @@
|
|
1
|
+
# lib/pokan/entity.rb
|
2
|
+
|
3
|
+
module Pokan
|
4
|
+
|
5
|
+
##
|
6
|
+
# Entity is the class that takes care of data management, including versioning
|
7
|
+
# and persistence.
|
8
|
+
# == Basic Usage
|
9
|
+
# e = Entity.new
|
10
|
+
# e.id = 'my_id'
|
11
|
+
# e.store(:my_key, 'my_value')
|
12
|
+
# e.value(:my_key) # => 'my_value'
|
13
|
+
# e.save # persists the data
|
14
|
+
class Entity
|
15
|
+
|
16
|
+
attr_accessor :id
|
17
|
+
|
18
|
+
def initialize
|
19
|
+
@key_set = Hash.new
|
20
|
+
end
|
21
|
+
|
22
|
+
##
|
23
|
+
# Stores the value and the timestamp for a given key.
|
24
|
+
# If the key, which must be a symbol, already exists, it will be updated if
|
25
|
+
# the timestamp is greater.
|
26
|
+
# The timestamp defaults to Time.now.
|
27
|
+
def store(key, value, timestamp = Time.now)
|
28
|
+
if !@key_set.has_key?(key) || @key_set[key][:timestamp] < timestamp.to_f
|
29
|
+
@key_set[key] = { value: value, timestamp: timestamp.to_f }
|
30
|
+
end
|
31
|
+
end
|
32
|
+
|
33
|
+
##
|
34
|
+
# Gets the value of the requested key.
|
35
|
+
# The given key must be a symbol.
|
36
|
+
def value(key)
|
37
|
+
@key_set[key][:value] if @key_set.has_key?(key)
|
38
|
+
end
|
39
|
+
|
40
|
+
##
|
41
|
+
# Gets the current timestamp of the requested key
|
42
|
+
# The given key must be a symbol and the returned timestamp will be a Time object.
|
43
|
+
def timestamp(key)
|
44
|
+
if @key_set.has_key?(key)
|
45
|
+
Time.at(@key_set[key][:timestamp])
|
46
|
+
else
|
47
|
+
Time.at(0)
|
48
|
+
end
|
49
|
+
end
|
50
|
+
|
51
|
+
##
|
52
|
+
# Returns all keys of the peer in an array
|
53
|
+
def keys
|
54
|
+
@key_set.keys
|
55
|
+
end
|
56
|
+
|
57
|
+
##
|
58
|
+
# Returns all the values/timestamps for each given key in \'keys\' Array in
|
59
|
+
# the following structure: { key: { value: 'value', timestamp: <Time obj> },
|
60
|
+
# key2:... }
|
61
|
+
def values(keys)
|
62
|
+
key_values = Hash.new
|
63
|
+
keys.each do |k|
|
64
|
+
key_values[k] = @key_set[k.to_sym] || {}
|
65
|
+
end
|
66
|
+
key_values
|
67
|
+
end
|
68
|
+
|
69
|
+
##
|
70
|
+
# Gets a digest containing all entity's key/timestamp
|
71
|
+
# It returns a hash with all the pairs in the following structure: { key: timestamp, ... }
|
72
|
+
def digest
|
73
|
+
digest = Hash.new
|
74
|
+
@key_set.each {|k, v| digest[k] = v[:timestamp]}
|
75
|
+
digest
|
76
|
+
end
|
77
|
+
|
78
|
+
##
|
79
|
+
# Persists all the entity's data
|
80
|
+
def save
|
81
|
+
db = Connection.redis
|
82
|
+
|
83
|
+
raise "Could not save the data, id not defined" unless @id
|
84
|
+
@key_set.each do |k, v|
|
85
|
+
db.hset("entity:#{@id}:value", k.to_s, v[:value])
|
86
|
+
db.hset("entity:#{@id}:timestamp", k.to_s, v[:timestamp])
|
87
|
+
db.sadd("entity:#{@id}:keys", k.to_s)
|
88
|
+
end
|
89
|
+
db.sadd("entities", @id)
|
90
|
+
end
|
91
|
+
|
92
|
+
##
|
93
|
+
# Deletes all persisted data related to the entity
|
94
|
+
def destroy
|
95
|
+
db = Connection.redis
|
96
|
+
db.del("entity:#{@id}:key")
|
97
|
+
db.del("entity:#{@id}:timestamp")
|
98
|
+
db.del("entity:#{@id}:value")
|
99
|
+
db.srem('entities', @id)
|
100
|
+
end
|
101
|
+
|
102
|
+
##
|
103
|
+
# Gets all the data related to the peer from database.
|
104
|
+
# Newer keys that wasn\'t saved in any momentwill also be substituted.
|
105
|
+
def reload
|
106
|
+
db = Connection.redis
|
107
|
+
values = db.hgetall("entity:#{@id}:value")
|
108
|
+
timestamps = db.hgetall("entity:#{@id}:timestamp")
|
109
|
+
@key_set = Hash.new
|
110
|
+
values.each do |k, v|
|
111
|
+
@key_set[k.to_sym] = { value: v, timestamp: timestamps[k].to_f }
|
112
|
+
end
|
113
|
+
end
|
114
|
+
|
115
|
+
##
|
116
|
+
# Stores the keys-values-timestamps with timestamp greater than the
|
117
|
+
# entity's timestamp.
|
118
|
+
# The 'keys' parameter must have the following structure:
|
119
|
+
# { key: {value: 'value', timestamp: <Time obj> }, key1:.... }
|
120
|
+
def merge(keys)
|
121
|
+
keys.each do |key, data|
|
122
|
+
if data[:timestamp].to_f > timestamp(key).to_f
|
123
|
+
store(key, data[:value], Time.at(data[:timestamp].to_f))
|
124
|
+
end
|
125
|
+
end
|
126
|
+
end
|
127
|
+
|
128
|
+
##
|
129
|
+
# Returns all entity's outdated keys in relation to a given digest.
|
130
|
+
#
|
131
|
+
# Return Structure: [:old_key, ...]
|
132
|
+
def older(digest)
|
133
|
+
digest.select { |key, tmsp| Time.at(tmsp.to_f) > timestamp(key) }.keys
|
134
|
+
end
|
135
|
+
|
136
|
+
##
|
137
|
+
# Returns all peer's updated keys-values-timestamps in relation
|
138
|
+
# to a given digest.
|
139
|
+
#
|
140
|
+
# Return structure: { key: { value: 'value', timestamp: <Time obj> }, ... }
|
141
|
+
def newer(digest)
|
142
|
+
newer = digest.select { |key, tmsp| Time.at(tmsp.to_f) < timestamp(key) }
|
143
|
+
newer.each_key do |key|
|
144
|
+
newer[key] = { value: value(key), timestamp: timestamp(key).to_f }
|
145
|
+
end
|
146
|
+
end
|
147
|
+
|
148
|
+
##
|
149
|
+
# Verifies if the entity have the given keys with the same values.
|
150
|
+
# The query structure must be similar to: { key1: 'v', key2: ['1', '2'] }
|
151
|
+
def match?(query)
|
152
|
+
accepted = true
|
153
|
+
query.each do |key, value|
|
154
|
+
value = [value] unless value.is_a?(Array) || value.is_a?(Hash)
|
155
|
+
if value.is_a?(Array)
|
156
|
+
accepted = false unless value.include?(value(key))
|
157
|
+
else
|
158
|
+
accepted = false if value[:min] && value[:min] >= value(key)
|
159
|
+
accepted = false if value[:max] && value[:max] <= value(key)
|
160
|
+
end
|
161
|
+
end
|
162
|
+
accepted
|
163
|
+
end
|
164
|
+
end
|
165
|
+
end
|