simple-feed 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.codeclimate.yml +30 -0
- data/.gitignore +16 -0
- data/.rspec +2 -0
- data/.rubocop.yml +1156 -0
- data/.travis.yml +14 -0
- data/.yardopts +3 -0
- data/Gemfile +4 -0
- data/Guardfile +18 -0
- data/LICENSE.txt +21 -0
- data/README.md +457 -0
- data/Rakefile +16 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/examples/hash_provider_example.rb +24 -0
- data/examples/redis_provider_example.rb +28 -0
- data/examples/shared/provider_example.rb +66 -0
- data/lib/simple-feed.rb +1 -0
- data/lib/simple_feed.rb +1 -0
- data/lib/simplefeed.rb +61 -0
- data/lib/simplefeed/activity/base.rb +14 -0
- data/lib/simplefeed/activity/multi_user.rb +71 -0
- data/lib/simplefeed/activity/single_user.rb +70 -0
- data/lib/simplefeed/dsl.rb +38 -0
- data/lib/simplefeed/dsl/activities.rb +70 -0
- data/lib/simplefeed/dsl/formatter.rb +109 -0
- data/lib/simplefeed/event.rb +87 -0
- data/lib/simplefeed/feed.rb +78 -0
- data/lib/simplefeed/providers.rb +45 -0
- data/lib/simplefeed/providers/base/provider.rb +84 -0
- data/lib/simplefeed/providers/hash.rb +8 -0
- data/lib/simplefeed/providers/hash/paginator.rb +31 -0
- data/lib/simplefeed/providers/hash/provider.rb +169 -0
- data/lib/simplefeed/providers/proxy.rb +38 -0
- data/lib/simplefeed/providers/redis.rb +9 -0
- data/lib/simplefeed/providers/redis/boot_info.yml +99 -0
- data/lib/simplefeed/providers/redis/driver.rb +158 -0
- data/lib/simplefeed/providers/redis/provider.rb +255 -0
- data/lib/simplefeed/providers/redis/stats.rb +85 -0
- data/lib/simplefeed/providers/serialization/key.rb +82 -0
- data/lib/simplefeed/response.rb +77 -0
- data/lib/simplefeed/version.rb +3 -0
- data/man/running-the-example.png +0 -0
- data/man/sf-example.png +0 -0
- data/simple-feed.gemspec +44 -0
- metadata +333 -0
data/lib/simplefeed/providers/hash/provider.rb

@@ -0,0 +1,169 @@
+require 'base62-rb'
+require 'hashie'
+require 'set'
+require 'knjrbfw'
+
+require 'simplefeed/event'
+require_relative 'paginator'
+require_relative '../serialization/key'
+require_relative '../base/provider'
+
+module SimpleFeed
+  module Providers
+    module Hash
+      class Provider < ::SimpleFeed::Providers::Base::Provider
+        attr_accessor :h
+
+        include SimpleFeed::Providers::Hash::Paginator
+
+        def self.from_yaml(file)
+          self.new(YAML.load(File.read(file)))
+        end
+
+        def initialize(**opts)
+          self.h = {}
+          h.merge!(opts)
+        end
+
+        def store(user_ids:, value:, at: Time.now)
+          event = create_event(value, at)
+          with_response_batched(user_ids) do |key|
+            add_event(event, key)
+          end
+        end
+
+        def delete(user_ids:, value:, at: nil)
+          event = create_event(value, at)
+          with_response_batched(user_ids) do |key|
+            changed_activity_size?(key) do
+              __delete(key, event)
+            end
+          end
+        end
+
+        def delete_if(user_ids:, &block)
+          with_response_batched(user_ids) do |key|
+            activity(key).each do |event|
+              __delete(key, event) if yield(key.user_id, event)
+            end
+          end
+        end
+
+        def wipe(user_ids:)
+          with_response_batched(user_ids) do |key|
+            deleted = activity(key).size > 0
+            wipe_user_record(key)
+            deleted
+          end
+        end
+
+        def fetch(user_ids:)
+          with_response_batched(user_ids) do |key|
+            activity(key)
+          end
+        end
+
+        def paginate(user_ids:, page:, per_page: feed.per_page, **options)
+          reset_last_read(user_ids: user_ids) unless options[:peek]
+          with_response_batched(user_ids) do |key|
+            activity = activity(key)
+            (page && page > 0) ? activity[((page - 1) * per_page)...(page * per_page)] : activity
+          end
+        end
+
+        def reset_last_read(user_ids:, at: Time.now)
+          with_response_batched(user_ids) do |key|
+            user_record(key)[:last_read] = at
+            at
+          end
+        end
+
+        def total_count(user_ids:)
+          with_response_batched(user_ids) do |key|
+            activity(key).size
+          end
+        end
+
+        def unread_count(user_ids:)
+          with_response_batched(user_ids) do |key|
+            activity(key).count { |event| event.at > user_record(key).last_read.to_f }
+          end
+        end
+
+        def last_read(user_ids:)
+          with_response_batched(user_ids) do |key|
+            user_record(key).last_read
+          end
+        end
+
+        def total_memory_bytes
+          analyzer = Knj::Memory_analyzer::Object_size_counter.new(self.h)
+          analyzer.calculate_size
+        end
+
+        def total_users
+          self.h.size
+        end
+
+        private
+
+        #===================================================================
+        # Methods below operate on a single user only
+        #
+
+
+        def changed_activity_size?(key)
+          ua = activity(key)
+          size_before = ua.size
+          yield(key, ua)
+          size_after = activity(key).size
+          (size_before > size_after)
+        end
+
+        def create_user_record
+          Hashie::Mash.new(
+            { last_read: 0, activity: SortedSet.new }
+          )
+        end
+
+        def user_record(key)
+          h[key.data] ||= create_user_record
+        end
+
+        def wipe_user_record(key)
+          h[key.data] = create_user_record
+        end
+
+        def activity(key, event = nil)
+          user_record(key)[:activity] << event if event
+          user_record(key)[:activity].to_a
+        end
+
+        def add_event(event, key)
+          uas = user_record(key)[:activity]
+          if uas.include?(event)
+            false
+          else
+            uas << event.dup
+            if uas.size > feed.max_size
+              uas.delete(uas.to_a.last)
+            end
+            true
+          end
+        end
+
+        def __last_read(key, value = nil)
+          user_record(key)[:last_read]
+        end
+
+        def __delete(key, event)
+          user_record(key)[:activity].delete(event)
+        end
+
+        def create_event(*args, **opts)
+          ::SimpleFeed::Event.new(*args, **opts)
+        end
+      end
+    end
+  end
+end
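
The private helpers above reduce to a small in-memory structure: each serialized key maps to a `Hashie::Mash` holding a `last_read` timestamp and a `SortedSet` of events, trimmed back to `feed.max_size` on insert. A minimal standalone sketch of that record, using plain floats as stand-ins for `SimpleFeed::Event` objects (illustrative only, not code from the gem):

```ruby
# Illustrative sketch of the record built by create_user_record above.
# Floats stand in for SimpleFeed::Event; real events must be comparable so the
# SortedSet can keep them ordered.
require 'set'      # SortedSet ships here on Rubies of this era; Ruby 3+ needs the sorted_set gem
require 'hashie'

record = Hashie::Mash.new(last_read: 0, activity: SortedSet.new)

record.activity << 3.0
record.activity << 1.0
record.activity << 2.0

record.activity.to_a  # => [1.0, 2.0, 3.0] -- always returned sorted, mirroring #activity
record.last_read      # => 0               -- compared against event.at in #unread_count
```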
data/lib/simplefeed/providers/proxy.rb

@@ -0,0 +1,38 @@
+module SimpleFeed
+  module Providers
+    class Proxy
+      attr_accessor :provider
+
+      def self.from(definition)
+        if definition.is_a?(::Hash)
+          ::SimpleFeed.symbolize!(definition)
+          self.new(definition[:klass], *definition[:args], **definition[:opts])
+        else
+          self.new(definition)
+        end
+
+      end
+
+      def initialize(provider_or_klass, *args, **options)
+        if provider_or_klass.is_a?(::String)
+          self.provider = ::Object.const_get(provider_or_klass).new(*args, **options)
+        else
+          self.provider = provider_or_klass
+        end
+
+        SimpleFeed::Providers::REQUIRED_METHODS.each do |m|
+          raise ArgumentError, "Invalid provider #{provider.class}\nMethod '#{m}' is required." unless provider.respond_to?(m)
+        end
+      end
+
+      # Forward all other method calls to Provider
+      def method_missing(name, *args, &block)
+        if self.provider && provider.respond_to?(name)
+          self.provider.send(name, *args, &block)
+        else
+          super(name, *args, &block)
+        end
+      end
+    end
+  end
+end
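
`Proxy.from` above accepts either a ready provider instance or a `Hash` describing one (`:klass`, `:args`, `:opts`), and forwards everything else to the wrapped provider via `method_missing`. A hedged sketch of both call shapes, assuming `require 'simplefeed'` loads the Proxy and the Hash provider shown earlier in this diff (the argument values are illustrative):

```ruby
# Illustrative only -- not code from the gem.
require 'simplefeed'

# 1. From a hash definition: :klass is resolved with Object.const_get, then
#    instantiated with :args and :opts before the REQUIRED_METHODS check runs.
proxy = SimpleFeed::Providers::Proxy.from(
  klass: 'SimpleFeed::Providers::Hash::Provider',
  args:  [],
  opts:  {}
)

# 2. From an already-built provider instance.
proxy = SimpleFeed::Providers::Proxy.from(SimpleFeed::Providers::Hash::Provider.new)

# Calls the Proxy itself doesn't define are forwarded to the wrapped provider:
proxy.total_users  # => 0, answered by the Hash provider via method_missing
```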
data/lib/simplefeed/providers/redis/boot_info.yml

@@ -0,0 +1,99 @@
+---
+redis_version: 3.2.5
+redis_git_sha1: '00000000'
+redis_git_dirty: '0'
+redis_build_id: 9fe990583d8f1fbf
+redis_mode: standalone
+os: Darwin 16.1.0 x86_64
+arch_bits: '64'
+multiplexing_api: kqueue
+gcc_version: 4.2.1
+process_id: '12404'
+run_id: de08831d36013df78346b72b3ffc3f1f45147780
+tcp_port: '6379'
+uptime_in_seconds: '26'
+uptime_in_days: '0'
+hz: '10'
+lru_clock: '4352014'
+executable: "/usr/local/opt/redis/bin/redis-server"
+config_file: "/usr/local/etc/redis.conf"
+connected_clients: '1'
+client_longest_output_list: '0'
+client_biggest_input_buf: '0'
+blocked_clients: '0'
+used_memory: '1008384'
+used_memory_human: 984.75K
+used_memory_rss: '2027520'
+used_memory_rss_human: 1.93M
+used_memory_peak: '1008384'
+used_memory_peak_human: 984.75K
+total_system_memory: '17179869184'
+total_system_memory_human: 16.00G
+used_memory_lua: '37888'
+used_memory_lua_human: 37.00K
+maxmemory: '0'
+maxmemory_human: 0B
+maxmemory_policy: noeviction
+mem_fragmentation_ratio: '2.01'
+mem_allocator: libc
+loading: '0'
+rdb_changes_since_last_save: '0'
+rdb_bgsave_in_progress: '0'
+rdb_last_save_time: '1480746996'
+rdb_last_bgsave_status: ok
+rdb_last_bgsave_time_sec: "-1"
+rdb_current_bgsave_time_sec: "-1"
+aof_enabled: '0'
+aof_rewrite_in_progress: '0'
+aof_rewrite_scheduled: '0'
+aof_last_rewrite_time_sec: "-1"
+aof_current_rewrite_time_sec: "-1"
+aof_last_bgrewrite_status: ok
+aof_last_write_status: ok
+total_connections_received: '1'
+total_commands_processed: '1'
+instantaneous_ops_per_sec: '0'
+total_net_input_bytes: '43'
+total_net_output_bytes: '2168'
+instantaneous_input_kbps: '0.00'
+instantaneous_output_kbps: '0.00'
+rejected_connections: '0'
+sync_full: '0'
+sync_partial_ok: '0'
+sync_partial_err: '0'
+expired_keys: '0'
+evicted_keys: '0'
+keyspace_hits: '0'
+keyspace_misses: '0'
+pubsub_channels: '0'
+pubsub_patterns: '0'
+latest_fork_usec: '0'
+migrate_cached_sockets: '0'
+role: master
+connected_slaves: '0'
+master_repl_offset: '0'
+repl_backlog_active: '0'
+repl_backlog_size: '1048576'
+repl_backlog_first_byte_offset: '0'
+repl_backlog_histlen: '0'
+used_cpu_sys: '0.02'
+used_cpu_user: '0.01'
+used_cpu_sys_children: '0.00'
+used_cpu_user_children: '0.00'
+cluster_enabled: '0'
+db0: keys=0,expires=0,avg_ttl=0
+db1: keys=0,expires=0,avg_ttl=0
+db2: keys=0,expires=0,avg_ttl=0
+db3: keys=0,expires=0,avg_ttl=0
+db4: keys=0,expires=0,avg_ttl=0
+db5: keys=0,expires=0,avg_ttl=0
+db6: keys=0,expires=0,avg_ttl=0
+db7: keys=0,expires=0,avg_ttl=0
+db8: keys=0,expires=0,avg_ttl=0
+db9: keys=0,expires=0,avg_ttl=0
+db10: keys=0,expires=0,avg_ttl=0
+db11: keys=0,expires=0,avg_ttl=0
+db12: keys=0,expires=0,avg_ttl=0
+db13: keys=0,expires=0,avg_ttl=0
+db14: keys=0,expires=0,avg_ttl=0
+db15: keys=0,expires=0,avg_ttl=0
data/lib/simplefeed/providers/redis/driver.rb

@@ -0,0 +1,158 @@
+require 'redis'
+require 'redis/connection/hiredis'
+require 'connection_pool'
+require 'colored2'
+require 'hashie/mash'
+require 'yaml'
+require 'pp'
+
+module SimpleFeed
+  module Providers
+    module Redis
+      @debug = ENV['REDIS_DEBUG']
+
+      def self.debug?
+        self.debug
+      end
+
+      class << self
+        attr_accessor :debug
+      end
+
+      module Driver
+        class Error < StandardError;
+        end
+
+        class LoggingRedis < Struct.new(:redis)
+          def method_missing(m, *args, &block)
+            if redis.respond_to?(m)
+              result = redis.send(m, *args, &block)
+              STDERR.printf "%40s %s\n", "#{m.to_s.upcase.bold.red}", "#{args.inspect.gsub(/[\[\]]/, '').magenta}"
+              result
+            else
+              super
+            end
+          end
+        end
+
+        def debug?
+          SimpleFeed::Providers::Redis.debug?
+        end
+
+        attr_accessor :pool
+
+=begin
+
+Various ways of defining a new Redis driver:
+
+  SimpleFeed::Redis::Driver.new(pool: ConnectionPool.new(size: 2) { Redis.new })
+  SimpleFeed::Redis::Driver.new(redis: -> { Redis.new }, pool_size: 2)
+  SimpleFeed::Redis::Driver.new(redis: Redis.new)
+  SimpleFeed::Redis::Driver.new(redis: { host: 'localhost', port: 6379, db: 1, timeout: 0.2 }, pool_size: 1)
+
+=end
+        def initialize(**opts)
+          if opts[:pool] && opts[:pool].respond_to?(:with)
+            self.pool = opts[:pool]
+
+          elsif opts[:redis]
+            redis = opts[:redis]
+            redis_proc = nil
+
+            if redis.is_a?(::Hash)
+              redis_proc = -> { ::Redis.new(**opts[:redis]) }
+            elsif redis.is_a?(::Proc)
+              redis_proc = redis
+            elsif redis.is_a?(::Redis)
+              redis_proc = -> { redis }
+            end
+
+            if redis_proc
+              self.pool = ::ConnectionPool.new(size: (opts[:pool_size] || 2)) do
+                redis_proc.call
+              end
+            end
+          end
+
+          raise ArgumentError, "Unable to construct Redis connection from arguments: #{opts.inspect}" unless self.pool && self.pool.respond_to?(:with)
+        end
+
+        %i(set get incr decr setex expire del setnx exists zadd zrange).each do |method|
+          define_method(method) do |*args|
+            self.exec method, *args
+          end
+        end
+
+        alias_method :delete, :del
+        alias_method :rm, :del
+        alias_method :exists?, :exists
+
+        def exec(redis_method, *args, **opts, &block)
+          send_proc = redis_method if redis_method.respond_to?(:call)
+          send_proc ||= ->(redis) { redis.send(redis_method, *args, &block) }
+
+          if opts[:pipelined]
+            opts.delete :pipelined
+            with_pipelined { |redis| send_proc.call(redis) }
+          else
+            with_redis { |redis| send_proc.call(redis) }
+          end
+        end
+
+        class MockRedis
+          def method_missing(name, *args, &block)
+            puts "calling redis.#{name}(#{args.to_s.gsub(/[\[\]]/, '')}) { #{block ? block.call : nil} }"
+          end
+        end
+
+        def with_redis
+          with_retries do
+            pool.with do |redis|
+              yield(self.debug? ? LoggingRedis.new(redis) : redis)
+            end
+          end
+        end
+
+        def with_pipelined
+          with_retries do
+            with_redis do |redis|
+              redis.pipelined do
+                yield(redis)
+              end
+            end
+          end
+        end
+
+        def with_multi
+          with_retries do
+            with_redis do |redis|
+              redis.multi do
+                yield(redis)
+              end
+            end
+          end
+        end
+
+        def with_retries(tries = 3)
+          yield(tries)
+        rescue Errno::EINVAL => e
+          on_error e
+        rescue ::Redis::BaseConnectionError => e
+          if (tries -= 1) > 0
+            sleep rand(0..0.01)
+            retry
+          else
+            on_error e
+          end
+        rescue ::Redis::CommandError => e
+          (e.message =~ /loading/i || e.message =~ /connection/i) ? on_error(e) : raise(e)
+        end
+
+        def on_error(e)
+          raise Error.new(e)
+        end
+
+      end
+    end
+  end
+end
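
`Driver` is a mixin: the class that includes it gains `#exec`, `#with_redis`, `#with_pipelined`, `#with_multi`, and the delegated Redis commands (`set`, `get`, `zadd`, ...), all running through the `ConnectionPool` assembled in `#initialize`. A hedged sketch of exercising it directly by including it into a throwaway class (the class and key names are made up; a Redis server on `127.0.0.1:6379` and the gems required at the top of the file are assumed):

```ruby
# Illustrative only -- not code from the gem.
require 'simplefeed/providers/redis/driver'

class TinyRedisClient
  include SimpleFeed::Providers::Redis::Driver
end

# Constructed the same ways the =begin block above documents; here from a redis
# options hash plus a pool size.
client = TinyRedisClient.new(redis: { host: '127.0.0.1', port: 6379, db: 1 }, pool_size: 2)

client.set('simplefeed:demo', 'hello')  # delegated methods route through #exec -> #with_redis
client.get('simplefeed:demo')           # => "hello"
client.exec(:del, 'simplefeed:demo')    # any other Redis command can go through #exec

# Setting REDIS_DEBUG in the environment before loading the file makes #with_redis
# hand out a LoggingRedis wrapper that prints each command to STDERR.
```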