couchdb-extras 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
bin/couchdb2couchdb ADDED
@@ -0,0 +1,20 @@
+ #!/usr/bin/env ruby
+
+ require 'couch_http'
+
+ ARGV.each {|couch|
+   puts "Server #{couch}"
+   get("#{couch}/_all_dbs")
+     .select { |db| !db.start_with?("_") && !db.end_with?("_history")}
+     .each { |db|
+       puts "Database #{couch}/#{db}"
+       ARGV.select {|server| server != couch}
+         .each {|other_couch|
+           replication = {:source=>"#{couch}/#{db}",:target=>"#{other_couch}/#{db}",:continuous=>true,:create_target=>true}
+           puts "Replication #{replication}"
+           r = post("#{couch}/_replicator",replication)
+           puts "#{replication} = #{r}"
+         }
+     }
+ }
+
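The script above (the couchdb2couchdb executable) walks every non-system database on every server given on the command line and, for each other server, posts a continuous replication document to the source server's _replicator database. A minimal sketch of the document it builds; the server URLs and database name below are hypothetical:

# Sketch only: the replication document couchdb2couchdb posts to /_replicator.
# The server URLs and database name are hypothetical.
require 'json'

source_couch = "http://couch-a.example:5984"  # hypothetical
target_couch = "http://couch-b.example:5984"  # hypothetical
db           = "plants"                       # hypothetical

replication = {
  :source        => "#{source_couch}/#{db}",
  :target        => "#{target_couch}/#{db}",
  :continuous    => true,  # keep replicating as new changes arrive
  :create_target => true   # create the database on the target if it is missing
}

puts replication.to_json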
bin/couchdb2elasticsearch ADDED
@@ -0,0 +1,119 @@
+ #!/usr/bin/env ruby
+
+ # a bot is unique in its arguments
+ require 'base64'
+ id = Base64.encode64("#{ARGV[0]}+#{ARGV[1]}").gsub("=","").gsub("\n","")
+
+ # ensure no overlap
+ exit 1 unless File.new("/tmp/bot_es_#{id}.lock", "a").flock(File::LOCK_EX | File::LOCK_NB)
+
+ # read args
+ @couch = ARGV[0] || "http://localhost:5984";
+ @es = ARGV[1] || "http://localhost:9200";
+
+ require 'couch_http'
+
+ def onchange(db,change)
+   if !change["doc"].has_key?("deleted") && !change["doc"].has_key?("_deleted")
+     doc = change["doc"].clone
+     doc["id"] = doc["_id"]
+     doc["rev"] = doc["_rev"]
+     doc.delete("_id")
+     doc.delete("_rev")
+     doc.delete("_attachments")
+     type = "default"
+     if doc.has_key?("metadata") && doc["metadata"].has_key?("type")
+       type = doc["metadata"]["type"]
+     elsif doc.has_key?("type")
+       type = doc["type"]
+     end
+     response = post("#{@es}/#{db}/#{URI.encode(type)}/#{URI.encode(doc["id"])}",doc)
+     puts "post #{@es}/#{db}/#{type}/#{doc["id"]} #{response}"
+   elsif change["doc"].has_key?("_deleted") || change["doc"].has_key?("deleted")
+     doc = change["doc"].clone
+     q = URI.encode("id:\"#{doc["_id"]}\"");
+     response = delete("#{@es}/#{db}/_query?q=#{q}")
+     puts "delete #{@es}/#{db}/_query?q=#{q} #{ response }"
+   end
+ end
+
+ def listen_changes(db)
+   puts "listen #{db}"
+
+   system("echo 0 > /tmp/es_changes_#{db}.last_seq") #unless File.exists?("/tmp/es_changes_#{db}.last_seq")
+
+   while true do
+     # reads last sequence
+     last_file = File.open("/tmp/es_changes_#{db}.last_seq",'r');
+     last_seq = last_file.gets.to_i;
+     last_file.close
+
+     puts "info #{db} #{last_seq}"
+
+     # get latest changes
+     changes_uri = URI("#{@couch}/#{db}/_changes?since=#{last_seq}&limit=500&feed=longpoll&include_docs=true");
+     changes = JSON.parse(Net::HTTP.get(changes_uri))['results']
+     changes.each { |change|
+       onchange(db,change)
+       last_seq = change["seq"]
+     }
+
+     # writes last_seq
+     system("echo #{last_seq} > /tmp/es_changes_#{db}.last_seq")
+   end
+ end
+
+ puts "Starting..."
+
+ # hold the threads
+ dbs_changes = {}
+
+ # listen dbs
+ Thread.new do
+   while true do
+     update = get("#{@couch}/_db_updates?feed=longpoll");
+     Thread.new do
+       ok = update["ok"]
+       db = update["db_name"]
+       type = update["type"]
+       if ok && !db.start_with?("_") && !db.end_with?("_history") then
+         if type == "created" then
+           puts "DB created: #{db}"
+           r = put("#{@es}/#{db}",{});
+           puts "Creating #{@es}/#{db} = #{r}"
+           dbs_changes[db] = Thread.new do
+             listen_changes(db)
+           end
+         elsif type == 'deleted'
+           puts "DB deleted: #{db}"
+           dbs_changes[db].stop();
+           r = delete("#{@es}/#{db}");
+           puts "Delete #{@es}/#{db} = #{r}"
+           system("echo 0 > /tmp/es_changes_#{db}.last_seq")
+         end
+       end
+     end
+   end
+ end
+
+ # initial run
+ get("#{@couch}/_all_dbs")
+   .select { |db| !db.start_with?("_") && !db.end_with?("_history")}
+   .each { |db|
+     puts "DB existed: #{db}"
+     r = put("#{@es}/#{db}",{});
+     puts "Creating #{@es}/#{db} = #{r}"
+     dbs_changes[db] = Thread.new do
+       listen_changes(db)
+     end
+   }
+
+
+ puts "Started"
+
+ while true
+   sleep 10
+ end
+
+ puts "Ended"
+
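couchdb2elasticsearch (above) long-polls _changes?include_docs=true on each database, persists the last processed sequence under /tmp, and indexes every live document into Elasticsearch after renaming the CouchDB bookkeeping fields. A standalone sketch of that reshaping step; the helper name and the sample document are made up for illustration:

# Sketch only: mirrors the field handling done in onchange above.
# reshape_for_es and the sample document are hypothetical.
def reshape_for_es(doc)
  out = doc.clone
  out["id"]  = out.delete("_id")   # keep the CouchDB id/rev inside the indexed body
  out["rev"] = out.delete("_rev")
  out.delete("_attachments")       # attachment stubs are not indexed
  type = "default"
  if out["metadata"].is_a?(Hash) && out["metadata"].has_key?("type")
    type = out["metadata"]["type"]
  elsif out.has_key?("type")
    type = out["type"]
  end
  [type, out]
end

type, body = reshape_for_es(
  {"_id" => "doc-1", "_rev" => "1-abc", "metadata" => {"type" => "specimen"}})
puts "#{type}: #{body}"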
bin/couchdb2history ADDED
@@ -0,0 +1,118 @@
+ #!/usr/bin/env ruby
+
+ # a bot is unique in its arguments
+ require 'base64'
+ id = Base64.encode64("#{ARGV[0]}+#{ARGV[1]}").gsub("=","").gsub("\n","")
+
+ # ensure no overlap
+ exit 1 unless File.new("/tmp/h_bot_#{id}.lock", "a").flock(File::LOCK_EX | File::LOCK_NB)
+
+ require 'couch_http'
+
+ def past(couch,db,id)
+   get("#{couch}/#{db}/#{id}?revs_info=true")["_revs_info"]
+     .reverse
+     .select {|r| r["status"] == 'available'}
+     .slice([1,2])
+     .each {|r|
+       doc = get("#{couch}/#{db}/#{id}?rev=#{r["rev"]}")
+       doc["_id"] = "#{doc["_id"]}:#{doc["_rev"]}"
+       doc.delete("_rev")
+       doc.delete("_attachments")
+       response = post("#{couch}/#{db}_history",doc)
+       puts "post #{couch}/#{db}/#{doc["_id"]} = #{response}"
+     }
+ end
+
+ def listen_changes(couch,db)
+   puts "listen #{couch}/#{db}"
+   id = Base64.encode64("#{couch}/#{db}").gsub("=","").gsub("\n","")
+   system("echo 0 > /tmp/h_changes_#{id}.last_seq") # unless File.exists?("/tmp/h_changes_#{id}.last_seq")
+   while true do
+     # reads last sequence
+     last_file = File.open("/tmp/h_changes_#{id}.last_seq",'r');
+     last_seq = last_file.gets.to_i;
+     last_file.close
+
+     puts "info #{couch}/#{db} (#{id}) #{last_seq}"
+
+     # get latest changes
+     changes_uri = URI("#{couch}/#{db}/_changes?since=#{last_seq}&limit=500&feed=longpoll&include_docs=true");
+     changes = JSON.parse(Net::HTTP.get(changes_uri))['results']
+     changes.each { |change|
+       if !change["doc"].has_key?("deleted") && !change["doc"].has_key?("_deleted")
+         doc = change["doc"].clone
+         doc["_id"] = "#{doc["_id"]}:#{doc["_rev"]}"
+         doc.delete("_rev")
+         doc.delete("_attachments")
+         response = post("#{couch}/#{db}_history",doc)
+         puts "post #{couch}/#{db}/#{change["id"]} = #{response}"
+         Thread.new do
+           past(couch,db,change["id"])
+         end
+       end
+       last_seq = change["seq"]
+     }
+
+     # writes last_seq
+     system("echo #{last_seq} > /tmp/h_changes_#{id}.last_seq")
+   end
+ end
+
+ puts "Starting..."
+
+ ARGV.each {|arg|
+   couch = arg.clone
+   puts "Server #{couch}"
+
+   # hold the threads
+   dbs_changes = {}
+
+   # listen dbs
+   Thread.new do
+     couch = couch.clone
+     while true do
+       update = get("#{couch}/_db_updates?feed=longpoll");
+       Thread.new do
+         ok = update["ok"]
+         db = update["db_name"]
+         type = update["type"]
+         if ok && !db.start_with?("_") && !db.end_with?("_history") then
+           if type == "created" then
+             puts "DB created: #{db}"
+             r = put("#{couch}/#{db}_history",{});
+             puts "Create #{couch}/#{db}_history = #{r}"
+             dbs_changes["#{couch}/#{db}"] = Thread.new do
+               listen_changes(couch,db)
+             end
+           elsif type == 'deleted'
+             puts "DB deleted: #{db}"
+             dbs_changes["#{couch}/#{db}"].stop();
+           end
+         end
+       end
+     end
+   end
+
+   # initial run
+   get("#{couch}/_all_dbs")
+     .select { |db| !db.start_with?("_") && !db.end_with?("_history")}
+     .each { |db|
+       puts "DB existed: #{couch}/#{db}"
+       r = put("#{couch}/#{db}_history",{});
+       puts "Create #{couch}/#{db}_history = #{r}"
+       dbs_changes["#{couch}/#{db}"] = Thread.new do
+         listen_changes(couch,db)
+       end
+     }
+
+ }
+
+ puts "Started"
+
+ while true
+   sleep 10
+ end
+
+ puts "Ended"
+
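couchdb2history (above) copies every changed document into a sibling <db>_history database, rekeyed by id and revision so that each revision is retained even after compaction, and spawns a thread that back-fills still-available older revisions via ?revs_info=true. A small sketch of the rekeying; the helper name and sample document are made up:

# Sketch only: how a revision is rekeyed before being written to <db>_history.
# history_copy and the sample document are hypothetical.
def history_copy(doc)
  copy = doc.clone
  copy["_id"] = "#{copy["_id"]}:#{copy["_rev"]}"  # one history document per revision
  copy.delete("_rev")                             # the history db assigns its own revisions
  copy.delete("_attachments")
  copy
end

puts history_copy({"_id" => "doc-1", "_rev" => "3-def", "name" => "x"}).inspect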
data/lib/couch_http.rb ADDED
@@ -0,0 +1,36 @@
+
+ require 'net/http'
+ require 'uri'
+ require 'json'
+
+ def get(uri)
+   JSON.parse(Net::HTTP.get(URI(uri)))
+ end
+
+ def put(uri,doc)
+   uri = URI.parse(uri)
+   header = {'Content-Type'=> 'application/json'}
+   http = Net::HTTP.new(uri.host, uri.port)
+   request = Net::HTTP::Put.new(uri.request_uri, header)
+   request.body = doc.to_json
+   response = http.request(request)
+   JSON.parse(response.body)
+ end
+
+ def post(uri,doc)
+   uri = URI.parse(uri)
+   header = {'Content-Type'=> 'application/json'}
+   http = Net::HTTP.new(uri.host, uri.port)
+   request = Net::HTTP::Post.new(uri.request_uri, header)
+   request.body = doc.to_json
+   response = http.request(request)
+   JSON.parse(response.body)
+ end
+
+ def delete(uri)
+   uri = URI.parse(uri)
+   http = Net::HTTP.new(uri.host, uri.port)
+   request = Net::HTTP::Delete.new(uri.request_uri)
+   response = http.request(request)
+   JSON.parse(response.body)
+ end
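couch_http.rb gives all three executables the same thin wrappers over Net::HTTP: request bodies are serialized with to_json and responses are parsed with JSON.parse. A short usage sketch; the database name is hypothetical and it assumes the gem's lib directory is on the load path:

# Sketch only: exercising the get/put/post/delete helpers against a local CouchDB.
# "example_db" is a hypothetical database name.
require 'couch_http'

couch = "http://localhost:5984"

puts get("#{couch}/_all_dbs").inspect                      # list databases
puts put("#{couch}/example_db", {}).inspect                # create a database
puts post("#{couch}/example_db", {"name" => "x"}).inspect  # create a document
puts delete("#{couch}/example_db").inspect                 # drop the database again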
metadata ADDED
@@ -0,0 +1,52 @@
+ --- !ruby/object:Gem::Specification
+ name: couchdb-extras
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ prerelease:
+ platform: ruby
+ authors:
+ - Diogo Silva
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-09-26 00:00:00.000000000 Z
+ dependencies: []
+ description: CouchDB to ElasticSearch, CouchDB to CouchDB History and CouchDB to CouchDB.
+ email: diogo@diogok.net
+ executables:
+ - couchdb2couchdb
+ - couchdb2history
+ - couchdb2elasticsearch
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/couch_http.rb
+ - bin/couchdb2couchdb
+ - bin/couchdb2history
+ - bin/couchdb2elasticsearch
+ homepage: https://github.com/cncflora/couchdb-extras
+ licenses:
+ - MIT
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.23
+ signing_key:
+ specification_version: 3
+ summary: Small CouchDB utils
+ test_files: []