puppetdb_query 0.0.3 → 0.0.4
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/puppetdb_query/mongodb.rb +20 -17
- data/lib/puppetdb_query/operator.rb +0 -0
- data/lib/puppetdb_query/parser.rb +0 -0
- data/lib/puppetdb_query/puppetdb.rb +5 -5
- data/lib/puppetdb_query/sync.rb +48 -0
- data/lib/puppetdb_query/term.rb +0 -0
- data/lib/puppetdb_query/to_mongo.rb +0 -0
- data/lib/puppetdb_query/tokenizer.rb +0 -0
- data/lib/puppetdb_query/updater.rb +46 -11
- data/lib/puppetdb_query/version.rb +1 -1
- data/lib/puppetdb_query.rb +1 -0
- metadata +2 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: c33cfa1bbb82f1324f6716de9d17ed70308b35d2
|
4
|
+
data.tar.gz: 51b845298b9ded7a278ccf96481c96f2b287c936
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 5f6c17658656b5452215c8f297bcd76ab4a48c1305e04b23e36e4fb9f1a504c5d0a9abe8256767b8295e8fb211a7d71c4ac5ccd35c8ae1b9acadb93c3f44cb35
|
7
|
+
data.tar.gz: 3eaeae64fda4338cf502e9bd2b4a304d24e6f975ff9c5310e74c0b91d6cc989d4fd5fee062f9a4df4ea6aa34a6bc2457a4b01b4c7a449121e8ba271d7eac2c3a
|
@@ -8,7 +8,7 @@ module PuppetDBQuery
|
|
8
8
|
include Logging
|
9
9
|
attr_reader :connection
|
10
10
|
attr_reader :nodes_collection
|
11
|
-
attr_reader :
|
11
|
+
attr_reader :node_properties_collection
|
12
12
|
attr_reader :meta_collection
|
13
13
|
|
14
14
|
# initialize access to mongodb
|
@@ -18,12 +18,12 @@ module PuppetDBQuery
|
|
18
18
|
#
|
19
19
|
# @param connection mongodb connection, should already be switched to correct database
|
20
20
|
# @param nodes symbol for collection that contains nodes with their facts
|
21
|
-
# @param
|
21
|
+
# @param node_properties symbol for collection for nodes with their update timestamps
|
22
22
|
# @param meta symbol for collection with update metadata
|
23
|
-
def initialize(connection, nodes = :nodes,
|
23
|
+
def initialize(connection, nodes = :nodes, node_properties = :node_properties, meta = :meta)
|
24
24
|
@connection = connection
|
25
25
|
@nodes_collection = nodes
|
26
|
-
@
|
26
|
+
@node_properties_collection = node_properties
|
27
27
|
@meta_collection = meta
|
28
28
|
end
|
29
29
|
|
@@ -89,18 +89,26 @@ module PuppetDBQuery
|
|
89
89
|
end
|
90
90
|
|
91
91
|
# update node properties
|
92
|
-
def node_properties_update(new_node_properties
|
93
|
-
collection = connection[
|
92
|
+
def node_properties_update(new_node_properties)
|
93
|
+
collection = connection[node_properties_collection]
|
94
94
|
old_names = collection.find.batch_size(999).projection(_id: 1).map { |k| k[:_id] }
|
95
95
|
delete = old_names - new_node_properties.keys
|
96
|
-
collection.insert_many(
|
96
|
+
collection.insert_many(new_node_properties.map { |k, v| v.dup.tap { v[:_id] = k } })
|
97
97
|
collection.delete_many(_id: { '$in' => delete })
|
98
|
-
|
98
|
+
end
|
99
|
+
|
100
|
+
# update or insert timestamps for given fact update method
|
101
|
+
def meta_fact_update(method, ts_begin, ts_end)
|
99
102
|
connection[meta_collection].find_one_and_update(
|
100
103
|
{},
|
101
104
|
{
|
102
105
|
'$set' => {
|
103
|
-
|
106
|
+
last_fact_update: {
|
107
|
+
ts_begin: ts_begin,
|
108
|
+
ts_end: ts_end,
|
109
|
+
method: method
|
110
|
+
},
|
111
|
+
method => {
|
104
112
|
ts_begin: ts_begin,
|
105
113
|
ts_end: ts_end
|
106
114
|
}
|
@@ -110,18 +118,13 @@ module PuppetDBQuery
|
|
110
118
|
)
|
111
119
|
end
|
112
120
|
|
113
|
-
# update or insert timestamps for
|
114
|
-
def
|
121
|
+
# update or insert timestamps for node_properties_update
|
122
|
+
def meta_node_properties_update(ts_begin, ts_end)
|
115
123
|
connection[meta_collection].find_one_and_update(
|
116
124
|
{},
|
117
125
|
{
|
118
126
|
'$set' => {
|
119
|
-
|
120
|
-
ts_begin: ts_begin,
|
121
|
-
ts_end: ts_end,
|
122
|
-
method: method
|
123
|
-
},
|
124
|
-
method => {
|
127
|
+
last_node_properties_update: {
|
125
128
|
ts_begin: ts_begin,
|
126
129
|
ts_end: ts_end
|
127
130
|
}
|
File without changes
|
File without changes
|
@@ -24,8 +24,8 @@ module PuppetDBQuery
|
|
24
24
|
api_nodes.map { |data| data['certname'] }
|
25
25
|
end
|
26
26
|
|
27
|
-
# get
|
28
|
-
def
|
27
|
+
# get hash of node update properties
|
28
|
+
def node_properties
|
29
29
|
result = {}
|
30
30
|
api_nodes.each do |data|
|
31
31
|
next if data['deactivated']
|
@@ -41,7 +41,7 @@ module PuppetDBQuery
|
|
41
41
|
# get all nodes that have updated facts
|
42
42
|
def nodes_update_facts_since(timestamp)
|
43
43
|
ts = (timestamp.is_a?(String) ? Time.iso8601(ts) : timestamp)
|
44
|
-
|
44
|
+
node_properties.delete_if do |_k, data|
|
45
45
|
# TODO: in '/v3/nodes' we must take 'facts_timestamp'
|
46
46
|
!data["facts-timestamp"] || Time.iso8601(data["facts-timestamp"]) < ts
|
47
47
|
end.keys
|
@@ -74,7 +74,7 @@ module PuppetDBQuery
|
|
74
74
|
|
75
75
|
def get_json(url, timeout)
|
76
76
|
@lock.synchronize do
|
77
|
-
logger.info "get json from #{url}"
|
77
|
+
logger.info " get json from #{url}"
|
78
78
|
uri = URI.parse(url)
|
79
79
|
http = Net::HTTP.new(uri.host, uri.port)
|
80
80
|
http.use_ssl = uri.scheme == 'https'
|
@@ -82,7 +82,7 @@ module PuppetDBQuery
|
|
82
82
|
request = Net::HTTP::Get.new(uri.request_uri)
|
83
83
|
request['Accept'] = "application/json"
|
84
84
|
response = http.request(request)
|
85
|
-
logger.info "
|
85
|
+
logger.info " got #{response.body.size} characters from #{url}"
|
86
86
|
JSON.parse(response.body)
|
87
87
|
end
|
88
88
|
end
|
@@ -0,0 +1,48 @@
|
|
1
|
+
require_relative 'logging'
|
2
|
+
require_relative 'updater'
|
3
|
+
|
4
|
+
module PuppetDBQuery
|
5
|
+
# sync node and fact data from source to destination
|
6
|
+
class Sync
|
7
|
+
include Logging
|
8
|
+
|
9
|
+
attr_reader :source
|
10
|
+
attr_reader :destination
|
11
|
+
|
12
|
+
def initialize(source, destination)
|
13
|
+
@source = source
|
14
|
+
@destination = destination
|
15
|
+
end
|
16
|
+
|
17
|
+
def sync(minutes = 5, seconds = 10)
|
18
|
+
logger.info "syncing puppetdb nodes and facts started"
|
19
|
+
Timeout.timeout(60 * minutes - seconds) do
|
20
|
+
updater = PuppetDBQuery::Updater.new(source, destination)
|
21
|
+
|
22
|
+
updater.update_node_properties
|
23
|
+
|
24
|
+
# make a full update
|
25
|
+
timestamp = Time.now
|
26
|
+
updater.update2
|
27
|
+
|
28
|
+
# make delta updates til our time is up
|
29
|
+
loop do
|
30
|
+
begin
|
31
|
+
ts = Time.now
|
32
|
+
updater.update3(timestamp - 2)
|
33
|
+
timestamp = ts
|
34
|
+
rescue
|
35
|
+
logger.error $!
|
36
|
+
end
|
37
|
+
logger.info "sleep for #{seconds} seconds"
|
38
|
+
sleep(seconds)
|
39
|
+
end
|
40
|
+
end
|
41
|
+
logger.info "syncing puppetdb nodes and facts ended"
|
42
|
+
rescue Timeout::Error
|
43
|
+
logger.info "syncing puppetdb nodes: now our time is up, we finish"
|
44
|
+
rescue
|
45
|
+
logger.error $!
|
46
|
+
end
|
47
|
+
end
|
48
|
+
end
|
data/lib/puppetdb_query/term.rb
CHANGED
File without changes
|
File without changes
|
File without changes
|
@@ -17,19 +17,24 @@ module PuppetDBQuery
|
|
17
17
|
# update or insert facts for each one
|
18
18
|
#
|
19
19
|
# 335.6 seconds: update time for 1561 nodes
|
20
|
-
def
|
20
|
+
def update1
|
21
|
+
logger.info "update1 started (full update)"
|
22
|
+
tsb = Time.now
|
21
23
|
source_nodes = source.nodes
|
22
24
|
destination_nodes = destination.nodes
|
23
|
-
(destination_nodes
|
24
|
-
|
25
|
-
end
|
25
|
+
delete_missing(destination_nodes, source_nodes)
|
26
|
+
errors = false
|
26
27
|
source_nodes.each do |node|
|
27
28
|
begin
|
28
29
|
destination.node_update(node, source.node_facts(node))
|
29
30
|
rescue
|
31
|
+
errors = true
|
30
32
|
logging.error $!
|
31
33
|
end
|
32
34
|
end
|
35
|
+
tse = Time.now
|
36
|
+
logger.info "update1 updated #{source_nodes.size} nodes in #{tse - tsb}"
|
37
|
+
destination.meta_fact_update("update1", tsb, tse) unless errors
|
33
38
|
end
|
34
39
|
|
35
40
|
# update by deleting missing nodes and get a complete map of nodes with facts
|
@@ -37,19 +42,24 @@ module PuppetDBQuery
|
|
37
42
|
#
|
38
43
|
# 166.4 seconds: update time for 1561 nodes
|
39
44
|
def update2
|
45
|
+
logger.info "update2 started (full update)"
|
46
|
+
tsb = Time.now
|
40
47
|
source_nodes = source.nodes
|
41
48
|
destination_nodes = destination.nodes
|
42
|
-
(destination_nodes
|
43
|
-
|
44
|
-
end
|
49
|
+
delete_missing(destination_nodes, source_nodes)
|
50
|
+
errors = false
|
45
51
|
complete = source.facts
|
46
52
|
complete.each do |node, facts|
|
47
53
|
begin
|
48
54
|
destination.node_update(node, facts)
|
49
55
|
rescue
|
56
|
+
errors = true
|
50
57
|
logging.error $!
|
51
58
|
end
|
52
59
|
end
|
60
|
+
tse = Time.now
|
61
|
+
logger.info "update2 updated #{source_nodes.size} nodes in #{tse - tsb}"
|
62
|
+
destination.meta_fact_update("update2", tsb, tse) unless errors
|
53
63
|
end
|
54
64
|
|
55
65
|
# update by deleting missing nodes and getting a list of nodes
|
@@ -57,20 +67,45 @@ module PuppetDBQuery
|
|
57
67
|
#
|
58
68
|
# update time depends extremely on the number of changed nodes
|
59
69
|
def update3(last_update_timestamp)
|
70
|
+
logger.info "update3 started (incremental)"
|
71
|
+
tsb = Time.now
|
60
72
|
source_nodes = source.nodes
|
61
73
|
destination_nodes = destination.nodes
|
62
|
-
(destination_nodes
|
63
|
-
|
64
|
-
end
|
74
|
+
delete_missing(destination_nodes, source_nodes)
|
75
|
+
errors = false
|
65
76
|
modified = source.nodes_update_facts_since(last_update_timestamp)
|
66
77
|
modified.each do |node|
|
67
78
|
begin
|
68
79
|
destination.node_update(node, source.node_facts(node))
|
69
80
|
rescue
|
81
|
+
errors = true
|
70
82
|
logging.error $!
|
71
83
|
end
|
72
84
|
end
|
73
|
-
|
85
|
+
tse = Time.now
|
86
|
+
logger.info "update3 updated #{modified.size} nodes in #{tse - tsb}"
|
87
|
+
destination.meta_fact_update("update3", tsb, tse) unless errors
|
88
|
+
end
|
89
|
+
|
90
|
+
def update_node_properties
|
91
|
+
logger.info "update_node_properties started"
|
92
|
+
tsb = Time.now
|
93
|
+
source_node_properties = source.node_properties
|
94
|
+
destination.node_properties_update(source_node_properties)
|
95
|
+
tse = Time.now
|
96
|
+
logger.info "update_node_properties updated #{source_node_properties.size} nodes " \
|
97
|
+
"in #{tse - tsb}"
|
98
|
+
destination.meta_node_properties_update(tsb, tse)
|
99
|
+
end
|
100
|
+
|
101
|
+
private
|
102
|
+
|
103
|
+
def delete_missing(destination_nodes, source_nodes)
|
104
|
+
missing = destination_nodes - source_nodes
|
105
|
+
missing.each do |node|
|
106
|
+
destination.node_delete(node)
|
107
|
+
end
|
108
|
+
logger.info " deleted #{missing.size} nodes"
|
74
109
|
end
|
75
110
|
end
|
76
111
|
end
|
data/lib/puppetdb_query.rb
CHANGED
@@ -5,6 +5,7 @@ require_relative "puppetdb_query/tokenizer"
|
|
5
5
|
require_relative "puppetdb_query/parser"
|
6
6
|
require_relative "puppetdb_query/puppetdb"
|
7
7
|
require_relative "puppetdb_query/mongodb"
|
8
|
+
require_relative "puppetdb_query/sync"
|
8
9
|
require_relative "puppetdb_query/to_mongo"
|
9
10
|
require_relative "puppetdb_query/updater"
|
10
11
|
require_relative "puppetdb_query/version"
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: puppetdb_query
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.0.
|
4
|
+
version: 0.0.4
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Michael Meyling
|
@@ -100,6 +100,7 @@ files:
|
|
100
100
|
- lib/puppetdb_query/operator.rb
|
101
101
|
- lib/puppetdb_query/parser.rb
|
102
102
|
- lib/puppetdb_query/puppetdb.rb
|
103
|
+
- lib/puppetdb_query/sync.rb
|
103
104
|
- lib/puppetdb_query/term.rb
|
104
105
|
- lib/puppetdb_query/to_mongo.rb
|
105
106
|
- lib/puppetdb_query/tokenizer.rb
|