opensecret 0.0.951 → 0.0.957

Sign up to get free protection for your applications and to get access to all the features.
Files changed (34) hide show
  1. checksums.yaml +4 -4
  2. data/lib/extension/array.rb +29 -0
  3. data/lib/extension/string.rb +31 -0
  4. data/lib/factbase/facts.opensecret.io.ini +17 -9
  5. data/lib/notepad/blow.rb +108 -5
  6. data/lib/opensecret.rb +32 -6
  7. data/lib/plugins/cipher.rb +7 -7
  8. data/lib/plugins/ciphers/blowfish.rb +63 -157
  9. data/lib/plugins/usecase.rb +1 -1
  10. data/lib/plugins/usecases/init.rb +57 -116
  11. data/lib/plugins/usecases/lock.rb +178 -0
  12. data/lib/plugins/usecases/open.rb +17 -86
  13. data/lib/plugins/usecases/put.rb +137 -0
  14. data/lib/plugins/usecases/safe.rb +8 -10
  15. data/lib/session/attributes.rb +16 -11
  16. data/lib/session/dictionary.rb +191 -0
  17. data/lib/session/session.rb +80 -0
  18. data/lib/session/time.stamp.rb +89 -106
  19. data/lib/using.txt +100 -0
  20. data/lib/version.rb +1 -1
  21. metadata +6 -15
  22. data/lib/opensecret/commons/eco.faculty.rb +0 -364
  23. data/lib/opensecret/commons/eco.system.rb +0 -437
  24. data/lib/opensecret/commons/eco.systems.rb +0 -98
  25. data/lib/opensecret/factbase/hub-runtime.ini +0 -123
  26. data/lib/opensecret/factbase/known-hosts.ini +0 -75
  27. data/lib/opensecret/factbase/published.facts/blobbolicious-facts.ini +0 -553
  28. data/lib/opensecret/factbase/published.facts/credential-facts.ini +0 -40
  29. data/lib/opensecret/factbase/published.facts/infrastructure-facts.ini +0 -63
  30. data/lib/opensecret/factbase/readme.md +0 -24
  31. data/lib/opensecret/factbase/retired.facts/maven.database.ide.facts.ini +0 -127
  32. data/lib/opensecret/factbase/retired.facts/s3-upload-block-facts.ini +0 -17
  33. data/lib/opensecret/plugins.io/file/file.rb +0 -483
  34. data/lib/plugins/usecases/on.rb +0 -33
data/lib/version.rb CHANGED
@@ -1,3 +1,3 @@
1
1
  module OpenSecret
2
- VERSION = "0.0.951"
2
+ VERSION = "0.0.957"
3
3
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: opensecret
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.951
4
+ version: 0.0.957
5
5
  platform: ruby
6
6
  authors:
7
7
  - Apollo Akora
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2018-02-26 00:00:00.000000000 Z
11
+ date: 2018-03-04 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: inifile
@@ -117,24 +117,12 @@ files:
117
117
  - lib/logging/gem.logging.rb
118
118
  - lib/notepad/blow.rb
119
119
  - lib/opensecret.rb
120
- - lib/opensecret/commons/eco.faculty.rb
121
- - lib/opensecret/commons/eco.system.rb
122
- - lib/opensecret/commons/eco.systems.rb
123
120
  - lib/opensecret/executors/crypt.keys/crypt.keys.ini
124
121
  - lib/opensecret/executors/crypt.keys/crypt.keys.rb
125
122
  - lib/opensecret/executors/decrypt/decrypt.ini
126
123
  - lib/opensecret/executors/decrypt/decrypt.rb
127
124
  - lib/opensecret/executors/encrypt/encrypt.ini
128
125
  - lib/opensecret/executors/encrypt/encrypt.rb
129
- - lib/opensecret/factbase/hub-runtime.ini
130
- - lib/opensecret/factbase/known-hosts.ini
131
- - lib/opensecret/factbase/published.facts/blobbolicious-facts.ini
132
- - lib/opensecret/factbase/published.facts/credential-facts.ini
133
- - lib/opensecret/factbase/published.facts/infrastructure-facts.ini
134
- - lib/opensecret/factbase/readme.md
135
- - lib/opensecret/factbase/retired.facts/maven.database.ide.facts.ini
136
- - lib/opensecret/factbase/retired.facts/s3-upload-block-facts.ini
137
- - lib/opensecret/plugins.io/file/file.rb
138
126
  - lib/opensecret/plugins.io/git/git.flow.rb
139
127
  - lib/plugins/cipher.rb
140
128
  - lib/plugins/ciphers/aes-256.rb
@@ -142,12 +130,15 @@ files:
142
130
  - lib/plugins/stores/store.rb
143
131
  - lib/plugins/usecase.rb
144
132
  - lib/plugins/usecases/init.rb
145
- - lib/plugins/usecases/on.rb
133
+ - lib/plugins/usecases/lock.rb
146
134
  - lib/plugins/usecases/open.rb
135
+ - lib/plugins/usecases/put.rb
147
136
  - lib/plugins/usecases/safe.rb
148
137
  - lib/session/attributes.rb
138
+ - lib/session/dictionary.rb
149
139
  - lib/session/fact.finder.rb
150
140
  - lib/session/require.gem.rb
141
+ - lib/session/session.rb
151
142
  - lib/session/time.stamp.rb
152
143
  - lib/session/user.home.rb
153
144
  - lib/using.txt
@@ -1,364 +0,0 @@
1
- #!/usr/bin/ruby
2
-
3
- # --
4
- # -- eco-system [provisioning] begins in earnest here. By making
5
- # -- a [super] call (at the beginning, middle or end) - eco-systems
6
- # -- can extend the functionality provided here.
7
- # --
8
- # -- To prevent this code running, child classes must provide their
9
- # -- own provision along an (optional) alternative implementation.
10
- # --
11
- class EcoFaculty
12
-
13
- attr_reader :eco_id_str
14
-
15
- #--
16
- #-- Get the folder path to the eco-system [plugin]
17
- #-- source directory.
18
- #--
19
- #-- Also see the plugin runtime directory which is
20
- #-- the runtime folder created by the method named
21
- #-- => EcoFaculty.instantiate_runtime
22
- #--
23
- #-- -------------
24
- #-- Dependency
25
- #-- -------------
26
- #--
27
- #-- The method :core_provisioning in the plugin must
28
- #-- exist and must have been "Required".
29
- #--
30
- def plugin_src_dir
31
-
32
- return File.dirname self.method(:core_provisioning).source_location.first
33
-
34
- end
35
-
36
-
37
- # --
38
- # -- Facts are the business of this eco-system behaviour class.
39
- # -- This provision method will collect the eco-system plugin id
40
- # -- and from that assimilate all known fact files.
41
- # --
42
- def provision
43
-
44
- @eco_id_str = SnapFlat.do self.class.name
45
- @eco_id_sym = @eco_id_str.gsub(".", "_").to_sym
46
- @plugin_path = plugin_src_dir
47
-
48
- FactTree.instance.instantiate @eco_id_str, @plugin_path
49
- FactTree.instance.assimilate_instance_facts
50
-
51
- instantiate_runtime FactTree.instance.f
52
-
53
- FactTree.instance.identify_my_workstation
54
- FactTree.instance.assimilate_station_facts
55
- FactTree.instance.assimilate_plugin_facts
56
- FactTree.instance.assimilate_general_facts
57
-
58
- @c = FactTree.instance.f
59
- @i = FactTree.instance.i
60
- @p = FactTree.instance.f[@eco_id_sym]
61
-
62
- # --
63
- # -- assimilate and configure aws cloud credentials
64
- # -- if the plugin fact [aws.creds.dir] exists.
65
- # --
66
- configure_aws_credentials if @c[:aws][:creds_exist?]
67
-
68
- end
69
-
70
-
71
- # --
72
- # -- Assimilate and configure aws cloud credentials.
73
- # -- Call this method only when it is certain that the
74
- # -- hub fact [aws.creds.dir] exists.
75
- # --
76
- # -- By convention this method expects the decrypted
77
- # -- AWS user credential INI file to be inside the above
78
- # -- dir with name <<plugin.id>>.aws.credentials.ini
79
- # --
80
- def configure_aws_credentials
81
-
82
- # --
83
- # -- Before the rubber can hit the road - we must
84
- # -- first read in the secret credentials. To avoid
85
- # -- logging the secrets we read it up into its own
86
- # -- isolated mini fact database.
87
- # --
88
- # -- The aws creds database lives just long enough
89
- # -- to programmatically configure the AWS IAM user.
90
- # --
91
- FactTree.instance.assimilate_ini_file @c[:aws][:creds_path]
92
- aws_creds = FactTree.instance.f
93
- log.info(ere) { "AWS Region Key => #{aws_creds[@eco_id_sym][:aws_region_key]}" }
94
-
95
- # --
96
- # -- Now the rubber can hit the road and we configure
97
- # -- the AWS Ruby SDK with the IAM credentials that
98
- # -- have been recovered.
99
- # --
100
- Aws.use_bundled_cert!
101
- Aws.config.update({
102
- :access_key_id => aws_creds[@eco_id_sym][:aws_access_key],
103
- :secret_access_key => aws_creds[@eco_id_sym][:aws_secret_key],
104
- :region => aws_creds[@eco_id_sym][:aws_region_key]
105
- })
106
-
107
- # --
108
- # -- Temporarily add the AWS credentials to the environment
109
- # -- variables so that infrastructure provisioning tools like
110
- # -- Terraform can access the cloud credentials.
111
- # --
112
- EnvVar.new.write_env_var "AWS_ACCESS_KEY_ID", aws_creds[@eco_id_sym][:aws_access_key]
113
- EnvVar.new.write_env_var "AWS_SECRET_ACCESS_KEY", aws_creds[@eco_id_sym][:aws_secret_key]
114
- EnvVar.new.write_env_var "AWS_DEFAULT_REGION", aws_creds[@eco_id_sym][:aws_region_key]
115
-
116
- end
117
-
118
-
119
- # --
120
- # -- This key (instantiation) behaviour
121
- # --
122
- # -- 1 - [CREATES] the runtime plugin folder [then]
123
- # -- 2 - [COPIES] into it files from the plugin source folder [and]
124
- # -- 3 - [NOTIFY] (point) log utility to new (plugin dir) logfile
125
- # --
126
- # -- Before continuing the fact assimilation process we now
127
- # -- know enough to instantiate the plugin runtime folder and
128
- # -- start logging to a file within it.
129
- # --
130
- def instantiate_runtime core_db
131
-
132
- Throw.if_exists core_db[:runtime][:dir]
133
-
134
- # --
135
- # -- Create the new runtime directory then
136
- # -- Copy asset templates into it.
137
- # -- Note that these templates contain unresolved fact directives.
138
- # --
139
- FileUtils.mkdir_p core_db[:runtime][:dir] unless File.exists? core_db[:runtime][:dir]
140
- FileUtils.copy_entry @plugin_path, core_db[:runtime][:dir]
141
- FactTree.instance.add_dir_to_identity core_db[:runtime][:dir]
142
-
143
- # --
144
- # -- Evaluate the absolute [logfile path] within
145
- # -- the eco system instance runtime directory.
146
- # --
147
- # -- Then tell the logging.rb MIXIN that the new logfile
148
- # -- at (logfile_path) is ready to be used.
149
- # -- This MIXIN affects all classes that have log write statements
150
- # --
151
- logfile_path = File.join core_db[:runtime][:dir], "#{core_db[:runtime][:dirname]}.log"
152
- log.info(ere) { "[LAST] - The last logging statement going to STDOUT but NOT logfile." }
153
- set_logfile_path logfile_path
154
- log.info(ere) { "[FIRST] - The first log going to [both] STDOUT and a logfile." }
155
- log.info(ere) { "logfile => #{logfile_path}" }
156
- log.info(ere) { "Log Level is now set @ => #{log.level.to_s}" }
157
-
158
- end
159
-
160
-
161
- # --
162
- # -- Write the entirety of command line options into
163
- # -- the fact database as an array.
164
- # --
165
- # -- Group Symbol => :cmd_line
166
- # -- Key Symbol => :options
167
- # -- Value Type => (Array of Strings)
168
- # --
169
- #### def read_cmdline_facts
170
-
171
- #### puts
172
- #### puts "# -- ------------------ -------------------------- --- "
173
- #### puts "# -- Command Line Facts -------------------------- --- "
174
- #### puts "# -- ------------------ -------------------------- --- "
175
- #### pp CmdLine.instance.args_cache
176
- #### puts "# -- ------------------ -------------------------- --- "
177
- #### puts
178
-
179
- #### @fact_db.add_fact :cmd_line, :options, CmdLine.instance.args_cache
180
-
181
- #### end
182
-
183
-
184
- # --
185
- # -- Scan every file in the eco plugin directory in search of
186
- # -- fact placeholder keys contained in the fact database.
187
- # --
188
- # -- Once found, replace them with the corresponding value.
189
- # --
190
- # -- -----------
191
- # -- Warning
192
- # -- -----------
193
- # -- This behaviour is [NOT RECURSIVE].
194
- # -- Only files in the folder are examined.
195
- # -- Folders in the folder are ignored.
196
- # --
197
- def replace_placeholders
198
-
199
- Files.find_replace Refactor.flatten(@c), @c[:runtime][:dir]
200
-
201
- end
202
-
203
-
204
- ## --- ---------------------------------------------- --- #
205
- ## --- Return the fact that is --- #
206
- ## --- [1] under the current eco plugin banner --- #
207
- ## --- [2] has the key symbol in the parameter --- #
208
- ## --- ---------------------------------------------- --- #
209
- def get_eco_fact key_symbol
210
- error_msg = "fact 4 group => #{@eco_id_sym} + key => #{key_symbol} not in db."
211
- raise ArgumentError.new "\n\n#{error_msg}\n\n" unless eco_fact_exists? key_symbol
212
- return @c[@eco_id_sym][key_symbol]
213
- end
214
-
215
-
216
- ## --- ---------------------------------------------- --- #
217
- ## --- Return the fact that is --- #
218
- ## --- [1] under the current eco plugin banner --- #
219
- ## --- [2] has the key symbol in the parameter --- #
220
- ## --- ---------------------------------------------- --- #
221
- def e_fact key_symbol
222
- error_msg = "fact 4 group => #{@eco_id_sym} + key => #{key_symbol} not in db."
223
- raise ArgumentError.new "\n\n#{error_msg}\n\n" unless eco_fact_exists? key_symbol
224
- return @c[@eco_id_sym][key_symbol]
225
- end
226
-
227
-
228
- ## --- ---------------------------------------------- --- #
229
- ## --- Return the fact that is --- #
230
- ## --- [1] under the current eco plugin banner --- #
231
- ## --- [2] has the key symbol in the parameter --- #
232
- ## --- ---------------------------------------------- --- #
233
- def plugin_fact key_symbol
234
- error_msg = "fact 4 group => #{@eco_id_sym} + key => #{key_symbol} not in db."
235
- raise ArgumentError.new "\n\n#{error_msg}\n\n" unless eco_fact_exists? key_symbol
236
- return @c[@eco_id_sym][key_symbol]
237
- end
238
-
239
-
240
- # --
241
- # -- Return [true] if a fact exists that is
242
- # --
243
- # -- [1] under the banner of the current plugin id
244
- # -- [2] with a key symbol matching the parameter
245
- # -- [3] is neither empty nor solely whitespace
246
- # --
247
- def plugin_fact_exists? key_symbol
248
- return db_fact_exists? @eco_id_sym, key_symbol
249
- end
250
-
251
-
252
- ## --- ------------------------------------------------------ --- #
253
- ## --- Return [true] if a fact exists that is --- #
254
- ## --- [1] under the banner of the current plugin id --- #
255
- ## --- [2] with a key symbol matching the parameter --- #
256
- ## --- [3] is neither empty nor solely whitespace --- #
257
- ## --- ------------------------------------------------------ --- #
258
- def eco_fact_exists? key_symbol
259
- return db_fact_exists? @eco_id_sym, key_symbol
260
- end
261
-
262
-
263
- ## --- ---------------------------------------------------------- --- #
264
- ## --- Return [true] if a fact exists that --- #
265
- ## --- [1] DOES NOT ASSUME FACT IS A STRING
266
- ## --- [1] carries the primary symbol key in 1st parameter --- #
267
- ## --- [2] carries the secondary symbol key in 2nd parameter --- #
268
- ## --- [3] is neither empty nor solely whitespace --- #
269
- ## --- ---------------------------------------------------------- --- #
270
- def db_fact_exists? group_symbol, key_symbol
271
- return false unless @c.has_key? group_symbol
272
- fact_value = @c[group_symbol][key_symbol]
273
- return !fact_value.nil?
274
- end
275
-
276
-
277
- ## --- [1] ASSUMES THAT FACT IS A STRING --- #
278
- def string_fact_exists? group_symbol, key_symbol
279
- return false unless @c.has_key? group_symbol
280
- fact_value = @c[group_symbol][key_symbol]
281
- return false if fact_value.nil?
282
- return fact_value.strip.length > 0
283
- end
284
-
285
-
286
- # --
287
- # -- If eco plugins wish to update only [SOME] of the properties
288
- # -- within [application.properties] then they should set
289
- # --
290
- # -- fact key => @c[@c[:plugin][:id]][:props_src_path]
291
- # -- fact val => "/path/to/application.properties
292
- # --
293
- # -- The property file will be assimilated and the key / value
294
- # -- pairs will be attached under the :app_properties grouping
295
- # -- symbol. One or more facts can be updated (overwritten) and
296
- # -- later written back (see write_app_properties)
297
- # --
298
- def read_properties
299
-
300
- return unless eco_fact_exists? :props_src_path
301
- @fact_db.assimilate_property_file e_fact(:props_src_path), :app_properties
302
-
303
- end
304
-
305
-
306
- # --
307
- # -- If eco plugins wish the properties in the database under
308
- # -- :app_properties to be written to create (or overwrite)
309
- # -- the app properties they should create
310
- # --
311
- # -- fact key => @c[@c[:plugin][:id]][:props_dst_dirs]
312
- # -- fact val => "/path/to/application.properties
313
- # --
314
- # -- ----------------
315
- # -- Warning (Array)
316
- # -- ----------------
317
- # -- The [:props_dst_dirs] fact is an [ARRAY].
318
- # -- The same property key/value pairs will be serialized into
319
- # -- each file url stated in the array. The [:props_dst_dirs]
320
- # -- is (in contrast) a simple single filename.
321
- # --
322
- # -- -----------------------------
323
- # -- (Optional) - Src Properties
324
- # -- -----------------------------
325
- # -- Plugins do not need to set the :app_props_src_path key.
326
- # -- If they only set the (dst) key, any properties set in the
327
- # -- fact database will still be written out.
328
- # --
329
- def write_properties
330
-
331
- return unless eco_fact_exists? :props_dst_dirs
332
- property_files = e_fact :props_dst_dirs
333
-
334
- property_files.each do | property_dir |
335
-
336
- Dir.mkdir property_dir unless File.exists? property_dir
337
- Files.write_properties(
338
- @c[:app_properties],
339
- property_dir,
340
- e_fact(:props_dst_name)
341
- )
342
-
343
- end
344
-
345
- end
346
-
347
-
348
- def read_block_facts module_path, method_name, grp_sym, key_sym, key_val
349
-
350
- # -- ---------------------------------------------------------- -- #
351
- # -- Derive name of block db ini file using id and method name. -- #
352
- # -- ---------------------------------------------------------- -- #
353
- block_filepath = FactLocator.block_factdb_path module_path, method_name
354
- Throw.if_not_exists block_filepath
355
-
356
- # -- ----------------------------------------------------------- -- #
357
- # -- Initialize block database building upon the wider scoped @e -- #
358
- # -- ----------------------------------------------------------- -- #
359
- FactReader.new @c, block_filepath, grp_sym, key_sym, key_val
360
-
361
- end
362
-
363
-
364
- end
@@ -1,437 +0,0 @@
1
- #!/usr/bin/ruby
2
-
3
- # --- --------------------------------------------------------------------------------------- -- #
4
- # --- Build the [services eco-system]. The app sits at the centre of the services eco-system. -- #
5
- # --- Everything that is done -> is done for (because of, to, in spite of) the [application]. -- #
6
- # --- --------------------------------------------------------------------------------------- -- #
7
- # --- --------------------------------------------------------------------------- --- #
8
- # --- The [eco service folder] contains the templates, scripts and configuration. --- #
9
- # --- By convention the folder name (off prj root) matches the name of the class. --- #
10
- # --- --------------------------------------------------------------------------- --- #
11
- # --- Example => ProvisionMongoDb assets are provision.mongo.db --- #
12
- # --- --------------------------------------------------------------------------- --- #
13
- # --- By Convention --- #
14
- # --- Ruby Class => EcoAppServer --- #
15
- # --- is Found in File => eco.system.plugins/eco.app.server.rb --- #
16
- # --- Has Assets in => provision.app.server/ --- #
17
- # --- and Inherits from => ProvisionEcoService --- #
18
- # --- Found in File => provision.services/provision.eco.service.rb --- #
19
- # --- --------------------------------------------------------------------------- --- #
20
- class EcoSystem
21
-
22
- # -- -------------------------------------------------------------- -- #
23
- # -- eco-system [provisioning] begins in earnest here. By making -- #
24
- # -- a [super] call (at the beginning, middle or end) - eco-systems -- #
25
- # -- can extend the functionality provided here. -- #
26
- # -- -------------------------------------------------------------- -- #
27
- # -- To prevent this code running, child classes must provide their -- #
28
- # -- own provision along an (optional) alternative implementation. -- #
29
- # -- -------------------------------------------------------------- -- #
30
- def provision
31
-
32
- super
33
-
34
- pre_provisioning # --> Do work to gather key provisioning facts
35
- replace_placeholders # --> Replace key facts in files within the eco folder
36
-
37
- core_provisioning # --> Do the heavy lifting 4 provisioning the eco service
38
-
39
- overwrite_lines # --> Replace pinpointed lines that include a string set.
40
- replace_placeholders # --> Replace xtra key facts to prep 4 post provisioning.
41
- post_provisioning # --> Notifying service dependents is usually done here.
42
-
43
- end
44
-
45
- ## --- ----------------------------------------------------------------------------- --- #
46
- ## --- Provision the services eco-system (universe) with the app as the focal point. --- #
47
- ## --- ----------------------------------------------------------------------------- --- #
48
- def pre_provisioning
49
-
50
- read_properties
51
- inject_reusables
52
-
53
- end
54
-
55
-
56
- # --
57
- # -- Implements service discovery for the provisioned eco-system services.
58
- # --
59
- def post_provisioning
60
-
61
- execute_scripts
62
- s3_upload
63
- s3_synchronize
64
- write_properties
65
- sync_2s3_bucket
66
-
67
- end
68
-
69
-
70
- #--
71
- #-- Get eco-system reusable directory filepaths within
72
- #-- an array.
73
- #--
74
- #-- The two known directories are
75
- #--
76
- #-- [1] - reusable.scripts
77
- #-- [2] - reusable.templates
78
- #--
79
- #--
80
- def self.reusable_buckets
81
-
82
- project_basedir = File.dirname( File.dirname( __FILE__ ) )
83
- reusable_buckets = Array.new
84
-
85
- reusable_buckets.push( File.join(project_basedir, "reusable.scripts") )
86
- reusable_buckets.push( File.join(project_basedir, "reusable.templates") )
87
-
88
- return reusable_buckets
89
-
90
- end
91
-
92
-
93
- # --
94
- # -- Gather the reusable [file] resources from the directory bucket
95
- # -- arrays that are declared to hold these assets.
96
- # --
97
- # -- The reusables are gathered only if the plugin declares a fact
98
- # -- called [:reusables] that is an array of simple filenames.
99
- # --
100
- # -- This method does a recursive search to find and then copy over
101
- # -- these reusable files into the runtime directory.
102
- # --
103
- # -- ------------------------------
104
- # -- Constraint - Duplicate Names
105
- # -- ------------------------------
106
- # --
107
- # -- Duplicate asset filenames introduce ambiguity in as far as
108
- # -- reusable assets are concerned. Therefore an error will be
109
- # -- raised if this situation arises.
110
- # --
111
- def inject_reusables
112
-
113
- return unless eco_fact_exists?(:inventory) || eco_fact_exists?(:runnables)
114
- files_map = Files.in_folders EcoSystem.reusable_buckets
115
- reusables = e_fact(:inventory).merge( e_fact(:runnables) )
116
- reusables.each do |source_name, target_name|
117
-
118
- error_1 = "Cannot find reusable [#{source_name}].\n\n#{files_map.inspect}"
119
- raise ArgumentError.new error_1 unless files_map.has_key? source_name
120
- log.info(ere) {"Copying reusable #{source_name} => to => #{target_name}"}
121
- source_file = File.join files_map[source_name], source_name
122
- target_file = File.join @c[:runtime][:dir], target_name
123
- log.info(ere) {"Source DevOps Asset => #{nickname source_file}"}
124
- log.info(ere) {"Target DevOps Asset => #{nickname target_file}"}
125
-
126
- FileUtils.cp source_file, target_file
127
-
128
- end
129
-
130
- end
131
-
132
-
133
- # --
134
- # -- Use the remote host instantiated for the eco plugin.
135
- # -- Upload the plugin folder and run the reusables.
136
- # --
137
- def execute_scripts
138
-
139
- return unless eco_fact_exists? :runnables
140
-
141
- log.info(ere) { "[collate] ---------------------------------------- --- #" }
142
- log.info(ere) { "[collate] collate will upload and execute scripts. --- #" }
143
- log.info(ere) { "[collate] ---------------------------------------- --- #" }
144
- log.info(ere) { "#{pp e_fact(:runnables).values}" }
145
- log.info(ere) { "[collate] ---------------------------------------- --- #" }
146
-
147
- install_dos2unix = "sudo apt-get install -y dos2unix"
148
- plugin_host = @c[:machine][:host_class]
149
- plugin_host.runtime_dir = @c[:runtime][:dir]
150
-
151
- plugin_host.execute_cmd install_dos2unix
152
- plugin_host.upload_folder @c[:runtime][:dstname], @c[:runtime][:dir]
153
-
154
-
155
- e_fact(:runnables).each_value do | script_name |
156
-
157
- script_path = @c[:runtime][:dstname] + "/" + @c[:runtime][:dirname] + "/" + script_name
158
-
159
- cmd1 = "chmod u+x " + script_path
160
- cmd2 = "dos2unix " + script_path
161
- cmd3 = script_path
162
-
163
- #### plugin_host.execute_ansible_cmd @c[:runtime][:dir]
164
- #### exit
165
-
166
-
167
- plugin_host.execute_cmd cmd1
168
- plugin_host.execute_cmd cmd2
169
- plugin_host.execute_cmd cmd3
170
-
171
- end
172
-
173
- plugin_host.log_remote_host
174
-
175
- end
176
-
177
-
178
- # -- ----------------------------------------------------- -- #
179
- # -- sync folder with s3 bucket under certain conditions. -- #
180
- # -- ----------------------------------------------------- -- #
181
- # -- Sync Conditions -- #
182
- # -- ----------------------------------------------------- -- #
183
- # -- [1] - running in a unix environment -- #
184
- # -- [2] - key [s3sync.bucket.name] exists -- #
185
- # -- [3] - key [s3sync.path.offset] exists -- #
186
- # -- [4] - s3 bucket exists and is writeable -- #
187
- # -- [5] - local dir exists and is readable -- #
188
- # -- -- #
189
- # -- ----------------------------------------------------- -- #
190
- # -- Dependencies and Assumptions -- #
191
- # -- ----------------------------------------------------- -- #
192
- # -- the aws iam environment variables are set -- #
193
- # -- the s3 bucket specified exists and is writable -- #
194
- # -- the s3 bucket contents are deletable -- #
195
- # -- local path offset off [plugin folder] exists -- #
196
- # -- the [awscli] apt-get package is installed -- #
197
- # -- ----------------------------------------------------- -- #
198
- def s3_synchronize
199
-
200
- return if Gem.win_platform?
201
- return unless eco_fact_exists? :s3sync_bucket
202
- return unless eco_fact_exists? :s3sync_folder
203
-
204
- log.info(ere) { "[s3 sync] -------------------------------------------- --- #" }
205
- log.info(ere) { "[s3 sync] eco plugin running on a non-windows platform --- #" }
206
- log.info(ere) { "[s3 sync] with s3 sync parameters available. --- #" }
207
- log.info(ere) { "[s3 sync] -------------------------------------------- --- #" }
208
-
209
- AwsS3.instance.log_bucket_summary
210
- AwsS3.instance.sync_with_s3 e_fact(:s3sync_bucket), e_fact(:s3sync_folder)
211
- AwsS3.instance.log_bucket_summary
212
-
213
- end
214
-
215
-
216
- # --
217
- # -- [SYNC] a local folder with a given S3 bucket at a particular
218
- # -- folder offset location, with a specific set of sync options.
219
- # --
220
- # -- This behaviour is driven by a (plugin.id).s3.sync.spec.json
221
- # -- specification file that states
222
- # --
223
- # -- [1] - the source folder whose contents will be sync'd up
224
- # -- [2] - the S3 bucket name into which to sync the contents
225
- # -- [3] - the S3 folder path offset (within the S3 bucket)
226
- # -- [4] - sync options like delete, size-only, acl and more
227
- # --
228
- def sync_2s3_bucket
229
-
230
- return unless @c.has_key?(:s3_sync) && File.exists?(@c[:s3_sync][:spec_filepath])
231
-
232
- AwsS3.instance.log_bucket_summary
233
-
234
- sync_directives = JSON.parse(
235
- File.read(@c[:s3_sync][:spec_filepath]),
236
- object_class: OpenStruct
237
- )
238
-
239
- sync_directives.each do | sync_directive |
240
-
241
- log.info(ere) { "[sync] ############################################################### ### #" }
242
- log.info(ere) { "[sync] --------------------------------------------------------------- --- #" }
243
- log.info(ere) { "[sync] sync-ing local folder to s3 bucket [#{sync_directive.s3_bucket_name}]" }
244
- log.info(ere) { "[sync] --------------------------------------------------------------- --- #" }
245
- log.info(ere) { "[sync] sync source folder => #{sync_directive.local_folder}" }
246
- log.info(ere) { "[sync] source bucket name => #{sync_directive.s3_bucket_name}" }
247
- log.info(ere) { "[sync] mirror bucket name => #{sync_directive.bucket_b4_name}" }
248
- log.info(ere) { "[sync] bucket offset path => #{sync_directive.offset_path}" }
249
- log.info(ere) { "[sync] sync options array => #{sync_directive.sync_options}" }
250
- log.info(ere) { "[sync] --------------------------------------------------------------- --- #" }
251
-
252
- # --
253
- # -- Is it worthwhile to copy between S3 buckets first
254
- # -- before sync-ing up the local folder?
255
- # --
256
- # -- We deem it yes if (and only if)
257
- # --
258
- # -- a) the to-sync folder is over [10MB]
259
- # -- b) a bucket_b4_name has been specified
260
- # -- c) the folder to sync does [NOT] exist.
261
- # -- d) the b4 folder [DOES] exist.
262
- # --
263
- # -- If so a S3 [bucket] to [bucket] mirror/copy may
264
- # -- dramatically reduce sync time.
265
- # --
266
- AwsS3.instance.copy_folder_between_buckets(
267
- sync_directive.bucket_b4_name,
268
- sync_directive.s3_bucket_name,
269
- sync_directive.offset_path
270
- ) if copy_b4_sync_worthwhile?( sync_directive )
271
-
272
-
273
- AwsS3.instance.sync_local_to_s3(
274
- sync_directive.local_folder,
275
- sync_directive.s3_bucket_name,
276
- sync_directive.offset_path,
277
- sync_directive.sync_options
278
- )
279
-
280
- end
281
-
282
- AwsS3.instance.log_bucket_summary
283
-
284
- end
285
-
286
-
287
- # --
288
- # -- [COPY] from another s3 bucket [B4 SYNC] if [WORTHWHILE]
289
- # --
290
- # -- Once a month (or week) performance may be gained by copying
291
- # -- from the previous s3 bucket before sync-ing the local folder.
292
- # --
293
- # -- The first [backup] of the new month/week/day is a full backup
294
- # -- of a local folder to up-sync. This can take a lot of time for
295
- # -- a say [7Gig] folder holding many little files.
296
- # --
297
- # -- -------------------
298
- # -- S3 to S3 Mirror
299
- # -- -------------------
300
- # -- If we copy (mirror) the previous S3 bucket folder before the
301
- # -- sync we gain much in performance because S3 to S3 copying is
302
- # -- super fast - then just the delta is sync'd up.
303
- # --
304
- # -- -------------------------------
305
- # -- Pre-Conditions - Copy B4 Sync
306
- # -- -------------------------------
307
- # --
308
- # -- The copy/mirror before sync will occur when the
309
- # --
310
- # -- 1 - [sync_options.copy_b4_sync_if] flag is [true]
311
- # -- 2 - to sync S3 folder (not bucket) does NOT exist
312
- # -- 3 - previous periods (month/week..) folder exists
313
- # --
314
- # -- -------------
315
- # -- Assumptions
316
- # -- -------------
317
- # -- Currently assumes the period is ALWAYS [monthly].
318
- # -- Change this to cater for
319
- # -- [ hourly, daily, weekly, monthly, quarterly, yearly ]
320
- # --
321
- def copy_b4_sync_worthwhile? sync_attr
322
-
323
- return false if sync_attr.bucket_b4_name.nil?
324
-
325
- sync_folder_exists =
326
- AwsS3.instance.bucket_folder_contains_something?(
327
- sync_attr.s3_bucket_name,
328
- sync_attr.offset_path
329
- )
330
-
331
- return false if sync_folder_exists
332
-
333
- b4_folder_exists =
334
- AwsS3.instance.bucket_folder_contains_something?(
335
- sync_attr.bucket_b4_name,
336
- sync_attr.offset_path
337
- )
338
-
339
- return b4_folder_exists
340
-
341
- end
342
-
343
-
344
- # --- ---------------------------------------------------------------------------- --- #
345
- # --- Any file in the eco folder whose name starts with [:s3][:upload_prefix] gets --- #
346
- # --- uploaded to the S3 provisioning folder (in monthly bucket). Then the url --- #
347
- # --- is written into the app properties database with a key that is the remaining --- #
348
- # --- filename after the preceding s3 prefix is removed and subsequently --- #
349
- # --- appended with the string ".url" --- #
350
- # --- ---------------------------------------------------------------------------- --- #
351
- def s3_upload
352
-
353
- log.info(ere) { "[s3 upload] examing files in #{@c[:runtime][:dir]}" }
354
-
355
- # -- ------------------------------------------------------------------ -- #
356
- # -- Scan folder for files whose names begin with the s3 upload prefix. -- #
357
- # -- ------------------------------------------------------------------ -- #
358
- Dir.foreach( @c[:runtime][:dir] ) do | file_name |
359
-
360
- file_path = File.join @c[:runtime][:dir], file_name
361
- next if File.directory? file_path
362
- next unless file_name.start_with? @c[:s3][:upload_prefix]
363
-
364
- read_block_facts __FILE__, __method__, :upload, :src_file_name, file_name
365
- Dir.mkdir @c[:s3][:uploads_dir] unless File.exists? @c[:s3][:uploads_dir]
366
- next if File.exists? @c[:upload][:dst_file_path]
367
-
368
- FileUtils.cp @c[:upload][:src_file_path], @c[:upload][:dst_file_path]
369
-
370
- AwsS3.instance.log_bucket_summary
371
-
372
- log.warn(ere) { "Warning - Not uploading to S3 bucket = File ==| #{@c[:upload][:dst_file_path]}" }
373
- log.warn(ere) { "Warning - Not adding S3 resource URL fact to app_properties fact group." }
374
- ##### === =============================================================================================
375
- ##### === Commenting this prevents uploading any file with the s3put tag.
376
- ##### === =============================================================================================
377
- ##### === s3_url = AwsS3.instance.upload_to_s3 @c[:s3][:bucket_name], @c[:upload][:dst_file_path]
378
- ##### === @c.add_fact :app_properties, @c[:upload][:app_props_key], s3_url
379
- ##### === =============================================================================================
380
-
381
- end
382
-
383
- end
384
-
385
-
386
- # --
387
- # -- [FIND] lines that include a set of configured strings and
388
- # -- [REPLACE] then with the configured alternative.
389
- # --
390
- # -- This behaviour is driven by a (plugin.id).line.replace.json
391
- # -- configuration file that states
392
- # --
393
- # -- [1] - the target file to change
394
- # -- [2] - the array of words to match each line against
395
- # -- [3] - new line replacing old if all the words match
396
- # --
397
- # -- -----------------------------------
398
- # -- [Pre-Conditions] => Only act when
399
- # -- -----------------------------------
400
- # --
401
- # -- 1. plugin dir has a json [targetting] configuration file
402
- # --
403
- # -- ---------------------------------
404
- # -- [Dependencies and Assumptions]
405
- # -- ---------------------------------
406
- # --
407
- # -- 1. json file is formatted with below keys (and value types)
408
- # --
409
- # -- - replace_file_path : value type => String
410
- # -- - line_search_strings : value type => Array of Strings
411
- # -- - replace_with_string : value type => String
412
- # --
413
- # -- 2. every file specified exists and is readable + writeable
414
- # --
415
- def overwrite_lines
416
-
417
- return unless File.exists? @c[:overwrite][:spec_filepath]
418
-
419
- pointers = JSON.parse(
420
- File.read(@c[:overwrite][:spec_filepath]),
421
- object_class: OpenStruct
422
- )
423
-
424
- pointers.each do | pinpoint |
425
-
426
- Files.find_replace_lines(
427
- pinpoint.replace_file_path,
428
- pinpoint.line_search_strings,
429
- pinpoint.replace_with_string
430
- )
431
-
432
- end
433
-
434
- end
435
-
436
-
437
- end