opensecret 0.0.951 → 0.0.957

Files changed (34)
  1. checksums.yaml +4 -4
  2. data/lib/extension/array.rb +29 -0
  3. data/lib/extension/string.rb +31 -0
  4. data/lib/factbase/facts.opensecret.io.ini +17 -9
  5. data/lib/notepad/blow.rb +108 -5
  6. data/lib/opensecret.rb +32 -6
  7. data/lib/plugins/cipher.rb +7 -7
  8. data/lib/plugins/ciphers/blowfish.rb +63 -157
  9. data/lib/plugins/usecase.rb +1 -1
  10. data/lib/plugins/usecases/init.rb +57 -116
  11. data/lib/plugins/usecases/lock.rb +178 -0
  12. data/lib/plugins/usecases/open.rb +17 -86
  13. data/lib/plugins/usecases/put.rb +137 -0
  14. data/lib/plugins/usecases/safe.rb +8 -10
  15. data/lib/session/attributes.rb +16 -11
  16. data/lib/session/dictionary.rb +191 -0
  17. data/lib/session/session.rb +80 -0
  18. data/lib/session/time.stamp.rb +89 -106
  19. data/lib/using.txt +100 -0
  20. data/lib/version.rb +1 -1
  21. metadata +6 -15
  22. data/lib/opensecret/commons/eco.faculty.rb +0 -364
  23. data/lib/opensecret/commons/eco.system.rb +0 -437
  24. data/lib/opensecret/commons/eco.systems.rb +0 -98
  25. data/lib/opensecret/factbase/hub-runtime.ini +0 -123
  26. data/lib/opensecret/factbase/known-hosts.ini +0 -75
  27. data/lib/opensecret/factbase/published.facts/blobbolicious-facts.ini +0 -553
  28. data/lib/opensecret/factbase/published.facts/credential-facts.ini +0 -40
  29. data/lib/opensecret/factbase/published.facts/infrastructure-facts.ini +0 -63
  30. data/lib/opensecret/factbase/readme.md +0 -24
  31. data/lib/opensecret/factbase/retired.facts/maven.database.ide.facts.ini +0 -127
  32. data/lib/opensecret/factbase/retired.facts/s3-upload-block-facts.ini +0 -17
  33. data/lib/opensecret/plugins.io/file/file.rb +0 -483
  34. data/lib/plugins/usecases/on.rb +0 -33
data/lib/opensecret/factbase/published.facts/credential-facts.ini
@@ -1,40 +0,0 @@
-
- # --
- # -- If a plugin requires AWS IAM user credentials you must
- # -- define the directory the credentials are in within the
- # -- [plugin+platform] fact file.
- # --
- # -- A credentials INI factfile is expected to exist within
- # -- that directory with the name
- # --
- # -- => [:eco][id] + ".aws.credentials.ini"
- # -- => ci.hub.aws.credentials.ini [(if id is ci.hub)]
- # --
- # -- To specify AWS IAM credentials for a plugin, you
- # --
- # -- [1] specify [:aws][:keys_folder] in [plugin+platform]
- # -- [2] put <<eco.id>>.aws.credentials.ini inside folder
- # --
- # -- The format of the credentials file for a plugin ID of
- # -- (ci.hub) named [ci.hub.aws.credentials.ini] would be
- # --
- # -- [ci.hub]
- # -- aws.access.key = AKIA12345
- # -- aws.secret.key = abc123secret
- # -- aws.region.key = us-east-2
- # --
- # -- ----------------------------
- # -- AWS Credentials | Summary
- # -- ----------------------------
- # --
- # -- [One] plugin running on [a] workstation or inside [a]
- # -- container can only use [one] set of IAM user credentials
- # -- located in [an] accessible folder (on a removable drive).
- # --
-
- [aws]
- creds.name = e>> @f[:eco][:id] + Do.t + "aws.credentials.ini"
- dir.exists? = e>> !@f[@i[:workstation]][:aws_creds_dir].nil?
- creds.path(if) = { @s[:dir_exists?] } e>> File.join @f[@i[:workstation]][:aws_creds_dir], @s[:creds_name]
- creds.exist? = e>> exists?(@s[:creds_path]) && File.exists?(@s[:creds_path])
-
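The deleted fact file above expects a per-plugin credentials INI named `<eco.id>.aws.credentials.ini` inside the configured keys folder, with a section matching the plugin id. As a rough illustration of that naming and section convention (a minimal sketch, not opensecret's own reader; the plugin id "ci.hub" and the folder path are hypothetical examples):

```ruby
# Sketch (assumption, not the gem's API): locate and read the per-plugin
# AWS credentials INI described in the comments above.
require "pathname"

plugin_id  = "ci.hub"                         # stands in for [:eco][:id]
creds_dir  = Pathname.new("/media/usb/keys")  # stands in for the :aws_creds_dir fact
creds_file = creds_dir + "#{plugin_id}.aws.credentials.ini"

abort "no credentials file at #{creds_file}" unless creds_file.exist?

# Tiny INI reader: collect key = value pairs under the [ci.hub] section.
section, creds = nil, {}
creds_file.each_line do |line|
  line = line.strip
  next if line.empty? || line.start_with?("#")
  if line =~ /\A\[(.+)\]\z/
    section = Regexp.last_match(1)
  elsif section == plugin_id && line.include?("=")
    key, value = line.split("=", 2).map(&:strip)
    creds[key] = value
  end
end

puts creds["aws.access.key"]   # e.g. AKIA12345
puts creds["aws.region.key"]   # e.g. us-east-2
```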
data/lib/opensecret/factbase/published.facts/infrastructure-facts.ini
@@ -1,63 +0,0 @@
-
- [terraform]
- aws.instance.id = e>> "ec2-" + @f[:stamp][:mini_2]
- aws.sgroup.id = e>> "acl-" + @f[:stamp][:mini_2]
- route53.dns.id = e>> "dns-" + @f[:stamp][:mini_2]
- route53.www.id = e>> "www-" + @f[:stamp][:mini_2]
- route53.ssl.id = e>> "ssl-" + @f[:stamp][:mini_2]
- resource.group = e>> @f[:stamp][:midi]
-
- ec2.acl.name = e>> "acl." + @f[:stamp][:midi]
- ec2.tag.name = e>> "ec2." + @f[:stamp][:midi]
- ebs.tag.name = e>> "ebs." + @f[:stamp][:midi]
- ec2.acl.desc = e>> "Rules (acl) for the " + @f[:stamp][:midi] + " tcp/ip traffic."
-
- username = ubuntu
- keypair.name = e>> "keypair." + @f[:stamp][:midi]
- private.key = e>> File.join @f[:runtime][:dir], ( @s[:keypair_name] + ".pem" )
-
- aws.exe.win = windows_amd64/terraform-provider-aws_v1.7.0_x4.exe
- aws.exe.linux = linux_amd64/terraform-provider-aws_v1.6.0_x4
-
- exe.filename(if)= { Gem.win_platform? } e>> @s[:aws_exe_win]
- exe.filename(un)= { Gem.win_platform? } e>> @s[:aws_exe_linux]
- exe.source.dir = e>> File.join @f[:runtime][:dir], ".terraform/plugins"
-
- ec2.host = e>> LinuxHost.ec2_using_terraform( \
- @s[:username], \
- @s[:private_key], \
- @f[:runtime][:dir], \
- @s[:keypair_name], \
- @s[:exe_filename], \
- @s[:exe_source_dir], \
- @f[:runtime][:archive] \
- )
-
-
- [domain]
- has.domain? = e>> exists?( @p[:root_domain] )
- root(if) = { @s[:has_domain?] } e>> @p[:root_domain]
- www(if) = { @s[:has_domain?] } e>> "www" + dot + @s[:root]
- https(if) = { @s[:has_domain?] } e>> "https://" + @s[:www]
- http(if) = { @s[:has_domain?] } e>> "http://" + @s[:www]
-
-
- [machine]
- suffix = host
- name.known? = e>> exists?(@f[@i[:workstation]][plugin_symbol(@s[:suffix])])
-
- fact.name(if) = { @s[:name_known?] } e>> @f[@i[:workstation]][plugin_symbol(@s[:suffix])]
- key.name(if) = { @s[:name_known?] } e>> @s[:fact_name] + ".ssh.key.pem"
- ssh.key(if) = { @s[:name_known?] } e>> File.join @f[@i[:workstation]][:ssh_keydir], @s[:key_name]
- user.name(if) = { @s[:name_known?] } e>> @f[symbol(@s[:fact_name])][:username]
- host.name(if) = { @s[:name_known?] } e>> @f[symbol(@s[:fact_name])][:hostnames].first
- net.bridge(if) = { @s[:name_known?] } e>> @f[symbol(@s[:fact_name])][:net_bridge]
-
- host.class(if) = { @s[:name_known?] } e>> LinuxHost.existing_host( \
- @s[:host_name], \
- @s[:user_name], \
- @s[:ssh_key], \
- @s[:net_bridge] \
- )
-
- host.class(un) = { @s[:name_known?] } e>> @f[:terraform][:ec2_host]
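Facts in these INI files are either literal values or `e>>` Ruby expressions, optionally guarded by an `(if)` or `(un)` condition, as in the exe.filename pair that switches between the Windows and Linux terraform provider binaries. A rough sketch of how such a guarded pair collapses to one value (purely illustrative; the gem's actual fact evaluator is not shown in this diff):

```ruby
# Illustration only (assumption): how an (if)/(un) guarded fact pair such as
#   exe.filename(if) = { Gem.win_platform? } e>> @s[:aws_exe_win]
#   exe.filename(un) = { Gem.win_platform? } e>> @s[:aws_exe_linux]
# resolves to a single value depending on the guard expression.
aws_exe_win   = "windows_amd64/terraform-provider-aws_v1.7.0_x4.exe"
aws_exe_linux = "linux_amd64/terraform-provider-aws_v1.6.0_x4"

exe_filename = Gem.win_platform? ? aws_exe_win : aws_exe_linux
puts "terraform aws provider => #{exe_filename}"
```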
data/lib/opensecret/factbase/readme.md
@@ -1,24 +0,0 @@
-
- # Fact File Read Order
-
- Fact files are assimilated in a simple yet powerful manner.
-
- ## Assimilation Order
-
- The fact files are assimilated in this order
-
- - first the **core fact files** in the **reusable.facts** folder
- - next is the **plugin's fact file** eg ***gitlab.dev.ini*** in reusable.plugins/gitlab.dev
- - then every fact file in the **reusable.facts/general** directory and subdirectories
- - then it is all the fact files declared for **import via HTTP, SCP, SFTP and Rest APIs**
- - then from **git repositories, key-value stores** (etcd, redis) and **(nosql) databases**
- - finally, **credential fact files on removable media**, in vaults and key pass structures
-
- ## Indexing for Large Fact Repositories
-
- Once the third reusable.facts/general store becomes **large** we could devise methods of
-
- - **indexing** all the **long-lived facts** as and when they are produced
- - **mining fact dependencies** from plugin software, fact files and templates
- - then **importing** just the needful set of facts (and their dependencies)
-
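Expressed as code, the read order described in the deleted readme amounts to folding a fixed sequence of fact sources into one dictionary. The sketch below is illustrative only: `read_facts_from` is a hypothetical stand-in for however each source is actually read, and the idea that later sources may override earlier keys is an assumption, not something the readme states.

```ruby
# Sketch only: assimilate fact sources in the documented order.
# read_facts_from is a hypothetical placeholder, not part of opensecret.
def read_facts_from(source)
  { "#{source}.loaded" => true }   # placeholder payload
end

fact_sources = [
  :core_reusable_facts,           # reusable.facts folder
  :plugin_fact_file,              # eg gitlab.dev.ini in reusable.plugins/gitlab.dev
  :general_fact_files,            # reusable.facts/general (and subdirectories)
  :remote_imports,                # HTTP, SCP, SFTP and Rest APIs
  :repos_stores_databases,        # git repositories, etcd/redis, (nosql) databases
  :removable_media_credentials    # vaults and key pass structures
]

facts = fact_sources.each_with_object({}) do |source, merged|
  merged.merge!(read_facts_from(source))   # later sources win on collisions (assumption)
end
puts facts.keys
```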
data/lib/opensecret/factbase/retired.facts/maven.database.ide.facts.ini
@@ -1,127 +0,0 @@
- [database]
- name = app_datastore
- admin.username = e>> "admin.usr." + Stamp.yyjjj_hhmm_sst
- admin.password = e>> Secrets.derive_alphanum
- app.username = e>> "app.usr." + Stamp.yyjjj_hhmm_sst
- app.password = e>> Secrets.derive_alphanum
-
- content.url = https://www.eco-platform.co.uk/content/database.git/
- content.name = e>> "db.content." + Stamp.yyjjj_hhmm_sst
- content.zip = e>> @f[:database][:content_name] + ".zip"
- content.files = e>> GitFlow.file_names @f[:database][:content_url]
- content.path = /var/lib/mongodb/content.library
-
- import.stmts = e>> MongoFlow.to_inport_stmts( \
- @f[:database][:content_files], \
- @f[:database][:content_path], \
- @f[:database][:database_name], \
- @f[:database][:app_username], \
- @f[:database][:app_password] \
- )
-
-
- [rest]
- docs.url = https://www.eco-platform.co.uk/content/rest.documents.git/
- docs.offset = application.objects/
- zip.basename = e>> @f[:s3][:upload_prefix] + "application.objects"
- zip.filename = e>> @f[:rest][:zip_basename] + ".zip"
-
-
- [ide]
- idea.iml.dir = e>> File.join @f[:runtime][:dir], "idea_modules"
- conf.repo.url = https://www.eco-platform.co.uk/content/intellij.conf.git/
- conf.base.dir = e>> File.join @f[:runtime][:dir], "ide_config"
- conf.dir.name = apollo.laundry4j
- conf.home.dir = e>> File.join @f[:ide][:conf_base_dir], @f[:ide][:conf_dir_name]
-
- idea.prop.src = e>> File.join @f[:runtime][:dir], "asset.idea.properties"
- idea.prop.dir = C:/Program Files (x86)/JetBrains/IntelliJ IDEA Community Edition 14.0.2/bin
- idea.prop.dst = e>> File.join @f[:ide][:idea_prop_dir], "idea.properties"
-
- options.base.dir = e>> File.join @f[:runtime][:dir], "ide_config/apollo.laundry4j/options"
- default.xml.file = project.default.xml
- default.xml.path = e>> File.join @f[:ide][:options_base_dir], @f[:ide][:default_xml_file]
-
-
- [maven]
- jar.projects = e>> %w[ \
- https://www.eco-platform.co.uk/commons/laundry4j.facility \
- https://www.eco-platform.co.uk/commons/laundry4j.mappable \
- https://www.eco-platform.co.uk/commons/laundry4j.mappers \
- https://www.eco-platform.co.uk/commons/laundry4j.clusters \
- https://www.eco-platform.co.uk/football/footb4ll.net \
- https://www.eco-platform.co.uk/commons/laundry4j.explorer \
- ]
-
-
- install.dir = C:/Program Files/apache-maven-3.3.9
- settings.xml = C:/Program Files/apache-maven-3.3.9/conf/settings.xml
- version.dev = e>> Stamp.yyjjj_hhmm_sst + "-SNAPSHOT"
- version.push = e>> Stamp.yyjjj_hhmm_sst
-
- builds.dir = e>> File.join @f[:runtime][:dir], "maven_builds"
- amalgam.dir = e>> File.join @f[:runtime][:dir], "maven_projects"
- javadocs.dir = e>> File.join @f[:runtime][:dir], "maven_javadocs"
- apidocs.dir = e>> File.join @f[:maven][:javadocs_dir], "site/apidocs"
- pom.xml.src = e>> File.join @f[:runtime][:dir], "asset.amalgam.pom.xml"
- pom.xml.dst = e>> File.join @f[:maven][:amalgam_dir], "pom.xml"
-
- css.file.src = e>> File.join @f[:runtime][:dir], "asset.stylesheet.css"
- css.file.dst = e>> File.join @f[:maven][:apidocs_dir], "stylesheet.css"
-
- index.html.src = e>> File.join @f[:maven][:apidocs_dir], "overview-summary.html"
- index.html.dst = e>> File.join @f[:maven][:apidocs_dir], "index.html"
-
- cmd.prefix = e>> "mvn -f " + @f[:maven][:pom_xml_dst] + " clean "
- cmd.javadocs = e>> @f[:maven][:cmd_prefix] + "javadoc:aggregate source:aggregate"
-
- war.project = https://www.eco-platform.co.uk/commons/laundry4j.web
- war.module = e>> Pom.get_module_name @f[:maven][:war_project]
- projects = e>> @f[:maven][:jar_projects].push @f[:maven][:war_project]
- war.prj.dir = e>> File.join @f[:maven][:amalgam_dir], @f[:maven][:war_module]
- war.pom.xml = e>> File.join @f[:maven][:war_prj_dir], "pom.xml"
- war.run.cmd = e>> "mvn -f " + @f[:maven][:war_pom_xml] + " cargo:run -P tomcat8x"
-
- module.names = e>> Pom.get_module_names @f[:maven][:projects]
- module.lines = e>> Refactor.sandwich_lines @f[:maven][:module_names], "<module>", "</module>", 16
-
- no1.prj.dir = e>> File.join @f[:maven][:amalgam_dir], @f[:maven][:module_names].first
- no1.pom.xml = e>> File.join @f[:maven][:no1_prj_dir], "pom.xml"
- version.cmd = e>> "mvn -f " + @f[:maven][:no1_pom_xml] + " versions:set -DgroupId=com.* -DartifactId=* -DnewVersion=" + @f[:maven][:version_dev] + " -DgenerateBackupPoms=false"
-
-
- ############ mvn -f facility/pom.xml versions:set -DgroupId=com.* -DartifactId=* -DnewVersion=77.77.00-SNAPSHOT
- ###### http://localhost:8899/explorer-17.263.1858-SNAPSHOT
-
- # --
- # -- Create ruby/maven commands that act on one or more projects
- # -- Command examples
- # -- - system "mvn -f maven_projects/customer/pom.xml clean install"
- # --
- install.opts = clean install
- install.cmd = e>> "mvn -f " + @f[:maven][:pom_xml_dst] + " clean install"
-
- ## cmd.prefix = e>> "system " + Ch.dq + "mvn -f maven_projects/"
- ## cmd.postfix = /pom.xml
-
-
- [git]
- base.url = https://www.eco-platform.co.uk
- local.dir = e>> File.dirname Dir.pwd
- dot.git.dir = e>> File.join @f[:git][:local_dir], ".git"
- revision = e>> GitFlow.wc_revision @f[:git][:dot_git_dir]
- ssh.keyfile = e>> File.join Home.dir, "com.laundry4j.drive/library.ssh.access.keys/gitlab.laundry4j.private.key.pem"
-
- [git.content]
- url = e>> File.join @f[:git][:base_url], "content"
-
-
- [git.commons]
- url = e>> File.join @f[:git][:base_url], "commons"
- eco.proj.name = eco-platform
- eco.repo.name = e>> @s[:eco_proj_name] + ".git"
- eco.repo.url = e>> File.join @s[:url], @s[:eco_repo_name]
-
-
-
-
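The maven section above stitches shell commands together from facts, for example install.cmd and the versions:set command built from the first project's pom and a yyjjj_hhmm_sst development version. A worked example of the kind of command strings that produces (all values below are hypothetical stand-ins; the "17.263.1858-SNAPSHOT" style is inferred from the commented localhost URL in the file, and the exact Stamp output may differ):

```ruby
# Worked example (illustrative values only) of composing the maven command
# facts shown above into shell command strings.
version_dev = "17.263.1858-SNAPSHOT"                        # Stamp.yyjjj_hhmm_sst + "-SNAPSHOT" (assumed format)
pom_xml_dst = "maven_projects/pom.xml"                      # [:maven][:pom_xml_dst]
no1_pom_xml = "maven_projects/laundry4j.facility/pom.xml"   # [:maven][:no1_pom_xml]

install_cmd = "mvn -f " + pom_xml_dst + " clean install"
version_cmd = "mvn -f " + no1_pom_xml +
              " versions:set -DgroupId=com.* -DartifactId=* -DnewVersion=" +
              version_dev + " -DgenerateBackupPoms=false"

puts install_cmd
puts version_cmd
```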
data/lib/opensecret/factbase/retired.facts/s3-upload-block-facts.ini
@@ -1,17 +0,0 @@
- # --- --------------------------------------------------------------------- --- #
- # --- This fact (INI) file database carries facts for the s3 upload block. --- #
- # --- --------------------------------------------------------------------- --- #
- # --- [DEPENDENCY] => @f[:upload][:src_file_name] --- #
- # --- => This dependency must be set before fact evaluation. --- #
- # --- --------------------------------------------------------------------- --- #
-
- [upload]
- src.file.path = e>> File.join @f[:runtime][:dir], @f[:upload][:src_file_name]
- dot.full.extn = e>> File.extname @f[:upload][:src_file_path]
- src.base.name = e>> File.basename @f[:upload][:src_file_name], @f[:upload][:dot_full_extn]
- prefix.less.name = e>> @f[:upload][:src_base_name][ @f[:s3][:upload_prefix].length .. -1 ]
- app.props.base = e>> @f[:upload][:prefix_less_name] + @f[:upload][:dot_full_extn]
- app.props.key = e>> (@f[:upload][:app_props_base] + Ch.p + "url").gsub(".","_").to_sym
- dst.file.base = e>> @f[:upload][:prefix_less_name] + Ch.p + @f[:stamp][:midi]
- dst.file.name = e>> @f[:upload][:dst_file_base] + @f[:upload][:dot_full_extn]
- dst.file.path = e>> File.join @f[:s3][:uploads_dir], @f[:upload][:dst_file_name]
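These facts derive the destination object name from the source file name by stripping the configured s3 upload prefix and inserting the midi timestamp before the extension. A worked sketch with hypothetical values, assuming Ch.p denotes a period separator (by analogy with Ch.dq, which appears to be a double quote elsewhere in these fact files):

```ruby
# Worked sketch (hypothetical values; Ch.p assumed to be ".") of the
# destination name derivation expressed by the [upload] facts above.
upload_prefix = "s3."                         # @f[:s3][:upload_prefix] (assumed)
src_file_name = "s3.application.objects.zip"  # @f[:upload][:src_file_name] (assumed)
stamp_midi    = "17263.1858"                  # @f[:stamp][:midi] (illustrative)

dot_full_extn    = File.extname(src_file_name)                  # ".zip"
src_base_name    = File.basename(src_file_name, dot_full_extn)  # "s3.application.objects"
prefix_less_name = src_base_name[upload_prefix.length..-1]      # "application.objects"
dst_file_base    = prefix_less_name + "." + stamp_midi          # "application.objects.17263.1858"
dst_file_name    = dst_file_base + dot_full_extn                # "application.objects.17263.1858.zip"

puts dst_file_name
```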
data/lib/opensecret/plugins.io/file/file.rb
@@ -1,483 +0,0 @@
- #!/usr/bin/ruby
-
- # -- --------------------------------------------------------------- -- #
- # -- File facts are placeholders (keys in effect) just begging to be -- #
- # -- replaced by values sourced from some sort of map. This software -- #
- # -- centres around the DevOps fact replacement placeholder pattern. -- #
- # -- --------------------------------------------------------------- -- #
- class Files
- #### ==> ===================================================================================
- #### ==> ===================================================================================
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> Name this class FindReplace or simply just Replace => Replace.contains? Replace.do
- #### ==> ===================================================================================
- #### ==> ===================================================================================
-
-
-
- ## --- ---------------------------------------------------------------------- --- #
- ## --- How to Use File to extract different parts of an [absolute] file path. --- #
- ## --- ---------------------------------------------------------------------- --- #
- ### File Names ====] file = "/path/to/xyz.mp4"
- ### File Names ====] comp = File.basename file # => "xyz.mp4"
- ### File Names ====] extn = File.extname file # => ".mp4"
- ### File Names ====] name = File.basename file, extn # => "xyz"
- ### File Names ====] path = File.dirname file # => "/path/to"
- ## --- ---------------------------------------------------------------------- --- #
-
-
- # --
- # -- Scan every file in directory for placeholder keys within the
- # -- parameter map and replace them with the corresponding value.
- # -- Not Recursive => the scan does [not] recurse into folders
- # --
- # -- ----------------------------------
- # -- Replacing Betty (Nested Replace)
- # -- ----------------------------------
- # --
- # -- "GoBBetBettytyettyne" => "Gone"
- # --
- # -- A nested replace is done up to approximately 5 levels deep.
- # -- So replacing "Betty" from the above string does [NOT] produce
- # -- "GoBBettyettyne". The string becomes "Gone".
- # --
- # -- -----------
- # -- Parameters
- # -- -----------
- # -- fact_map : driving [placeholder] => [value] map
- # -- replace_dir : dir subject of the scan and replace
- # --
- def self.find_replace fact_map, replace_folder
-
- log.info(ere){ "#-- --------------------------------------------------------- #" }
- log.info(ere){ "#-- File changing in #{File.basename replace_folder}" }
- log.info(ere){ "#-- #{replace_folder}" }
- log.info(ere){ "#-- --------------------------------------------------------- #" }
-
- # -- ------------------------------------------------------------------------------ -- #
- # -- Iterate to substitute matched strings in file with their corresponding values. -- #
- # -- ------------------------------------------------------------------------------ -- #
- Dir.foreach( replace_folder ) do | file_name |
-
- file_path = File.join replace_folder, file_name
- next if File.directory? file_path
- next if File.extname(file_path).eql? ".log"
-
- nested_key_replace fact_map, file_path
-
- end
-
- log.info(ere){ "#-- --------------------------------------------------------- #" }
- log.info(ere){ "#-- Done changing files in #{File.basename replace_folder}" }
- log.info(ere){ "#-- --------------------------------------------------------- #" }
-
- end
-
- #--
- #-- Using the parameter [file] [replace] any occurence
- #-- of any [key] found in the parameter [map] with the
- #-- corresponding mapped key [value].
- #--
- #-- -----------------------------------
- #-- [Nested Replace]
- # -- Replacing Betty (Nested Replace)
- # -- ----------------------------------
- # --
- # -- "GoBBetBettytyettyne" => "Gone"
- # --
- # -- A nested replace is done up to approximately 5 levels deep.
- # -- So replacing "Betty" from the above string does [NOT] produce
- # -- "GoBBettyettyne". The string becomes "Gone".
- # --
- def self.nested_key_replace source_map, filepath
-
- for i in 1..5
- file_changed = key_replace source_map, filepath
- return unless file_changed
- log.info(ere){ "#-- --------------------------------------------------------- #" }
- log.info(ere){ "#-- File #{File.basename filepath} has been changed" }
- log.info(ere){ "#-- --------------------------------------------------------- #" }
- end
-
- end
-
- #--
- #-- Using the parameter [file] [replace] any occurence
- #-- of any [key] found in the parameter [map] with the
- #-- corresponding mapped key [value].
- #--
- #-- Side Effect => If at least one replacement occured this
- #-- method returns true, else false.
- #--
- def self.key_replace source_map, filepath
-
- replace_happened = false
-
- source_map.each do |theKey, theValue|
-
- next unless has_string? filepath, theKey
- factorize theKey, theValue, filepath
- replace_happened = true
-
- end
-
- return replace_happened
-
- end
-
-
-
- #--
- #-- Produce a map of every "recursive" file sitting under
- #-- any and all of the parameter directories array.
- #--
- #-- Harboured folders are traversed but folder names are
- #-- excluded from the resulting map.
- #--
- #-- The key/value structure of the map is
- #--
- #-- key => simple filename
- #-- value => (abs) folder path
- #--
- #-- --------------------------------
- #-- Filename NOT UNIQUE Exception
- #-- --------------------------------
- #--
- #-- Simple filename UNIQUENESS must prevail.
- #--
- #-- If the same base filename is found in any part or level
- #-- of the directory trees under the every parameter parent
- #-- folder - an exception will be thrown.
- #--
- def self.in_folders parent_folders
-
- files_map = {}
-
- parent_folders.each do |parent_folder|
-
- log.info(ere) { "Create map of files under #{nickname parent_folder}" }
- Throw.if_not_exists parent_folder
-
- Dir["#{parent_folder}/**/*"].each do |child_file|
-
- next if File.directory? child_file
- filename = File.basename child_file
- foldername = File.dirname child_file
- log.info(ere) { " #{filename} => #{nickname foldername}" }
- error_str = "Name NOT UNIQUE Error => [#{filename}].\n\n#{files_map.inspect}"
- raise RuntimeError.new error_str if files_map.has_key? filename
- files_map.store filename, foldername
-
- end
-
- end
-
- return files_map
-
- end
-
-
- #--
- #-- Find files of a given type (extension) that exist
- #-- recursively under a folder.
- #--
- #-- --------------------------------
- #-- Filename NOT UNIQUE Exception
- #-- --------------------------------
- #--
- #-- Simple filename UNIQUENESS must prevail.
- #--
- #-- If the same base filename is found in any part or level
- #-- of the directory trees under the every parameter parent
- #-- folder - an exception will be thrown.
- #--
- #-- ------------
- #-- Parameters
- #-- ------------
- #--
- #-- base_folder => the top-level folder to search
- #-- dir at (abs) path must exist
- #--
- #-- file_extn => extension of interesting files
- #-- including the leading [period].
- #-- (send ".exe" for exe files)
- #-- (send ".md" for markdown files)
- #--
- #-- ------------
- #-- Map Returned
- #-- ------------
- #--
- #-- Returns a { filename => rel_path } map of the matching
- #-- files in the folder. The map
- #--
- #-- keys => simple filename without extension
- #-- values => relative path from the base folder
- #--
- def self.to_name_path_map base_folder, file_extn
-
- files_map = {}
-
- Dir["#{base_folder}/**/*#{file_extn}"].each do |filepath|
-
- next if File.directory? filepath
- ext_error = "File extension not [#{file_extn}] in => #{nickname filepath}"
- raise RuntimeError.new(ext_error) unless File.extname(filepath).eql? "#{file_extn}"
-
- filename = File.basename filepath
-
- error_str = "Name NOT UNIQUE Error => [#{filename}].\n\n#{files_map.inspect}"
- raise RuntimeError.new error_str if files_map.has_key? filename
- files_map.store filename, filepath
-
- end
-
- return files_map
-
- end
-
-
- #--
- #-- Return a new map with the values folder path
- #-- forwarded by one level
- #--
- #-- If the map contains the below
- #--
- #-- {
- #-- file1 => user/docs/pdfs,
- #-- file2 => user/docs/pdfs/good,
- #-- file3 => user/docs/pdfs/bad
- #-- }
- #--
- #-- This method will return a map like this
- #--
- #-- {
- #-- file1 => docs/pdfs,
- #-- file2 => docs/pdfs/good,
- #-- file3 => docs/pdfs/bad
- #-- }
- #--
- #-- The values part has been forwarded by one level.
- #--
- def self.forwarded_path files_map
-
- changed_map = {}
-
- files_map.each do |the_name, the_old_path|
- the_new_path = the_old_path.split("/")[1..-1].join("/")
- changed_map.store the_name, the_new_path
- log.info(ere){ "Forwarded from #{the_old_path} to #{the_new_path}" }
- end
-
- return changed_map
-
- end
-
-
- #--
- #-- Path stripper expects the path in the first parameter
- #-- to start with the path in the second parameter.
- #--
- #-- It then returns the first parameter path with the
- #-- leading path stripped out.
- #--
- #-- ---------------------------
- #-- Strip Leading Path Example
- #-- ---------------------------
- #--
- #-- 1st param path = /home/joe/docs/pdfs/websites
- #-- 2nd param path = /home/joe/docs
- #-- Returned Path = pdfs/websites
- #--
- #-- ---------------------------------
- #-- The Middle Separator is Stripped
- #-- ---------------------------------
- #--
- #-- Note above that the middle separator is stripped
- #-- so the returned string has no leading separator.
- #--
- def self.lead_path_chopped long_path, lead_path
-
- return long_path.gsub(lead_path, "")[1..-1]
-
- end
-
-
- # -- ------------------------------------------------------------------- -- #
- # -- Returns true when the parameter file contains the parameter string. -- #
- # -- As a side effect the lines with at least 1 string match are logged. -- #
- # -- ------------------------------------------------------------------- -- #
- def self.has_string? the_file, theString
-
- containsMatch = false;
- line_index = 0;
-
- File.open( the_file, "r") do | file_obj |
-
- name_abbrv = File.basename the_file
- file_obj.each_line do | file_line |
-
- line_index = line_index + 1
- lineCount = sprintf '%03d', line_index
- if file_line.include? theString then
-
- squeezed_line = file_line.chomp.strip.squeeze(" ")
- log.info(ere) { "== string [#{theString}] occurs @line #{lineCount} in #{name_abbrv}" }
- log.info(ere) { "== |---> #{squeezed_line}" }
- containsMatch = true
-
- end
-
- end
-
- end
-
- return containsMatch;
-
- end
-
-
- # --
- # -- [FIND] lines that include a set of configured strings and
- # -- [REPLACE] then with the configured alternative.
- # --
- # -- -----------
- # -- Parameters
- # -- -----------
- # --
- # -- filepath : path to existing "to be changed" file
- # -- includes : include string array for line matching
- # -- new_line : replace the matched line with this str
- # --
- # -- --------------------------------
- # -- Dependencies and Assumptions
- # -- --------------------------------
- # --
- # -- file exists at filepath
- # --
- def self.find_replace_lines filepath, includes, new_line
-
- Throw.if_not_exists filepath
- Throw.if_nil includes
- Throw.if_nil new_line
-
- line_matches_count = 0;
- new_file_lines_set = ""
-
- File.open( filepath, "r") do | file_obj |
-
- file_obj.each_line do | file_line |
-
- unless (String.includes_all?( file_line, includes ) ) then
- new_file_lines_set += file_line
- next
- end
-
- ++line_matches_count
- new_file_lines_set += new_line
-
- log.info(ere) { "[replace] - ------------------------------------------------------- ##" }
- log.info(ere) { "[replace] - file name => #{File.basename filepath}" }
- log.info(ere) { "[replace] - has words => #{pp includes}" }
- log.info(ere) { "[replace] - orig line => #{file_line}" }
- log.info(ere) { "[replace] - ------- --------- ------- ---------- ------ ##" }
- log.info(ere) { "[replace] - outgoing line => #{file_line}" }
- log.info(ere) { "[replace] - incoming line => #{new_line}" }
- log.info(ere) { "[replace] - ------- --------- ------- ---------- ------ ##" }
-
- end
-
- end
-
- # -- ---------------------------------------------------------- -- #
- # -- [(over)write] new set of file lines to the parameter file. -- #
- # -- ---------------------------------------------------------- -- #
- File.write filepath, new_file_lines_set
- LogObject.file filepath, "replace"
-
- end
-
-
- # --
- # -- Write the 1D key/value map into a properties
- # -- file at the parameter folder.
- # --
- # -- Parameters
- # -- properties_map : the key/value map to serialize
- # -- props_dir_path : folder holding new properties file
- # -- props_filename : name of the new properties file
- # --
- # -- Dependencies and Assumptions
- # -- the directory will be created if it does not exist
- # -- we assume the properties file DOES NOT EXIST
- # -- the map is 1D and is not nil (can be empty)
- # -- the directory is writeable by the user
- # --
- def self.write_properties properties_map, props_dir_path, props_filename
-
- Dir.mkdir props_dir_path unless File.exists? props_dir_path
- prop_filepath = File.join props_dir_path, props_filename
- File.write prop_filepath, to_properties_text(properties_map)
-
- LogObject.file prop_filepath, "write properties"
-
- end
-
-
- # --
- # -- Create txt file in the properties format containing
- # -- a dictionary of name/value pairs separated by an
- # -- equals sign.
- # --
- # -- Parameters
- # -- properties_map : the key/value map to serialize
- # --
- # -- Dependencies and Assumptions
- # -- the map is 1D and is not nil (can be empty)
- # -- map keys are SYMBOLS so underscores are made dots
- # --
- def self.to_properties_text properties_map
-
- hdr_1 = "## Properties file with [#{properties_map.length}] key/value pairs.\n"
- hdr_u = "## @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ##\n"
-
- property_text = hdr_u + hdr_1 + hdr_u + "\n"
- properties_map.each do |key_symbol,value|
- key_string = key_symbol.to_s.gsub("_", ".")
- property_text += "#{key_string}=#{value}\n"
- end
-
- property_text += "\n" + hdr_u
- return property_text
-
- end
-
-
- # -- ---------------------------------------------------------- -- #
- # -- When the [file fact] replace [behaviour] is called against -- #
- # -- a "file" it [replaces all occurrences] (as best it can) of -- #
- # -- the given string within the file. Call replace occurrences -- #
- # -- against a folder and it replaces in all constituent files. -- #
- # -- ---------------------------------------------------------- -- #
- # -- Be careful = this implementation is not that clever. So if -- #
- # -- we try to replace all "Betty" occurrences - lines that may -- #
- # -- contain ["BetBettyty"] will likely end up with a ["Betty"] -- #
- # -- ---------------------------------------------------------- -- #
- def self.factorize from_string, to_string, in_file
-
- the_filename = File.basename in_file
-
- log.info(ere) { "From String => #{from_string}" }
- log.info(ere) { "[To] String => #{to_string}" }
- log.info(ere) { "File [Name] => #{the_filename}" }
-
- File.write( in_file, File.open( in_file, &:read ).gsub( from_string, to_string ) );
-
- end
-
-
- end
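The nested replace documented in the deleted file above (a bounded loop of replace passes so that occurrences revealed by an earlier pass are caught by a later one) can be reproduced with a plain gsub loop. A minimal standalone sketch, not the gem's Files class, showing why a handful of passes collapses "GoBBetBettytyettyne" to "Gone":

```ruby
# Standalone sketch of the nested-replace idea: repeat a plain gsub a bounded
# number of times, stopping early once a pass changes nothing.
def nested_replace(text, from, to, passes = 5)
  passes.times do
    replaced = text.gsub(from, to)
    break if replaced == text   # fixed point reached, nothing left to replace
    text = replaced
  end
  text
end

puts nested_replace("GoBBetBettytyettyne", "Betty", "")
# pass 1 => "GoBBettyettyne"
# pass 2 => "GoBettyne"
# pass 3 => "Gone"
```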