beta_tools 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +11 -0
- data/Capfile +2 -0
- data/Gemfile +4 -0
- data/README +7 -0
- data/Rakefile +7 -0
- data/beta_tools.gemspec +31 -0
- data/bin/deploy +9 -0
- data/bin/documentation_daemon +12 -0
- data/bin/loginToUrl +6 -0
- data/bin/onVpn +6 -0
- data/bin/pub +91 -0
- data/bin/query_solr +11 -0
- data/bin/solrWebApp +6 -0
- data/conf/schema.xml +220 -0
- data/config/deploy.rb +44 -0
- data/config.ru +2 -0
- data/docs/#next_design.mmd# +0 -0
- data/features/deploy.feature +9 -0
- data/features/git.feature +15 -0
- data/features/maven.feature +21 -0
- data/features/step_definitions/deploy_steps.rb +11 -0
- data/features/step_definitions/git_steps.rb +20 -0
- data/features/step_definitions/maven_steps.rb +35 -0
- data/features/support/env.rb +8 -0
- data/int/it_tools/suite_it_tools.rb +2 -0
- data/int/it_tools/test_integ1.rb +17 -0
- data/lib/beta_tools/version.rb +3 -0
- data/lib/beta_tools.rb +5 -0
- data/lib/it_tools/calc.rb +14 -0
- data/lib/it_tools/config.ru +2 -0
- data/lib/it_tools/environment.rb +86 -0
- data/lib/it_tools/git.rb +3 -0
- data/lib/it_tools/html_publish.rb +287 -0
- data/lib/it_tools/mail.rb +21 -0
- data/lib/it_tools/maven.rb +101 -0
- data/lib/it_tools/md5.rb +4 -0
- data/lib/it_tools/multipart.rb +78 -0
- data/lib/it_tools/network_tools.rb +72 -0
- data/lib/it_tools/options.rb +46 -0
- data/lib/it_tools/pub_driver.rb +188 -0
- data/lib/it_tools/publisher2.rb +232 -0
- data/lib/it_tools/sample.rb +9 -0
- data/lib/it_tools/shared.rb +22 -0
- data/lib/it_tools/solr.rb +157 -0
- data/lib/it_tools/views/search_results.erb +16 -0
- data/log/thin.log +177 -0
- data/spec/it_tools/deploy_spec.rb +20 -0
- data/spec/spec_helper.rb +0 -0
- data/test/bin/test_pub.rb +30 -0
- data/test/cleaner.rb +6 -0
- data/test/driver.rb +24 -0
- data/test/integration/tp.rb +107 -0
- data/test/it_tools/for_debugger.rb +4 -0
- data/test/it_tools/publisher2.rb +13 -0
- data/test/it_tools/suite_it_tools.rb +6 -0
- data/test/it_tools/suite_it_tools.rb~ +13 -0
- data/test/it_tools/test_deploy.rb +83 -0
- data/test/it_tools/test_html_publish.rb +54 -0
- data/test/it_tools/test_maven.rb +31 -0
- data/test/it_tools/test_multipart.rb +31 -0
- data/test/it_tools/test_network_tools.rb +14 -0
- data/test/it_tools/test_publisher2.rb +42 -0
- data/test/it_tools/test_publisher2_support.rb +16 -0
- data/test/it_tools/test_sample.rb +10 -0
- data/test/it_tools/test_shared.rb +36 -0
- data/test/it_tools/test_solr.rb +22 -0
- data/test/it_tools/test_solr2.rb +22 -0
- data/testdata/assembly_pom.xml +157 -0
- data/testdata/desir.txt +39 -0
- data/testdata/ear_pom.xml +82 -0
- data/testdata/java.html +524 -0
- data/testdata/pom.xml +174 -0
- data/testdata/publish/src_dir/#arch.mmd# +0 -0
- data/testdata/publish/src_dir/ajax-loader.gif +0 -0
- data/testdata/publish/src_dir/file1.mmd +5 -0
- data/testdata/publish/src_dir/file2.mmd +3 -0
- data/testdata/publish/src_dir/file3.abc +0 -0
- data/testdata/publish/src_dir/file4.mmd +3 -0
- data/testdata/publish/src_dir/help.png +0 -0
- data/testdata/publish/src_dir/images/linux.jpeg +0 -0
- data/testdata/publish/src_dir/inputStyles.css +0 -0
- data/testdata/publish/src_dir/search.html +0 -0
- data/testdata/publish/src_dir/search.js +0 -0
- data/testdata/publish/target_dir/.@arch.html +0 -0
- data/testdata/publish/target_dir/file1.html +7 -0
- data/testdata/publish/target_dir/file2.html +5 -0
- data/testdata/publish/target_dir/search.js +0 -0
- data/testdata/publish/target_dir_static/.@arch.html +1 -0
- data/testdata/publish/target_dir_static/file1.html +7 -0
- data/testdata/publish/target_dir_static/index.html +1 -0
- data/testdata/publish/target_dir_static/style.css +61 -0
- data/testdata/ruby.mmd +404 -0
- data/testdata/simple.mmd +3 -0
- data/testdata/simple.xml +3 -0
- data/testdata/small.txt +2 -0
- data/testdata/utf8_chars.txt +1 -0
- metadata +338 -0

data/lib/it_tools/options.rb
@@ -0,0 +1,46 @@
require 'optparse'
require 'logger'

class Options
  attr_accessor :options, :log
  def initialize(ops = {})
    @options = {}
    options.merge! ops
    @log = Logger.new('log.txt')
    if level = @options[:debug_level]
      @log.level = level
    else
      @log.level = Logger::DEBUG
    end
    optparse = OptionParser.new do |opts|
      opts.banner = "Usage: pub -e <environment_to_publish_to>"
      @options[:debug] = false
      opts.on( '-d', '--debug', 'Turn on program debugging' ) do |level|
        options[:debug] = true
        options[:debug_level] = level
      end
      @options[:verbose] = false
      opts.on( '-v', '--verbose', 'Output more information' ) do
        @options[:verbose] = true
      end
      @options[:environment] = 'loc'
      opts.on( '-e', '--environment ENV', 'Publish to environment ENV. REQUIRED.' ) do |env|
        options[:environment] = env
      end
      @options[:indexer_url] = nil
      opts.on( '-i', '--indexer_ulr URL', 'Submit docs to indexer with url: URL' ) do |solr_base_url|
        options[:indexer_url] = solr_base_url
      end
      @options[:source_folder] = '.'
      opts.on( '-s', '--source_folder FOLDER', 'Use FOLDER as source of artifact to deploy.') do |source_folder|
        options[:source_folder] = source_folder
      end
      opts.on( '-h', '--help', 'Display this screen' ) do
        puts opts
        exit
      end
    end
    optparse.parse!
    @log.debug "Deploying to #{options[:environment]} environment." if @options[:debug]
  end
end
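
A rough usage sketch for the Options class above, assuming the gem's lib layout and an invocation similar to the bin/pub banner; the ARGV values and the require path are illustrative only, not taken from the gem:

# Hypothetical wrapper script; Options#initialize parses ARGV itself via OptionParser.
require_relative 'lib/it_tools/options'

# Stand-in for a shell call such as: pub -e prod -v -s ./site_src
ARGV.replace ['-e', 'prod', '-v', '-s', './site_src']

cli = Options.new(:debug_level => Logger::DEBUG)   # logs to ./log.txt
cli.options[:environment]    # => "prod"
cli.options[:verbose]        # => true
cli.options[:source_folder]  # => "./site_src"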

data/lib/it_tools/pub_driver.rb
@@ -0,0 +1,188 @@
#!/usr/bin/env ruby

require_relative 'html_publish'
require_relative 'network_tools'
require_relative 'publisher2'
require 'logger'
require 'optparse'

module PubDriver
  class Pub
    attr_accessor :ops
    def initialize( ops = {} )
      @ops = {}
      @ops.merge! ops
      @log = Logger.new('log.txt')
      if level = @ops[:debug_level]
        @log.level = level
        @log.debug "Set logger to debug."
      else
        @log.level = Logger::INFO
        @log.info "Set logger to info."
      end
    end
    def validate( options, required, values )
      missing_msg = "Missing required parameter: '"
      required.each do |entry|
        value = values[entry]
        raise missing_msg + value[:short] + "', or '" + value[:long] + "', which specifies the " + value[:mesg] unless options[entry]
      end
    end
    def publish_locally2
      options = get_local_pub_options
      solr_search_files = [ "search.html",
                            "search.js",
                            "ajax-loader.gif",
                            "help.png" ]

      parameters = {
        :solr_base_url => "http://searcher:8983/solr/",
        :style_sheet => "inputStyles.css",
        :solr_search_files => solr_search_files,
        :src_dir => options[:src_dir],
        :target_dir => options[:solr_dir],
        :static_dir => options[:static_dir],
        :debug_level => options[:debug_level],
        :solr_search_files_dir => options[:solr_search_files_dir],
        :search_category => options[:search_category] }

      @log.debug "Params: " + parameters.to_s
      publisher = Publisher::Markdown.new parameters
      publisher.process_files
    end
    def publish_locally
      solr_indexer = "http://127.0.0.1:8983/solr/"
      converter = Website::MarkdownConverter.new :indexer_url => solr_indexer
      converter.go
    end
    def validate_local_publish options
      dir_counts, ext_counts = count_files_in_dir options, [:src_dir, :solr_dir, :static_dir], ['.mmd']
      dir_count_printer = lambda { |key,count,hash| p hash[key] + ": [#{count}]" }
      print_counts dir_counts, dir_count_printer
      print_counts ext_counts, options
    end
    def print_counts hash, printer
      hash.each do |key,value|
        printer.call key, value, hash
      end
    end
    def count_files_in_dir options, dirs, extension = []
      file_count = {}
      ext_count = {}
      dirs.each do |dir|
        file_count[dir] = Dir.entries(options[dir]).size - 2
        next unless extension.size > 0
        files_with_extension_count = {}
        extension.each do |curr_ext|
          ext_count[curr_ext] = 0
        end
        Dir.entries(options[dir]).each do |file|
          full_path = File.join options[dir], file
          next unless File.exists? full_path
          next if File.directory? full_path
          extension.each do |curr_ext|
            if (File.extname file) == curr_ext
              ext_count[curr_ext] += 1
            end
          end
        end
        return [file_count,ext_count]
      end
    end
    def rsync_it ops
      command = "rsync -avP --stats #{ops[:from_dir]} "
      command += "#{ops[:as_user]}@" if ops[:as_user]
      command += "#{ops[:to_host]}:#{ops[:to_dir]} "
      p "[command]: " + command
      system command
    end

    def publish_remote params
      vpnTools = NetworkTools::VpnTools.new
      rsync_ops = {}
      rsync_ops[:from_dir] = params[:doc]
      rsync_ops[:as_user] = params[:as_user]
      if vpnTools.on_vpn
        @log.debug "On VPN, so sync'ing regular docs AND work docs"
        rsync_ops[:to_host] = params[:int_host]

        rsync_ops[:to_dir] = params[:int_docs]
        rsync_it rsync_ops

        rsync_ops[:from_dir] = params[:wdoc]
        rsync_ops[:to_dir] = params[:int_wdocs]
        rsync_it rsync_ops
      else
        @log.debug "On VPN, so only sync'ing regular docs."
        rsync_ops[:to_host] = params[:ext_host]
        rsync_ops[:to_dir] = params[:ext_docs]
        rsync_it rsync_ops
      end
    end
    def get_local_pub_options
      options = {}
      src = { :short => '-s', :long => '--src_dir DIR', :mesg => 'Directory of source markdown files.'}
      solr = { :short => '-w', :long => '--solr_write_dir DIR', :mesg => 'Directory to write solr files.'}
      stat = { :short => '-t', :long => '--static_html_dir DIR', :mesg => 'Directory to write static html files.'}
      solr_search = { :short => '-l', :long => '--solr_search_files_dir DIR', :mesg => 'Directory to read solr search files from.'}

      values = { :src_dir => src, :solr_dir => solr, :static_dir => stat, :ssf => solr_search }
      optparse = OptionParser.new do |opts|
        opts.on( '-h', '--help', 'Display this screen' ) do
          puts opts
          exit
        end
        curr_opt = values[:src_dir]
        opts.on( curr_opt[:short], curr_opt[:long], curr_opt[:mesg] ) do |dir|
          @log.debug "Setting source directory for markdown files."
          raise "Directory not specified! " + curr_opt[:mesg] if dir.nil?
          raise "Can't find markdown source dir as specified #{dir}" unless File.directory? dir
          options[:src_dir] = dir
        end
        curr_opt = values[:solr_dir]
        opts.on( curr_opt[:short], curr_opt[:long], curr_opt[:mesg] ) do |dir|
          raise "Directory not specified! " + curr_opt[:mesg] if dir.nil?
          FileUtils.mkdir dir unless File.exists? dir
          options[:solr_dir] = dir
        end
        curr_opt = values[:static_dir]
        opts.on( curr_opt[:short], curr_opt[:long], curr_opt[:mesg] ) do |dir|
          raise "Directory not specified! " + curr_opt[:mesg] if dir.nil?
          FileUtils.mkdir dir unless File.exists? dir
          options[:static_dir] = dir
        end
        curr_opt = values[:ssf]
        opts.on( curr_opt[:short], curr_opt[:long], curr_opt[:mesg] ) do |dir|
          raise "Directory not specified! " + curr_opt[:mesg] if dir.nil?
          FileUtils.mkdir dir unless File.exists? dir
          options[:solr_search_files_dir] = dir
        end

        mesg = "Specify the search category: 'public', 'work', etc..."
        opts.on( '-c', '--index_category CATEGORY', mesg ) do |category|
          raise "Missing Field: " + mesg if category.nil?
          options[:search_category] = category
        end

        mesg = "Set to 'info' or 'debug'"
        opts.on( '-d', '--debug_level LEVEL', mesg ) do |level|
          raise "Debug level not specified! #{mesg}" if level.nil?
          case level
          when "info"
            options[:debug_level] = Logger::INFO
            @log.level = Logger::INFO
          when "debug"
            options[:debug_level] = Logger::DEBUG
            @log.level = Logger::DEBUG
          else
            raise "Debug level not specified properly! #{mesg}"
          end
        end
      end
      optparse.parse!
      validate(options, [:src_dir, :solr_dir, :static_dir], values)
      @ops.merge! options
      return options
    end
  end
end
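
The following is a hypothetical end-to-end run of PubDriver::Pub#publish_locally2 (the real bin/pub wrapper is not shown in this hunk). It assumes the placeholder directories exist, that inputStyles.css and *.mmd files sit in the source directory, that ./search_assets holds the listed search files, and that a Solr instance is reachable at the hard-coded http://searcher:8983/solr/ URL:

# Hypothetical driver script, not part of the gem.
require 'fileutils'   # the mkdir calls in get_local_pub_options expect FileUtils to be loaded
require_relative 'lib/it_tools/pub_driver'

# get_local_pub_options reads ARGV, so this mimics a shell call like:
#   pub -s ./mmd_src -w ./solr_out -t ./static_out -l ./search_assets -c public -d debug
ARGV.replace ['-s', './mmd_src',        # must already exist and contain *.mmd plus inputStyles.css
              '-w', './solr_out',
              '-t', './static_out',
              '-l', './search_assets',  # should contain search.html, search.js, ajax-loader.gif, help.png
              '-c', 'public',
              '-d', 'debug']

pub = PubDriver::Pub.new
pub.publish_locally2   # converts the markdown, posts it to Solr, and writes the static mirror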

data/lib/it_tools/publisher2.rb
@@ -0,0 +1,232 @@
require "pygments"
require "redcarpet"
require 'logger'
require_relative 'solr'

module Publisher
  class Markdown
    attr_accessor :is_markdown_file, :convert_to_html_filename, :ops, :log

    def initialize(ops = {})
      @ops = { :output_style_sheet => 'style.css' }
      @ops.merge! ops
      @is_markdown_file = lambda { |file| File.extname(file) == '.mmd' }
      @convert_to_html_filename = lambda { |filename| File.basename(filename, File.extname(filename)) + ".html" }
      @log = Logger.new('log.txt')
      if level = @ops[:debug_level]
        @log.level = level
      else
        @log.level = Logger::INFO
      end
    end

    def copy_src_if_newer(read_dir, write_dir)
      @log.info "Copying all files from: #{read_dir}, to: #{write_dir}, if they are newer in from dir."
      return unless Dir.exists? read_dir
      FileUtils.mkdir write_dir unless Dir.exists? write_dir
      Dir.foreach read_dir do |curr_file|
        read = File.absolute_path(File.join(read_dir,curr_file))
        next if File.directory? read
        write = File.absolute_path(File.join(write_dir,curr_file))
        FileUtils.cp_r read, write_dir if file_newer? read, write
        @log.debug "Copied from: #{read}, to: #{write}"
      end
    end
    def insert_css_link(file_list)
      style_info = '<head><LINK REL=StyleSheet HREF="' + @ops[:output_style_sheet] + '" TYPE="text/css"/></head>'
      prepend_string style_info, file_list
    end

    def prepend_string(string, file_list)
      file_list.each do |file|
        contents = File.open(file,'r'){|f| f.read }
        tmp_file = file + '.tmp'
        File.open(tmp_file, 'w'){|f| f.write string + contents}
        FileUtils.rm_f file
        FileUtils.mv tmp_file, file
      end
    end
    def copy_files_to_dir(to_dir, file_list)
      @log.debug "Copy to: #{to_dir}. Files: #{file_list}"
      FileUtils.mkdir to_dir unless File.exists? to_dir
      raise "#{to_dir} is not a directory" unless File.directory? to_dir
      static_file_list = []
      file_list.each do |file|
        FileUtils.cp file, to_dir
        static_file_list.push File.absolute_path(File.join(to_dir, File.basename(file)))
      end
      return static_file_list
    end
    def copy_solr_search_files ops, to_dir
      search_files = ops[:solr_search_files]
      solr_search_files_dir = ops[:solr_search_files_dir]
      @log.debug "Copying solr search files from: #{solr_search_files_dir}, to: #{to_dir}"
      search_files.each do |file|
        from = File.join(solr_search_files_dir, file)
        FileUtils.cp from, to_dir
      end
    end
    def process_files(from_dir = @ops[:src_dir], to_dir = @ops[:target_dir])
      # Files for searching
      newer_files = get_newer_src_files from_dir, to_dir, @is_markdown_file, @convert_to_html_filename
      converted_files = convert_mmd_files newer_files, to_dir
      send_to_indexer converted_files, @ops[:search_category]
      copy_solr_search_files @ops, to_dir
      copy_src_if_newer File.join(from_dir, 'images'), File.join(to_dir, 'images')
      write_stylesheet from_dir, to_dir
      # Now static only files
      static_dir = to_dir + "_static"
      static_file_list = copy_files_to_dir static_dir, converted_files
      copy_src_if_newer File.join(from_dir, 'images'), File.join(static_dir, 'images')
      insert_css_link static_file_list
      write_stylesheet from_dir, static_dir
      create_index static_dir
    end
    def send_to_indexer(file_list, search_category)
      uploader = Solr::Upload.new(ops)
      file_list.each do |file|
        contents = File.open(file,'r'){ |f| f.read }
        extension = File.extname file
        file_name = File.basename file, extension
        @log.debug "Indexing the following: #{file_name}"
        uploader.upload_file file_name, contents, file_name, search_category
      end
    end
    def create_index(dir)
      files = File.join dir,'*.html'
      index = Dir.glob [files]
      index.sort! { |a,b| a.downcase <=> b.downcase }
      file = File.open(File.join(dir,"index.html"), 'w')
      file.write("<html><body><ul>")
      index.each { |filename|
        basename = File.basename(filename, File.extname(filename))
        file.write("<li/><a href=\"#{basename}.html\">#{basename}")
      }
      file.write("</ul></body></html>")
      file.close()
    end

    def write_stylesheet(read_dir,write_dir)
      contents = File.open(File.join(read_dir, @ops[:style_sheet]), "rb"){|f| f.read}
      css = Pygments.css
      File.open(File.join(write_dir,@ops[:output_style_sheet]), 'w') {|f| f.write(css + "\n" + contents) }
    end
    def convert_mmd_files(files_to_convert, target_dir)
      markdown = Redcarpet::Markdown.new(HTMLwithPygments,
                                         :fenced_code_blocks => true,
                                         :tables => true,
                                         :autolink => true,
                                         :with_toc_data => true)
      converted_files = []
      files_to_convert.each do |file|
        log.info "Working on file: " + file
        contents = File.open(file, "r"){|f| f.read}
        contents = contents.remove_non_ascii

        html = markdown.render(contents.force_encoding('US-ASCII'))
        log.debug "Converted markdown"
        target_file = get_target_file file, target_dir, @convert_to_html_filename
        write_file html, target_file
        converted_files.push target_file
      end
      return converted_files
    end
    def write_file( contents, out_file )
      File.open(out_file, 'w') { |f| f.write contents }
    end
    def get_target_file(src_filename, target_dir, make_target_filename = nil)
      if make_target_filename
        target_file_name = make_target_filename.call src_filename
      else
        target_file_name = src_filename
      end
      target_file = File.absolute_path target_file_name, target_dir
    end
    def get_newer_src_files(src_dir, target_dir, is_wanted_src_file = nil, make_target_filename = nil)
      newer_src_files = []
      Dir.foreach(src_dir) do |curr_file|
        next if File.directory? curr_file
        if is_wanted_src_file
          next unless is_wanted_src_file.call curr_file
        end
        target_file = get_target_file(curr_file, target_dir, make_target_filename)
        src_file = File.absolute_path curr_file, src_dir
        newer_src_files.push src_file if file_newer? src_file, target_file
      end
      return newer_src_files
    end
    def file_newer?(first_file, second_file)
      first_file_ts = File.stat(first_file).mtime
      return true unless File.exist?(second_file)
      second_file_ts = File.stat(second_file).mtime
      if second_file_ts > first_file_ts
        return false
      else
        return true
      end
    end
  end

  #---------------------------------------

  class HTMLwithPygments < Redcarpet::Render::HTML
    attr_accessor :ops, :log

    def initialize(options = {})
      super
      @ops = {:log_filename => 'log.txt'}
      @ops.merge! options
      @log = Logger.new(@ops[:log_filename])
      if level = @ops[:debug_level]
        @log.level = level
      else
        @log.level = Logger::INFO
      end
      $toc1 = ""
      $last_header_level = 0
      $last_header_count = 0
      $header = Array.new()
      0.upto(6) { |i| $header[i] = 0 }
    end

    def block_code(code, language)
      Pygments.highlight(code, :lexer => language)
    end
    def get_header_label( header_level )
      index = header_level - 1
      $header[index] = $header[index] + 1
      header_level.upto($header.length - 1) { |i| $header[i] = 0 }
      header_string = ""
      0.upto(header_level - 1) {
        |i| if i == header_level - 1
          header_string += $header[i].to_s
          @log.debug "Header String: #{header_string}. Header Entry: #{$header[i]}"
        else
          header_string += $header[i].to_s + "."
        end
      }
      @log.info "processed: " + header_string
      return header_string
    end
    def set_less_significant_to_zero(header_level)
      10.times {|i| p i+1}
    end
    def header(text, header_level)
      header_label = get_header_label(header_level)
      id = text.gsub(" ", "_") unless text.nil?
      $toc1 += "<div class=\"toc#{header_level}\"><a href=\"##{id}\">#{header_label} - #{text}</a></div><br/>\n"
      return "\n<h#{header_level} id=\"#{id}\">#{header_label} - #{text}</h#{header_level}>\n"
    end
    def postprocess(full_document)
      temp = $toc1
      $toc1 = ""
      0.upto(6) { |i| $header[i] = 0 }
      return temp + full_document
    end
  end
end
class String
  def remove_non_ascii(replacement="")
    self.force_encoding('ASCII-8BIT').gsub(/[\x80-\xff]/,replacement)
  end
end
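
For comparison, Publisher::Markdown can also be driven directly with the same kind of hash that publish_locally2 assembles. This sketch uses placeholder paths and a local Solr URL, and assumes the pygments.rb and redcarpet gems are installed; it is not code from the gem:

require 'fileutils'
require_relative 'lib/it_tools/publisher2'

publisher = Publisher::Markdown.new(
  :solr_base_url         => 'http://localhost:8983/solr/',   # placeholder indexer URL
  :style_sheet           => 'inputStyles.css',               # read from :src_dir by write_stylesheet
  :solr_search_files     => ['search.html', 'search.js', 'ajax-loader.gif', 'help.png'],
  :solr_search_files_dir => './search_assets',
  :src_dir               => './mmd_src',
  :target_dir            => './solr_out',
  :search_category       => 'public')

# Converts newer *.mmd files into ./solr_out, indexes them, then builds a
# ./solr_out_static mirror with a stylesheet link and a generated index.html.
publisher.process_files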

data/lib/it_tools/shared.rb
@@ -0,0 +1,22 @@
require 'xmlsimple'
module SharedTool
  class RegularExpression
    def has_string(data, regex)
      if first_occurence data, regex != ""
        return true
      end
      return false
    end
    def first_occurrence(data, regex)
      if data =~ regex
        return $1
      end
      return ""
    end
  end
end
class String
  def remove_non_ascii(replacement="")
    self.force_encoding('ASCII-8BIT').gsub(/[\x80-\xff]/,replacement)
  end
end
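
A small, made-up illustration of SharedTool::RegularExpression#first_occurrence (the xmlsimple gem must be installed for the require to succeed, and the require path is assumed):

require_relative 'lib/it_tools/shared'

re = SharedTool::RegularExpression.new
re.first_occurrence('version: 1.2.3', /version: (\S+)/)   # => "1.2.3" (first capture group)
re.first_occurrence('no match here',  /version: (\S+)/)   # => ""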

data/lib/it_tools/solr.rb
@@ -0,0 +1,157 @@
require 'net/http'
require 'uri'
require 'sinatra/base'
require 'sanitize'
require 'logger'
require 'erb'
require_relative 'multipart'
require_relative 'shared'

class WebApp < Sinatra::Base
  configure do
    set :public_folder, '/data/documentation/'
    set :environment, :production
    set :port, 80
    set :solr_host, 'localhost'
    set :solr_port, '8983'
    enable :logging, :dump_errors, :raise_errors
  end
  get '/' do
    "it works!"
  end
  get '/srch' do
    handler = Handler.new
    params[:solr_host] = settings.solr_host
    params[:solr_port] = settings.solr_port
    return "<p>No 'query' specified.</p>" unless params["query"]
    begin
      @highlights = handler.get_srch params
      result = erb :search_results
      return result
    rescue NoResults => reason
      return reason.mesg
    end
  end
  def check_highlights
    retval = "<ul>"
    @highlights.each do |key, value|
      if value['body'].nil?
        next
      end
      body = value['body'][0]
      body = body.remove_non_ascii
      clean = Sanitize.clean(body, Sanitize::Config::RELAXED)
      retval += "<li>#{key}<br><div>#{clean}</div>"
    end
    retval += "</ul>"
  end
end
class NoResults < RuntimeError
  attr :mesg
  def initialize(mesg)
    @mesg = mesg
  end
end


class Handler
  def get_srch params
    query = params["query"]
    if query.nil?
      raise NoResults.new "<p>No Query Specified</p>"
    end
    query = Solr::Query.new params
    resp = query.do_query params
    @highlights = resp['highlighting']
    if @highlights.length == 0
      raise NoResults.new "<p>No Results</p>"
    end
    return @highlights
  end
end
module Solr
  class Query
    attr_accessor :ops, :log
    def initialize(options = {})
      @ops = {}
      @ops.merge! options unless options.nil?
      @log = Logger.new('log.txt')
      if level = @ops[:debug_level]
        @log.level = level
      else
        @log.level = Logger::DEBUG
      end
      begin
        @ops[:solr_host] = settings.solr_host
      rescue
      end
      mesg = "Must specify ':solr_host' and ':solr_port' in constructor hash"
      raise mesg unless @ops[:solr_host] and @ops[:solr_port]
    end
    def do_query(params)
      h = Net::HTTP.new(@ops[:solr_host], @ops[:solr_port])
      query = params["query"]
      category = params["category"]
      query = "category:" + category + " AND " + params["query"] unless category.nil?
      all = {
        "q" => URI.escape(query),
        "wt" => "ruby",
        "hl" => "true",
        "hl.fl" => "*"
      }
      query = "/solr/select?"
      all.each do |key, value|
        query += key + "=" + value + "&"
      end
      hresp, data = h.get(query)

      if data.nil?
        return "<p>nothing</p>"
      else
        rsp = eval(data)
        return rsp
      end
    end
  end
  class Upload
    attr_accessor :ops, :debug
    def initialize(options = {})
      @ops = { :debug => true }
      @ops.merge! options
      mesg = 'Missing param. Example :solr_base_url => "http://192.168.0.22:8983/solr/"'
      raise mesg unless options[:solr_base_url]
      @log = Logger.new('log.txt')
      if level = @ops[:debug_level]
        @log.level = level
      else
        @log.level = Logger::DEBUG
      end
    end
    def upload_file(filename, data, file_id, search_category)
      params = {
        "literal.id" => file_id,
        "commit" => "true",
        "uprefix" => "attr_",
        "fmap.content" => "body"
      }
      if @ops[:search_category]
        params["literal.category"] = @ops[:search_category]
      end
      url = @ops[:solr_base_url] + "update/extract"
      @log.debug "[URL]: " + url
      post = Multipart::Post.new
      post.add_params(params)
      post.add_file(filename, data)
      resp = post.post(url)
      case resp
      when Net::HTTPOK
        @log.debug "Successfully submitted file to indexer."
      else
        @log.error "Failed to submit file to indexer."
      end
    end
    def foo
      puts "bar"
    end
  end
end
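
A hypothetical indexing call using Solr::Upload from the file above; the Solr URL, file path, and category are placeholders, and the sinatra and sanitize gems must be installed for solr.rb to load:

require_relative 'lib/it_tools/solr'

uploader = Solr::Upload.new(:solr_base_url   => 'http://localhost:8983/solr/',
                            :search_category => 'public')

# Multipart-posts the document to <solr_base_url>update/extract with literal.id set to the file id.
html = File.read('./solr_out/file1.html')
uploader.upload_file('file1', html, 'file1', 'public')

# The query side is served by the Sinatra WebApp defined in the same file, e.g.:
#   GET /srch?query=deploy&category=public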

data/lib/it_tools/views/search_results.erb
@@ -0,0 +1,16 @@
<ul>
<%
@highlights.each do |key, value|
  if value['body'].nil?
    next
  end
  body = value['body'][0]
  body = body.remove_non_ascii
  clean = Sanitize.clean(body, Sanitize::Config::RELAXED)
%>
<li>
  <span class="click_me_span" onclick="load_content('<%= key %>' )">
  <%=key%></span><br/>
  <div><%= clean %></div>
<%end%>
</ul>