pmux-logview 0.2.2
Sign up to get free protection for your applications and to get access to all the features.
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/Makefile +18 -0
- data/README.md +113 -0
- data/Rakefile +4 -0
- data/bin/pmux-logview +4 -0
- data/conf/config.ru +15 -0
- data/conf/password +5 -0
- data/conf/pmux-logview.conf +12 -0
- data/lib/pmux-logview.rb +11 -0
- data/lib/pmux-logview/application.rb +69 -0
- data/lib/pmux-logview/auth_helper.rb +60 -0
- data/lib/pmux-logview/controller.rb +141 -0
- data/lib/pmux-logview/log_parser.rb +368 -0
- data/lib/pmux-logview/logger_wrapper.rb +129 -0
- data/lib/pmux-logview/model.rb +21 -0
- data/lib/pmux-logview/static/css/images/animated-overlay.gif +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_diagonals-thick_90_eeeeee_40x40.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_flat_15_cd0a0a_40x100.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_glass_100_e4f1fb_1x400.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_glass_50_3baae3_1x400.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_glass_80_d7ebf9_1x400.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_highlight-hard_100_f2f5f7_1x100.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_highlight-hard_70_000000_1x100.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_highlight-soft_100_deedf7_1x100.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-bg_highlight-soft_25_ffef8f_1x100.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-icons_2694e8_256x240.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-icons_2e83ff_256x240.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-icons_3d80b3_256x240.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-icons_72a7cf_256x240.png +0 -0
- data/lib/pmux-logview/static/css/images/ui-icons_ffffff_256x240.png +0 -0
- data/lib/pmux-logview/static/css/jquery-ui-1.10.0.css +1186 -0
- data/lib/pmux-logview/static/css/jquery.dataTables.css +221 -0
- data/lib/pmux-logview/static/css/normalize.css +396 -0
- data/lib/pmux-logview/static/css/pmux-logview.css +161 -0
- data/lib/pmux-logview/static/css/tchart.css +124 -0
- data/lib/pmux-logview/static/font/7TssRTXcaLr8beqDiv5lkQ.woff +0 -0
- data/lib/pmux-logview/static/images/back_disabled.png +0 -0
- data/lib/pmux-logview/static/images/back_enabled.png +0 -0
- data/lib/pmux-logview/static/images/back_enabled_hover.png +0 -0
- data/lib/pmux-logview/static/images/forward_disabled.png +0 -0
- data/lib/pmux-logview/static/images/forward_enabled.png +0 -0
- data/lib/pmux-logview/static/images/forward_enabled_hover.png +0 -0
- data/lib/pmux-logview/static/images/sort_asc.png +0 -0
- data/lib/pmux-logview/static/images/sort_asc_disabled.png +0 -0
- data/lib/pmux-logview/static/images/sort_both.png +0 -0
- data/lib/pmux-logview/static/images/sort_desc.png +0 -0
- data/lib/pmux-logview/static/images/sort_desc_disabled.png +0 -0
- data/lib/pmux-logview/static/js/d3.v3.min.js +4 -0
- data/lib/pmux-logview/static/js/jquery-1.9.1.js +9597 -0
- data/lib/pmux-logview/static/js/jquery-ui-1.10.0.js +14883 -0
- data/lib/pmux-logview/static/js/jquery.activity-indicator-1.0.0.min.js +10 -0
- data/lib/pmux-logview/static/js/jquery.dataTables.min.js +157 -0
- data/lib/pmux-logview/static/js/pmux-logview-base.js +102 -0
- data/lib/pmux-logview/static/js/pmux-logview-detail.js +181 -0
- data/lib/pmux-logview/static/js/pmux-logview-index.js +324 -0
- data/lib/pmux-logview/static/js/tchart.js +2125 -0
- data/lib/pmux-logview/version.rb +5 -0
- data/lib/pmux-logview/views/detail.erb +58 -0
- data/lib/pmux-logview/views/index.erb +97 -0
- data/pmux-logview.gemspec +27 -0
- data/rpm/Makefile +20 -0
- data/rpm/pmux-logview +111 -0
- data/rpm/pmux-logview.spec +65 -0
- metadata +224 -0
require 'fileutils'
require 'date'
require 'yaml'

module Pmux
  module LogView
    # Scans pmux job logs found under ~USER/.pmux/log. The *.yml files are
    # parsed line by line against precompiled regexps instead of a full YAML
    # load, which keeps listing large log directories cheap.
    class LogParser
      @@pmux_log_path = ".pmux/log"
      @@pmux_old_log_path = ".pmux/log/old"
      @@dispatcher_log = "dispatcher.log"
      # NOTE(review): value is 64 KiB although an earlier comment claimed 128k;
      # the code has always read 1024 * 64 — confirm which was intended.
      @@max_dispatcher_log_size = 1024 * 64

      # Line classifiers for the three YAML documents in a job log:
      # document 1 = job header, document 2 = task table, document 3 = footer.
      @@document_re = Regexp.new("^---")
      @@header_re = Regexp.new("^:[a-z_]+:")
      @@header_params_re = Regexp.new("^ :[a-z_]+:")
      @@task_re = Regexp.new(" [a-z_]+:")
      @@futter_re = Regexp.new("^:[a-z_]+:") # (sic: "footer") — same pattern as @@header_re
      @@mapper_re = Regexp.new("^:mapper:")
      @@job_started_at_re = Regexp.new("^:job_started_at:")
      @@start_time_re = Regexp.new("^:start_time:")
      @@invoked_at_re = Regexp.new("^:invoked_at:")
      @@map_tasks_re = Regexp.new("^:map_tasks:")
      @@tasksize_re = Regexp.new("^:tasksize:")
      @@reduce_tasks_re = Regexp.new("^:reduce_tasks:")
      @@params_re = Regexp.new("^:params:")
      @@task_id_re = Regexp.new("^[0-9]+:")
      @@task_allocated_at_re = Regexp.new("^ allocated_at:")
      @@task_welapse_re = Regexp.new("^ welapse:")
      @@task_elapse_re = Regexp.new("^ elapse:")
      @@job_finished_at_re = Regexp.new("^:job_finished_at:")
      @@quote_re = Regexp.new("^['\"]|['\"]$")

      @logger = nil

      def initialize(cache_dir_path)
        @cache_dir_path = cache_dir_path
        @logger = LoggerWrapper.instance
      end

      # Globs the job-log files (*.yml) under +log_path+ in +user+'s home.
      def get_files(user, log_path)
        pattern = ["~" + user, log_path, "*.yml"].join(File::SEPARATOR)
        Dir.glob(File.expand_path(pattern))
      end

      # Extracts the summary fields of a single job log. Returns
      # [job_hash, cachable]; cachable is true only when the footer
      # (:job_finished_at:) was seen, i.e. the job is complete.
      def fast_parse(file, job_id)
        cachable = false
        job = {
          "end_time" => nil,
          "elapsed_time" => nil,
          "finished_tasks" => 0,
          "job_id" => job_id
        }
        finished = 0
        started_at = nil
        finished_at = nil
        File.open(file) do |f|
          section = 0
          f.each_line do |line|
            if @@document_re =~ line
              section += 1
              next
            end
            case section
            when 1 # job header
              if @@job_started_at_re =~ line || @@start_time_re =~ line
                _, _, raw = line.split(":", 3)
                started_at = DateTime.parse(raw.strip)
                job["start_time_msec"] = started_at.strftime("%Q")
                job["start_time"] = started_at.strftime("%Y-%m-%d %H:%M:%S")
              elsif @@map_tasks_re =~ line
                _, key, raw = line.split(":", 3)
                job[key] = raw.strip.to_i
              elsif @@mapper_re =~ line
                _, key, raw = line.split(":", 3)
                job[key] = raw.strip
              end
            when 2 # task table: each "NNN:" line is one finished task
              finished += 1 if @@task_id_re =~ line
            when 3 # footer
              if @@job_finished_at_re =~ line
                _, _, raw = line.split(":", 3)
                finished_at = DateTime.parse(raw.strip)
                job["end_time_msec"] = finished_at.strftime("%Q")
                job["end_time"] = finished_at.strftime("%Y-%m-%d %H:%M:%S")
                cachable = true
              end
            end
          end
        end
        job["finished_tasks"] = finished
        job["elapsed_time"] = ((finished_at - started_at) * 86400).to_f if started_at && finished_at
        # A still-running job reports a completion percentage in "end_time".
        if finished_at.nil?
          job["end_time"] =
            if job["map_tasks"].nil?
              "--- %"
            elsif job["map_tasks"] == 0
              "100%"
            else
              ((100 * job["finished_tasks"]) / job["map_tasks"]).to_s + "%"
            end
        end
        [job, cachable]
      end

      # Loads the Marshal'ed per-user cache under a shared lock.
      # Returns {} when the file is missing or unreadable.
      # NOTE(review): Marshal.load on a file the user's account owns — assumed
      # trusted local data; do not point this at untrusted input.
      def load_cache(file_path)
        loaded = {}
        return loaded unless File.exist?(file_path)
        begin
          File.open(file_path, "rb") do |f|
            f.flock(File::LOCK_SH)
            loaded = Marshal.load(f)
            f.flock(File::LOCK_UN)
          end
        rescue
          @logger.warn("cannot load cache file (#{file_path})")
        end
        loaded
      end

      # Persists the completed jobs listed in +cachable_ids+ under an
      # exclusive lock, rewriting the cache file in place.
      def save_cache(file_path, jobs, cachable_ids)
        snapshot = {}
        cachable_ids.each { |id| snapshot[id] = jobs[id] }
        FileUtils.mkdir_p(@cache_dir_path) unless File.exist?(@cache_dir_path)
        begin
          File.open(file_path, File::RDWR | File::CREAT, 0644) do |f|
            f.flock(File::LOCK_EX)
            f.rewind
            Marshal.dump(snapshot, f)
            f.flush
            f.truncate(f.pos) # drop any stale tail from a larger previous dump
            f.flock(File::LOCK_UN)
          end
        rescue
          @logger.warn("cannot save cache file (#{file_path})")
        end
      end

      # Inserts +job_id+ into +cache_ids+ keeping the list ordered by
      # +sort_key+/+sort_order+ (insertion sort step). Unknown keys or
      # orders simply append at the end.
      def add_cache_ids(cache_ids, jobs, job_id, job, sort_key, sort_order)
        comparator =
          case sort_order
          when "desc" then :>
          when "asc" then :<
          end
        insert_idx = nil
        unless comparator.nil?
          cache_ids.each_with_index do |id, idx|
            lhs, rhs =
              case sort_key
              when "start_time" then [job["start_time_msec"].to_i, jobs[id]["start_time_msec"].to_i]
              when "job_id" then [job["job_id"].to_i, jobs[id]["job_id"].to_i]
              when "mapper" then [job["mapper"], jobs[id]["mapper"]]
              when "end_time" then [job["end_time_msec"].to_i, jobs[id]["end_time_msec"].to_i]
              when "elapsed_time" then [job["elapsed_time"].to_i, jobs[id]["elapsed_time"].to_i]
              end
            next if lhs.nil?
            if lhs.public_send(comparator, rhs)
              insert_idx = idx
              break
            end
          end
        end
        if insert_idx
          cache_ids.insert(insert_idx, job_id)
        else
          cache_ids.push(job_id)
        end
      end

      # Copies up to +nitems+ cached jobs for page +page+ into +parse_data+.
      # NOTE(review): the guard uses length - 1, so a page starting exactly at
      # the last id returns nothing — looks like an off-by-one; confirm before
      # changing, callers may rely on it.
      def parse_data_cache_ids(parse_data, jobs, cache_ids, nitems, page)
        start_idx = nitems * page
        return if start_idx >= (cache_ids.length - 1)
        remaining = nitems
        (start_idx..(cache_ids.length - 1)).each do |idx|
          id = cache_ids[idx]
          parse_data[id] = jobs[id]
          remaining -= 1
          break if remaining == 0
        end
      end

      # Builds the job-list payload for +user+: merges the Marshal cache with
      # freshly parsed logs, keeps the cache sorted, and reports jobs that
      # changed since the client's +data["jobs_cookie"]+ timestamp.
      def parse_log_job(user, data)
        new_jobs_cookie = DateTime.now.strftime("%Q").to_i
        jobs = {}
        new_cache_ids = []
        update_ids = []
        cache_file_path = [@cache_dir_path, user].join(File::SEPARATOR)
        cache = load_cache(cache_file_path)
        need_save_cache = false
        [@@pmux_log_path].each do |log_path|
          get_files(user, log_path).each do |file|
            job_id = File.basename(file).sub(".yml", "")
            if cache.key?(job_id)
              jobs[job_id] = cache[job_id]
              add_cache_ids(new_cache_ids, jobs, job_id, cache[job_id], data["sort_key"], data["sort_order"])
              if data["jobs_cookie"] > 0 && data["jobs_cookie"] <= cache[job_id]["end_time_msec"].to_i
                update_ids.push(job_id)
              end
            else
              job, cachable = fast_parse(file, job_id)
              jobs[job_id] = job
              update_ids.push(job_id)
              if cachable
                add_cache_ids(new_cache_ids, jobs, job_id, job, data["sort_key"], data["sort_order"])
                need_save_cache = true
              end
            end
          end
        end
        # Rewrite the cache when new jobs finished or stale entries vanished.
        if need_save_cache || cache.length != new_cache_ids.length
          save_cache(cache_file_path, jobs, new_cache_ids)
        end
        parse_data = { "jobs" => {}, "jobs_cookie" => new_jobs_cookie }
        if data["type"] == "archive"
          parse_data_cache_ids(parse_data["jobs"], jobs, new_cache_ids, data["nitems"], data["page"])
        end
        update_ids.each do |job_id|
          parse_data["jobs"][job_id] = jobs[job_id] if jobs.key?(job_id)
        end
        parse_data
      end

      # Full three-document parse of one job log. Returns an array of up to
      # three hashes (header, tasks, footer), padded with nil for documents
      # that are absent. Task entries with fewer than 5 fields (i.e. still
      # being written) are dropped.
      def full_parse(file_path)
        documents = []
        File.open(file_path) do |f|
          doc_cnt = 0
          current = nil
          task_id = nil
          f.each_line do |line|
            if @@document_re =~ line
              unless current.nil?
                current.delete(task_id) if doc_cnt == 2 && !task_id.nil? && current[task_id].length < 5
                documents.push(current)
              end
              doc_cnt += 1
              current = {}
              task_id = nil
              next
            end
            case doc_cnt
            when 1 # job header
              if @@job_started_at_re =~ line || @@invoked_at_re =~ line || @@start_time_re =~ line
                _, key, raw = line.split(":", 3)
                current[key] = DateTime.parse(raw.strip)
              elsif @@map_tasks_re =~ line || @@tasksize_re =~ line || @@reduce_tasks_re =~ line
                _, key, raw = line.split(":", 3)
                current[key] = raw.strip.to_i
              elsif @@params_re =~ line
                current["params"] = {}
              elsif @@header_re =~ line
                _, key, raw = line.split(":", 3)
                current[key] = raw.strip.gsub(@@quote_re, "")
              elsif @@header_params_re =~ line
                _, key, raw = line.split(":", 3)
                current["params"][key] = raw.strip.gsub(@@quote_re, "")
              end
            when 2 # task table
              if @@task_id_re =~ line
                task_id, _ = line.split(":", 2)
                current[task_id] = {}
              elsif @@task_allocated_at_re =~ line || @@task_welapse_re =~ line || @@task_elapse_re =~ line
                key, raw = line.split(":", 2)
                current[task_id][key.strip] = raw.strip.to_f
              elsif @@task_re =~ line
                key, raw = line.split(":", 2)
                current[task_id][key.strip] = raw.strip.gsub(@@quote_re, "")
              end
            when 3 # footer
              if @@job_finished_at_re =~ line
                _, key, raw = line.split(":", 3)
                current[key] = DateTime.parse(raw.strip)
              elsif @@futter_re =~ line
                _, key, raw = line.split(":", 3)
                current[key] = raw.strip.gsub(@@quote_re, "")
              end
            end
          end
          unless current.nil?
            # NOTE(review): unlike the in-loop flush, this EOF flush does not
            # nil-check task_id before indexing — an empty task document would
            # raise here; confirm whether that can occur in practice.
            current.delete(task_id) if doc_cnt == 2 && current[task_id].length < 5
            documents.push(current)
          end
          documents.push(nil) if documents.length == 1
          documents.push(nil) if documents.length == 2
        end
        documents
      end

      # Returns [header, tasks, footer] hashes for one job (empty hashes for
      # missing documents), or nil when no log file exists in either the
      # current or the old log directory. Millisecond timestamps are added
      # for the client-side charts. (full_parse replaces the previous
      # YAML::load_stream approach for speed.)
      def parse_log_job_detail(user, job_id)
        file_path = File.expand_path(["~" + user, @@pmux_log_path, job_id + ".yml"].join(File::SEPARATOR))
        unless File.exist?(file_path)
          file_path = File.expand_path(["~" + user, @@pmux_old_log_path, job_id + ".yml"].join(File::SEPARATOR))
          return nil unless File.exist?(file_path)
        end
        docs = full_parse(file_path)
        parse_data = []
        [0, 1, 2].each do |idx|
          doc = docs[idx]
          if idx == 0 && !doc.nil?
            doc["job_started_at_msec"] = doc["job_started_at"].strftime("%Q") unless doc["job_started_at"].nil?
          elsif idx == 2 && !doc.nil?
            doc["job_finished_at_msec"] = doc["job_finished_at"].strftime("%Q") unless doc["job_finished_at"].nil?
          end
          parse_data.push(doc.nil? ? {} : doc)
        end
        parse_data
      end

      # Returns the tail of the dispatcher log as an array of lines, reading
      # at most the last @@max_dispatcher_log_size bytes. The first collected
      # line is discarded because the seek may have landed mid-line.
      def parse_log_dispatcher(user)
        collected = []
        file_path = File.expand_path(["~" + user, @@pmux_log_path, @@dispatcher_log].join(File::SEPARATOR))
        f = nil
        begin
          f = File.open(file_path)
          begin
            f.seek(-@@max_dispatcher_log_size, IO::SEEK_END)
          rescue
            # File shorter than the window: read from the beginning instead.
          end
          while (ln = f.gets)
            collected.push(ln)
          end
          collected.shift # drop the (possibly partial) first line
        ensure
          f.close unless f.nil?
        end
        collected
      end

      # Redirects the Marshal cache to a different directory.
      def set_cache_dir_path(cache_dir_path)
        @cache_dir_path = cache_dir_path
      end
    end
  end
end
require 'singleton'
require 'logger'
require 'syslog'
require 'fileutils' # bug fix: #open calls FileUtils.mkdir_p but fileutils was never required

module Pmux
  module LogView
    # Process-wide logging facade (Singleton): writes to a daily-rotating
    # log file via Logger, optionally mirrors every message to syslog, and
    # echoes to stdout when running in the foreground.
    class LoggerWrapper
      include Singleton

      # Configuration level name -> Logger severity constant.
      @@log_level_map = {
        'debug' => Logger::DEBUG,
        'info' => Logger::INFO,
        'warn' => Logger::WARN,
        'error' => Logger::ERROR,
        'fatal' => Logger::FATAL
      }

      # Configuration facility name -> syslog facility constant.
      @@facility_map = {
        'user' => Syslog::LOG_USER,
        'daemon' => Syslog::LOG_DAEMON,
        'local0' => Syslog::LOG_LOCAL0,
        'local1' => Syslog::LOG_LOCAL1,
        'local2' => Syslog::LOG_LOCAL2,
        'local3' => Syslog::LOG_LOCAL3,
        'local4' => Syslog::LOG_LOCAL4,
        'local5' => Syslog::LOG_LOCAL5,
        'local6' => Syslog::LOG_LOCAL6,
        'local7' => Syslog::LOG_LOCAL7
      }

      # Resets internal state; call once before #open. (Singleton makes
      # #initialize private, hence this explicit hook.)
      def init
        @syslog = false
        @logger = nil
        # Renamed from the misspelled @serverity (internal state only).
        @severity = Logger::INFO
        @log_dir_path = nil
        # Bug fix: give @foreground a defined value so the level helpers do
        # not read an uninitialized instance variable.
        @foreground = false
      end

      # Enables/disables echoing log lines to stdout.
      def foreground foreground
        @foreground = foreground
      end

      # Returns +level+ when it is a known level name, "info" otherwise.
      def fixup_level level
        return level if @@log_level_map.key?(level)
        return "info"
      end

      # Returns the syslog facility for +facility+, defaulting to LOG_USER
      # for nil or unknown names.
      def get_facility facility
        if !facility.nil? && @@facility_map.key?(facility)
          return @@facility_map[facility]
        end
        return Syslog::LOG_USER
      end

      # Opens the log destinations; when already open, reopens them.
      # On ENOENT/EACCES the previous logger (if any) is kept so logging
      # keeps working with the old destination.
      def open log_file_path, log_level, use_syslog, facility_string
        @severity = @@log_level_map[fixup_level(log_level)]
        @log_dir_path = File.dirname(log_file_path)
        FileUtils.mkdir_p(@log_dir_path) if !File.exist?(@log_dir_path)
        old_logger = @logger
        @logger = nil
        begin
          @logger = Logger.new(log_file_path, 'daily')
          @logger.level = @severity
          old_logger.close() if !old_logger.nil?
        rescue Errno::ENOENT => e
          @logger = old_logger if !old_logger.nil?
          warn("not found log file (#{log_file_path})")
          warn("error: #{e}")
        rescue Errno::EACCES => e
          @logger = old_logger if !old_logger.nil?
          warn("can not access log file (#{log_file_path})")
          warn("error: #{e}")
        end
        if @syslog
          Syslog.close()
          @syslog = false
        end
        if use_syslog
          facility = get_facility(facility_string)
          Syslog.open("pmux-logview", Syslog::LOG_PID, facility)
          @syslog = true
        end
      end

      # Closes both the file logger and the syslog connection.
      def close
        @logger.close() if !@logger.nil?
        @logger = nil
        Syslog.close() if @syslog
        @syslog = false
      end

      # Logs a raw message (trailing whitespace stripped) at info level.
      def write msg
        @logger.info(msg.rstrip()) if !@logger.nil?
        # Bug fix: pass the message as an argument, not as the format
        # string — a literal "%" in msg was previously interpreted as a
        # printf directive by syslog.
        Syslog.info("%s", msg.rstrip()) if @syslog
      end

      def debug msg
        @logger.debug(msg) if !@logger.nil?
        Syslog.debug("%s", msg.to_s) if @syslog
        puts "[debug] #{msg}" if @foreground && @@log_level_map["debug"] >= @severity
      end

      def info msg
        @logger.info(msg) if !@logger.nil?
        Syslog.info("%s", msg.to_s) if @syslog
        puts "[info] #{msg}" if @foreground && @@log_level_map["info"] >= @severity
      end

      def warn msg
        @logger.warn(msg) if !@logger.nil?
        Syslog.warning("%s", msg.to_s) if @syslog
        puts "[warn] #{msg}" if @foreground && @@log_level_map["warn"] >= @severity
      end

      def error msg
        @logger.error(msg) if !@logger.nil?
        Syslog.err("%s", msg.to_s) if @syslog
        puts "[error] #{msg}" if @foreground && @@log_level_map["error"] >= @severity
      end

      def fatal msg
        @logger.fatal(msg) if !@logger.nil?
        Syslog.crit("%s", msg.to_s) if @syslog
        puts "[fatal] #{msg}" if @foreground && @@log_level_map["fatal"] >= @severity
      end
    end
  end
end