pmux-logview 0.3.4 → 0.3.10

data/LICENSE.txt CHANGED
@@ -1,4 +1,4 @@
-Copyright (c) 2013 kakine
+Copyright (c) 2013, Internet Initiative Japan Inc.
 
 MIT License
 
@@ -11,5 +11,6 @@ Pmux::LogView::Controller.setup({ "default_user" => "pmux",
     "log_dir_path" => "/var/log/pmux-logview",
     "log_level" => "info",
     "use_syslog" => true,
-    "syslog_facility" => "user" })
+    "syslog_facility" => "user",
+    "cache_expire_time" => 86400 })
 run Pmux::LogView::Controller
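
The rackup entry point now passes the new cache_expire_time option through Controller.setup. Reassembled from the context and added lines of this hunk only (keys not visible here are omitted), the call now reads roughly:

    Pmux::LogView::Controller.setup({ "default_user" => "pmux",
                                      "log_dir_path" => "/var/log/pmux-logview",
                                      "log_level" => "info",
                                      "use_syslog" => true,
                                      "syslog_facility" => "user",
                                      "cache_expire_time" => 86400 })  # seconds
    run Pmux::LogView::Controller
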
@@ -2,11 +2,12 @@
 host: 0.0.0.0
 port: 28080
 pidfile: /var/run/pmux-logview.pid
-default_user: pmux
-use_basic_auth: true
+default_user: admin
+use_basic_auth: false
 password_file_path: /etc/pmux-logview/password
 cache_dir_path: /var/tmp/pmux-logview
 log_dir_path: /var/log/pmux-logview
 log_level: info
-use_syslog: true
+use_syslog: false
 syslog_facility: user
+cache_expire_time: 86400
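
The new cache_expire_time key is a number of seconds (the Model changes below compare it against Time.now.to_i), so the shipped default of 86400 keeps parsed job entries cached for one day:

    86400 / 3600      #=> 24, the number of hours covered by the default
    7 * 24 * 60 * 60  #=> 604800, i.e. a one-week window instead
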
@@ -51,6 +51,10 @@ module Pmux
     end
 
     def run
+
+      #require 'ruby-prof'
+      #RubyProf.start
+
       initialize()
       parse_args()
       daemonize()
@@ -62,6 +66,11 @@ module Pmux
       @port = @config["port"] if @config["port"]
       Controller.setup(@config)
       Controller.run! :bind => @host, :host => @host, :port => @port
+
+      #result = RubyProf.stop
+      #printer = RubyProf::FlatPrinter.new(result)
+      #printer.print(STDOUT)
+
     end
   end
 end
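
The launcher gains commented-out hooks for the ruby-prof gem around the Sinatra run loop. Enabling them would look roughly like the sketch below (ruby-prof is not added as a dependency by this diff, so it has to be installed separately):

    require 'ruby-prof'

    RubyProf.start
    Controller.run! :bind => @host, :host => @host, :port => @port
    result  = RubyProf.stop
    printer = RubyProf::FlatPrinter.new(result)
    printer.print(STDOUT)
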
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 require 'sinatra'
 require 'sinatra/base'
 require 'json'
@@ -19,6 +21,7 @@ module Pmux
     @@use_syslog = false
     @@use_basic_auth = true
     @@default_user = "pmux"
+    @@cache_expire_time = 86400
     @@model = nil
     @@logger = nil
     @user = nil
@@ -48,13 +51,15 @@ module Pmux
           @@syslog_facility = args[key]
         when "default_user"
           @@default_user = args[key]
+        when "cache_expire_time"
+          @@cache_expire_time = args[key]
         end
       }
       log_file_path = File.expand_path([@@log_dir_path, @@log_filename].join(File::SEPARATOR))
       @@logger.foreground(@@foreground)
       @@logger.open(log_file_path, @@log_level, @@use_syslog, @@syslog_facility)
       AuthHelper.update(@@password_file_path)
-      @@model = Model.new(@@cache_dir_path)
+      @@model = Model.new(@@cache_dir_path, @@cache_expire_time)
     end
 
     configure do
@@ -64,7 +69,7 @@ module Pmux
       @@logger.open(log_file_path, @@log_level, @@use_syslog, @@syslog_facility)
       use Rack::CommonLogger, @@logger
       AuthHelper.init(@@password_file_path)
-      @@model = Model.new(@@cache_dir_path)
+      @@model = Model.new(@@cache_dir_path, @@cache_expire_time)
     end
 
     def logger
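
Both the setup path and the configure block now hand the expiry window to the model as a second constructor argument. Assuming Model lives in the same Pmux::LogView namespace as the controller, a stand-alone instantiation with the sample defaults would be:

    model = Pmux::LogView::Model.new("/var/tmp/pmux-logview", 86400)
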
@@ -92,12 +97,11 @@ module Pmux
       @@logger.info("access user #{@user}")
       data = {}
       validations = {
-        "sort_key" => { "default" => "start_time", "type" => String, "values" => [ "job_id", "mapper", "start_time", "end_time", "elapsed_time"] },
         "sort_order" => { "default" => "desc", "type" => String, "values" => [ "asc", "desc" ] },
-        "type" => { "default" => "archive", "type" => String, "values" => [ "archive", "update" ] },
         "nitems" => { "default" => 20, "type" => Integer, "values" => nil },
         "page" => { "default" => 0, "type" => Integer, "values" => nil },
-        "jobs_cookie" => { "default" => 0, "type" => Integer, "values" => nil },
+        "start_time_msec" => { "default" => nil, "type" => Integer, "values" => nil },
+        "end_time_msec" => { "default" => nil, "type" => Integer, "values" => nil },
       }
       validations.each_key{|key|
         if params.key?(key)
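
The job-listing parameters change accordingly: sort_key, type and jobs_cookie disappear, and clients bound the listing with millisecond epoch timestamps instead (or keep paging with nitems/page). The endpoint path is not part of this hunk, but the new parameters could be computed like this:

    # Ask for jobs whose logs were written during the last hour.
    now = Time.now
    params = {
      "sort_order"      => "desc",
      "start_time_msec" => ((now - 3600).to_f * 1000).to_i,
      "end_time_msec"   => (now.to_f * 1000).to_i,
    }
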
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 require 'fileutils'
 require 'date'
 require 'yaml'
@@ -16,59 +18,77 @@ module Pmux
     @@task_re = Regexp.new(" [a-z_]+:")
     @@futter_re = Regexp.new("^:[a-z_]+:")
     @@mapper_re = Regexp.new("^:mapper:")
-    @@job_started_at_re = Regexp.new("^:job_started_at:")
-    @@start_time_re = Regexp.new("^:start_time:")
-    @@invoked_at_re = Regexp.new("^:invoked_at:")
-    @@map_tasks_re = Regexp.new("^:map_tasks:")
+    @@start_at_re = Regexp.new("^(:job_started_at:)|(:start_time:)|(:invoked_at:)")
+    @@tasks_re = Regexp.new("^(:map_tasks:)|(:reduce_tasks:)")
     @@tasksize_re = Regexp.new("^:tasksize:")
-    @@reduce_tasks_re = Regexp.new("^:reduce_tasks:")
     @@params_re = Regexp.new("^:params:")
     @@task_id_re = Regexp.new("^[0-9]+:")
-    @@task_allocated_at_re = Regexp.new("^ allocated_at:")
-    @@task_welapse_re = Regexp.new("^ welapse:")
-    @@task_elapse_re = Regexp.new("^ elapse:")
-    @@job_finished_at_re = Regexp.new("^:job_finished_at:")
+    @@elapse_re = Regexp.new("^( allocated_at:)|( welapse:)|( elapse:)")
+    @@finish_at_re = Regexp.new("^:job_finished_at:")
     @@error_status_re = Regexp.new("^:error_status:")
     @@error_message_re = Regexp.new("^:error_message:")
     @@quote_re = Regexp.new("^['\"]|['\"]$")
 
     @logger = nil
+    @cache_expire_time = 86400
 
-    def initialize cache_dir_path
-      @cache_dir_path = cache_dir_path
+    def initialize jobs_cache_dir_path, cache_expire
+      @jobs_cache_dir_path = jobs_cache_dir_path
+      @cache_expire_time = cache_expire
       @logger = LoggerWrapper.instance()
     end
 
-    def get_files user, log_path
-      return Dir.glob(File.expand_path(["~" + user, log_path, "*.yml"].join(File::SEPARATOR)))
+    def get_log_paths
+      return [@@pmux_log_path]
+    end
+
+    def get_files user, log_path, data, now
+      stime = Time.at(0)
+      etime = now
+      need_slice = true
+      if !data["start_time_msec"].nil? || !data["end_time_msec"].nil?
+        need_slice = false
+        stime = Time.at(data["start_time_msec"] / 1000, (data["start_time_msec"] % 1000) * 1000) if !data["start_time_msec"].nil?
+        etime = Time.at(data["end_time_msec"] / 1000, (data["start_time_msec"] % 1000) * 1000) if !data["end_time_msec"].nil?
+      end
+      file_mtime_list = Dir.glob(File.expand_path(["~" + user, log_path, "*.yml"].join(File::SEPARATOR))).map{ |f| [f, File.mtime(f)] }
+      file_list = file_mtime_list.sort_by{ |fm| fm[1] }.reverse.map{ |fm| fm[0] if stime <= fm[1] && fm[1] <= etime }.compact
+      if need_slice
+        item_count = (data["page"] + 1) * data["nitems"]
+        file_list = file_list.slice(0, item_count)
+      end
+      return file_list
     end
 
-    def fast_parse file, job_id
+    def fast_parse file, job_id, parsed_time
       cachable = false
-      job = {"end_time" => nil, "elapsed_time" => nil, "finished_tasks" => 0}
-      job["job_id"] = job_id
+      job = {"end_time" => nil, "elapsed_time" => nil, "finished_tasks" => 0, "job_id" => job_id, "parsed_time" => parsed_time}
       task_cnt = 0
       start_time = nil
       end_time = nil
-      File.open(file) {|f|
-        doc_cnt = 0
-        f.each_line {|ln|
+      doc1_buffer = ""
+      doc_cnt = 0
+      File.open(file) do |f|
+        f.each_line do |ln|
           if @@document_re =~ ln
+            if doc_cnt == 1
+              new_doc = YAML.load(doc1_buffer)
+              job["mapper"] = new_doc[:mapper].encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8")
+            end
             doc_cnt += 1
-          elsif doc_cnt == 1 && (@@job_started_at_re =~ ln || @@start_time_re =~ ln)
+          elsif doc_cnt == 1 && @@start_at_re =~ ln
            empty, key, value = ln.split(":", 3)
            start_time = DateTime::parse(value.strip())
            job["start_time_msec"] = start_time.strftime("%Q")
            job["start_time"] = start_time.strftime("%Y-%m-%d %H:%M:%S")
-          elsif doc_cnt == 1 && (@@map_tasks_re =~ ln || @@reduce_tasks_re =~ ln)
+          elsif doc_cnt == 1 && @@tasks_re =~ ln
            empty, key, value = ln.split(":", 3)
            job[key] = value.strip().to_i()
-          elsif doc_cnt == 1 && @@mapper_re =~ ln
-            empty, key, value = ln.split(":", 3)
-            job[key] = value.strip()
+          elsif doc_cnt == 1
+            doc1_buffer += ln
          elsif doc_cnt == 2 && @@task_id_re =~ ln
            task_cnt += 1
-          elsif doc_cnt == 3 && @@job_finished_at_re =~ ln
+          elsif doc_cnt == 3 && @@finish_at_re =~ ln
            empty, key, value = ln.split(":", 3)
            end_time = DateTime::parse(value.strip())
            job["end_time_msec"] = end_time.strftime("%Q")
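
get_files now ranks candidate YAML logs by file mtime (newest first), keeps only those that fall inside the requested window, and, when no window was given, trims the list to the pages actually requested before any parsing happens. The selection logic, isolated as a small sketch with placeholder values:

    page, nitems = 0, 20
    stime = Time.now - 86400                       # window start
    etime = Time.now                               # window end
    files = Dir.glob("/tmp/*.yml")                 # stand-in for the per-user pmux log directory
    picked = files.map { |f| [f, File.mtime(f)] }
                  .sort_by { |fm| fm[1] }.reverse
                  .select { |fm| stime <= fm[1] && fm[1] <= etime }
                  .map { |fm| fm[0] }
    picked = picked.slice(0, (page + 1) * nitems)  # applied only when no explicit window was sent
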
@@ -80,10 +100,10 @@ module Pmux
             cachable = true
           elsif doc_cnt == 3 && @@error_message_re =~ ln
             empty, key, value = ln.split(":", 3)
-            job[key] = value.strip().gsub(@@quote_re, "")
+            job[key] = value.strip().gsub(@@quote_re, "").encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8")
           end
-        }
-      }
+        end
+      end
       job["finished_tasks"] = task_cnt
       job["elapsed_time"] = ((end_time - start_time) * 86400).to_f if !start_time.nil? && !end_time.nil?
       if end_time.nil?
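
Mapper command lines and error messages can contain bytes that are not valid UTF-8, which would later break JSON serialization; round-tripping through UTF-16BE with :invalid/:undef replacement is a standard Ruby idiom for scrubbing such strings (on Ruby 2.1+, String#scrub('?') has the same effect):

    s = "grep caf\xC3 *.log"   # \xC3 is a truncated multi-byte sequence
    s.encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8")
    #=> "grep caf? *.log"
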
@@ -99,7 +119,7 @@ module Pmux
     end
 
     def load_cache file_path
-      cache = {}
+      cache = { "jobs" => {} }
       return cache if !File.exist?(file_path)
       begin
         File.open(file_path, "rb") {|f|
@@ -114,180 +134,106 @@ module Pmux
       end
     end
 
-    def save_cache file_path, jobs, cachable_ids
-      cache = {}
-      for job_id in cachable_ids
-        cache[job_id] = jobs[job_id]
-      end
-      FileUtils.mkdir_p(@cache_dir_path) if !File.exist?(@cache_dir_path)
+    def save_cache file_path, cache
+      FileUtils.mkdir_p(@jobs_cache_dir_path) if !File.exist?(@jobs_cache_dir_path)
       begin
-        File.open(file_path, File::RDWR|File::CREAT, 0644) {|f|
+        File.open(file_path, File::RDWR|File::CREAT, 0644) do |f|
           f.flock(File::LOCK_EX)
           f.rewind()
           Marshal.dump(cache, f)
           f.flush()
           f.truncate(f.pos)
           f.flock(File::LOCK_UN)
-        }
+        end
       rescue
         @logger.warn("cannot save cache file (#{file_path})")
       end
     end
-
-    def add_cache_ids cache_ids, jobs, job_id, job, sort_key, sort_order
-      insert_idx = -1
-      for idx in 0..(cache_ids.length - 1)
-        id = cache_ids[idx]
-        if sort_order == "desc"
-          case sort_key
-          when "start_time"
-            if job["start_time_msec"].to_i > jobs[id]["start_time_msec"].to_i
-              insert_idx = idx
-              break
-            end
-          when "job_id"
-            if job["job_id"].to_i > jobs[id]["job_id"].to_i
-              insert_idx = idx
-              break
-            end
-          when "mapper"
-            if job["mapper"] > jobs[id]["mapper"]
-              insert_idx = idx
-              break
-            end
-          when "end_time"
-            if !job["error_status"].nil?
-              insert_idx = idx
-            elsif !jobs[id]["error_status"].nil?
-              next
-            elsif job["end_time_msec"].to_i > jobs[id]["end_time_msec"].to_i
-              insert_idx = idx
-              break
-            end
-          when "elapsed_time"
-            if !job["error_status"].nil?
-              insert_idx = idx
-            elsif !jobs[id]["error_status"].nil?
-              next
-            elsif job["elapsed_time"].to_i > jobs[id]["elapsed_time"].to_i
-              insert_idx = idx
-              break
-            end
-          end
-        elsif sort_order == "asc"
-          case sort_key
-          when "start_time"
-            if job["start_time_msec"].to_i < jobs[id]["start_time_msec"].to_i
-              insert_idx = idx
-              break
-            end
-          when "job_id"
-            if job["job_id"].to_i < jobs[id]["job_id"].to_i
-              insert_idx = idx
-              break
-            end
-          when "mapper"
-            if job["mapper"] < jobs[id]["mapper"]
-              insert_idx = idx
-              break
-            end
-          when "end_time"
-            if !job["error_status"].nil?
-              next
-            elsif !jobs[id]["error_status"].nil?
-              insert_idx = idx
-            elsif job["end_time_msec"].to_i < jobs[id]["end_time_msec"].to_i
-              insert_idx = idx
-              break
-            end
-          when "elapsed_time"
-            if !job["elapsed_time"].nil?
-              next
-            elsif !jobs[id]["elapsed_time"].nil?
-              insert_idx = idx
-            elsif job["elapsed_time"].to_i < jobs[id]["elapsed_time"].to_i
-              insert_idx = idx
-              break
-            end
-          end
+
+    def expire_cache cache, now
+      new_cache = { "jobs" => {} }
+      need_save_cache = false
+      cache["jobs"].each do |job_id, job|
+        if job["parsed_time"] + @cache_expire_time < now
+          need_save_cache = true
+          next
         end
+        new_cache["jobs"][job_id] = cache["jobs"][job_id]
       end
-      if insert_idx != -1
-        cache_ids.insert(insert_idx, job_id);
-      else
-        cache_ids.push(job_id)
-      end
+      return [new_cache, need_save_cache]
     end
 
-    def parse_data_cache_ids parse_data, jobs, cache_ids, nitems, page
-      start_idx = nitems * page
-      if start_idx >= (cache_ids.length - 1)
-        return
+    def pickup_job src, src_ids, data
+      parse_data = { "jobs" => {} }
+      src_ids = src_ids.sort_by{|id_start| id_start[1] }.map{|id_start| id_start[0]}
+      src_ids = src_ids.reverse if data["sort_order"] == "desc"
+      start_idx = 0;
+      if data["start_time_msec"].nil? && data["end_time_msec"].nil?
+        start_idx = data["page"] * data["nitems"]
+        return parse_data if start_idx >= src_ids.length
       end
-      for idx in start_idx..cache_ids.length - 1
-        parse_data[cache_ids[idx]] = jobs[cache_ids[idx]]
-        nitems -= 1
-        if nitems == 0
-          break
-        end
+      for idx in start_idx..src_ids.length - 1
+        job_id = src_ids[idx]
+        parse_data["jobs"][job_id] = src["jobs"][job_id]
       end
+      return parse_data
     end
 
     def parse_log_job user, data
-      new_jobs_cookie = DateTime.now().strftime("%Q").to_i
-      jobs = {}
-      new_cache_ids = []
-      update_ids = []
-      cache_file_path = [@cache_dir_path, user].join(File::SEPARATOR)
-      cache = load_cache(cache_file_path)
-      need_save_cache = false
-      for log_path in [@@pmux_log_path]
-        files = get_files(user, log_path)
-        for file in files
-          job_id = File::basename(file).sub(".yml", "")
-          if cache.key?(job_id)
-            jobs[job_id] = cache[job_id]
-            add_cache_ids(new_cache_ids, jobs, job_id, cache[job_id], data["sort_key"], data["sort_order"])
-            update_ids.push(job_id) if data["jobs_cookie"] > 0 && data["jobs_cookie"] <= cache[job_id]["end_time_msec"].to_i
-            next
-          else
-            job, cachable = fast_parse(file, job_id)
-            jobs[job_id] = job
-            update_ids.push(job_id)
-            if cachable
-              add_cache_ids(new_cache_ids, jobs, job_id, job, data["sort_key"], data["sort_order"])
-              need_save_cache = true
-            end
+      # data structure
+      # {
+      #   "jobs" => {
+      #     id1 => { <job> },
+      #     .
+      #     .
+      #   },
+      # }
+      new = { "jobs" => {} }
+      ids = []
+      now = Time.now
+      jobs_cache_file_path = [@jobs_cache_dir_path, user].join(File::SEPARATOR)
+      cache = load_cache(jobs_cache_file_path)
+      new_cache, need_save_cache = expire_cache(cache, now.to_i)
+      files = get_files(user, get_log_paths, data, now)
+      files.each do |file|
+        job_id = File::basename(file).sub(".yml", "")
+        if new_cache["jobs"].key?(job_id)
+          new["jobs"][job_id] = new_cache["jobs"][job_id]
+          ids.push([job_id, new["jobs"][job_id]["start_time_msec"]])
+        else
+          job, cachable = fast_parse(file, job_id, now.to_i)
+          if cachable
+            new_cache["jobs"][job_id] = job
+            need_save_cache = true
           end
+          new["jobs"][job_id] = job
+          ids.push([job_id, new["jobs"][job_id]["start_time_msec"]])
        end
      end
-      save_cache(cache_file_path, jobs, new_cache_ids) if need_save_cache || cache.length != new_cache_ids.length
-      parse_data = { "jobs" => {}, "jobs_cookie" => new_jobs_cookie}
-      if data["type"] == "archive"
-        parse_data_cache_ids(parse_data["jobs"], jobs, new_cache_ids, data["nitems"], data["page"])
-      end
-      for job_id in update_ids
-        parse_data["jobs"][job_id] = jobs[job_id] if jobs.key?(job_id)
-      end
+      save_cache(jobs_cache_file_path, new_cache) if need_save_cache
+      parse_data = pickup_job(new, ids, data)
       return parse_data
     end
 
     def full_parse file_path
       documents = []
-      File.open(file_path) {|f|
+      File.open(file_path) do |f|
        doc1_buffer = ""
        doc1_param = {}
        doc_cnt = 0
        new_doc = nil
        task_id = nil
-        f.each_line {|ln|
+        f.each_line do |ln|
          if @@document_re =~ ln
            if doc_cnt == 1
-                new_doc = YAML.load(doc1_buffer)
-                new_doc[:job_started_at] = doc1_param["job_started_at"] if doc1_param["job_started_at"]
-                new_doc[:invoked_at] = doc1_param["invoked_at"] if doc1_param["invoked_at"]
-                new_doc[:start_time] = doc1_param["start_time"] if doc1_param["start_time"]
-                print new_doc
+              new_doc = YAML.load(doc1_buffer)
+              new_doc[:job_started_at] = doc1_param["job_started_at"] if doc1_param["job_started_at"]
+              new_doc[:invoked_at] = doc1_param["invoked_at"] if doc1_param["invoked_at"]
+              new_doc[:start_time] = doc1_param["start_time"] if doc1_param["start_time"]
+              new_doc[:mapper] = new_doc[:mapper].to_s.encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8") if new_doc[:mapper]
+              new_doc[:reducer] = new_doc[:reducer].to_s.encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8") if new_doc[:reducer]
+              new_doc[:params][:mapper] = new_doc[:params][:mapper].to_s.encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8") if new_doc.key?(:params) && new_doc[:params][:mapper]
+              new_doc[:params][:job_name] = new_doc[:params][:job_name].to_s.encode("UTF-16BE", "UTF-8", :invalid => :replace, :undef => :replace, :replace => '?').encode("UTF-8") if new_doc.key?(:params) && new_doc[:params][:job_name]
            end
            if !new_doc.nil?
              new_doc.delete(task_id) if doc_cnt == 2 && !task_id.nil? && new_doc[task_id].length < 5
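
The per-user Marshal cache is now a hash of the form { "jobs" => { job_id => job } }, with each cached job carrying the parsed_time stamped by fast_parse; expire_cache drops entries older than cache_expire_time seconds on every request. A condensed illustration of the structure and the expiry rule (job ids and times are made up):

    now    = Time.now.to_i
    expire = 86400
    cache  = { "jobs" => {
      "p20130001" => { "parsed_time" => now - 100_000 },   # stale, gets dropped
      "p20130002" => { "parsed_time" => now - 100 },       # still fresh
    } }
    fresh = cache["jobs"].reject { |_, job| job["parsed_time"] + expire < now }
    fresh.keys   #=> ["p20130002"]
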
@@ -296,7 +242,7 @@ module Pmux
             doc_cnt += 1
             new_doc = {}
             task_id = nil
-          elsif doc_cnt == 1 && (@@job_started_at_re =~ ln || @@invoked_at_re =~ ln || @@start_time_re =~ ln)
+          elsif doc_cnt == 1 && @@start_at_re =~ ln
             empty, key, value = ln.split(":", 3)
             time = DateTime::parse(value.strip())
             doc1_param[key] = time
@@ -305,13 +251,13 @@ module Pmux
           elsif doc_cnt == 2 && @@task_id_re =~ ln
             task_id, empty = ln.split(":", 2)
             new_doc[task_id] = {}
-          elsif doc_cnt == 2 && (@@task_allocated_at_re =~ ln || @@task_welapse_re =~ ln || @@task_elapse_re =~ ln)
+          elsif doc_cnt == 2 && @@elapse_re =~ ln
             key, value = ln.split(":", 2)
             new_doc[task_id][key.strip()] = value.strip().to_f()
           elsif doc_cnt == 2 && @@task_re =~ ln
             key, value = ln.split(":", 2)
             new_doc[task_id][key.strip()] = value.strip().gsub(@@quote_re, "")
-          elsif doc_cnt == 3 && @@job_finished_at_re =~ ln
+          elsif doc_cnt == 3 && @@finish_at_re =~ ln
             empty, key, value = ln.split(":", 3)
             time = DateTime::parse(value.strip())
             new_doc[key] = time
@@ -319,7 +265,7 @@ module Pmux
             empty, key, value = ln.split(":", 3)
             new_doc[key] = value.strip().gsub(@@quote_re, "")
           end
-        }
+        end
         if !new_doc.nil?
           new_doc.delete(task_id) if doc_cnt == 2 && new_doc[task_id].length < 5
           documents.push(new_doc)
@@ -330,7 +276,7 @@ module Pmux
         if documents.length == 2
           documents.push(nil)
         end
-      }
+      end
       return documents
     end
 
@@ -378,8 +324,8 @@ module Pmux
       return parse_data
     end
 
-    def set_cache_dir_path cache_dir_path
-      @cache_dir_path = cache_dir_path
+    def set_jobs_cache_dir_path jobs_cache_dir_path
+      @jobs_cache_dir_path = jobs_cache_dir_path
     end
   end
 end