bricolage 5.28.1 → 6.0.0beta1
- checksums.yaml +4 -4
- data/.gitignore +1 -3
- data/README.md +4 -0
- data/RELEASE.md +16 -0
- data/bricolage.gemspec +1 -0
- data/jobclass/create.rb +1 -1
- data/jobclass/createview.rb +1 -1
- data/jobclass/load.rb +6 -6
- data/jobclass/rebuild-drop.rb +4 -4
- data/jobclass/rebuild-rename.rb +4 -4
- data/jobclass/sql.rb +1 -1
- data/jobclass/streaming_load.rb +10 -9
- data/lib/bricolage/application.rb +5 -5
- data/lib/bricolage/context.rb +18 -11
- data/lib/bricolage/dao/job.rb +184 -0
- data/lib/bricolage/dao/jobexecution.rb +253 -0
- data/lib/bricolage/dao/jobnet.rb +158 -0
- data/lib/bricolage/datasource.rb +1 -1
- data/lib/bricolage/exception.rb +11 -0
- data/lib/bricolage/job.rb +10 -6
- data/lib/bricolage/jobnet.rb +52 -17
- data/lib/bricolage/jobnetrunner.rb +87 -50
- data/lib/bricolage/logger.rb +2 -2
- data/lib/bricolage/loglocator.rb +19 -1
- data/lib/bricolage/postgresconnection.rb +6 -4
- data/lib/bricolage/psqldatasource.rb +75 -7
- data/lib/bricolage/sqlutils.rb +43 -1
- data/lib/bricolage/taskqueue.rb +221 -63
- data/lib/bricolage/version.rb +1 -1
- data/schema/Dockerfile +13 -0
- data/schema/Gemfile +4 -0
- data/schema/Gemfile.lock +37 -0
- data/schema/Schemafile +57 -0
- data/schema/database.yml +8 -0
- data/schema/ridgepole_dryrun.sh +2 -0
- data/schema/ridgepole_merge.sh +2 -0
- metadata +29 -6
data/lib/bricolage/psqldatasource.rb CHANGED
@@ -15,6 +15,8 @@ module Bricolage
 
     include CommandUtils
 
+    DEFAULT_RETRY_LIMIT = 3
+
     def initialize(
       host: 'localhost',
       port: 5439,
@@ -24,7 +26,9 @@ module Bricolage
       pgpass: nil,
       encoding: nil,
       psql: 'psql',
-      sql_log_level:
+      sql_log_level: nil,
+      query_sql_log_level: nil,
+      update_sql_log_level: nil,
       tmpdir: Dir.tmpdir)
       @host = host
       @port = port
@@ -34,8 +38,11 @@ module Bricolage
       @pgpass = pgpass
       @encoding = encoding
       @psql = psql
-      @sql_log_level = Logger.intern_severity(sql_log_level)
+      @sql_log_level = Logger.intern_severity(sql_log_level || Logger::DEBUG)
+      @query_sql_log_level = Logger.intern_severity(query_sql_log_level || sql_log_level || Logger::DEBUG)
+      @update_sql_log_level = Logger.intern_severity(update_sql_log_level || sql_log_level || Logger::INFO)
       @tmpdir = tmpdir
+      @connection_pool = []
       raise ParameterError, "missing psql host" unless @host
       raise ParameterError, "missing psql port" unless @port
       raise ParameterError, "missing psql database" unless @database
@@ -51,6 +58,8 @@ module Bricolage
     attr_reader :user
 
     attr_reader :sql_log_level
+    attr_reader :query_sql_log_level
+    attr_reader :update_sql_log_level
 
     def new_task
       PSQLTask.new(self)
@@ -71,7 +80,15 @@ module Bricolage
         '--no-password',
         *options,
         env: get_psql_env
-
+        unless st.success?
+          begin
+            msg = LogLocator.slice_last_stderr(/^psql:.*?:\d+: ERROR: (.*)/, 1)
+          rescue IOError => ex
+            # slice_last_stderr may fail if stderr is not a file
+            logger.error ex.message
+            msg = nil
+          end
+        end
         JobResult.for_process_status(st, msg)
       }
     end
@@ -101,7 +118,48 @@ module Bricolage
     end
 
     def open(&block)
-
+      retries = (ENV['BRICOLAGE_OPEN_RETRY_LIMIT'] || DEFAULT_RETRY_LIMIT).to_i
+      begin
+        conn = PostgresConnection.open_data_source(self)
+        conn.execute_query('select 1'){}
+      rescue PG::ConnectionBad, PG::UnableToSend => ex
+        retries -= 1
+        if retries >= 0
+          logger.warn "Retry PG connection for execute query: #{ex.message}"
+          sleep 1
+          retry
+        end
+      end
+      if block_given?
+        yield conn
+      else
+        return conn
+      end
+    end
+
+    def open_shared_connection
+      raise ParameterError, 'open_shared_connection require block' unless block_given?
+      conn = nil
+      if @connection_pool.empty?
+        conn = open
+      else
+        begin
+          conn = @connection_pool.shift
+          conn.execute_query('select 1'){}
+        rescue
+          conn.close
+          conn = open
+        end
+      end
+
+      yield conn
+    ensure
+      @connection_pool.push(conn)
+    end
+
+    def clear_connection_pool
+      @connection_pool.map(&:close)
+      @connection_pool = []
     end
 
     def query_batch(query, batch_size = 5000, &block)
@@ -320,7 +378,6 @@ module Bricolage
       unless src_ds.redshift_loader_source?
         raise ParameterError, "input data source does not support redshift as bulk loading source: #{src_ds.name}"
       end
-      opts.provide_defaults(src_ds)
       buf = StringIO.new
       buf.puts "copy #{dest_table}"
       buf.puts "from '#{src_ds.url(src_path)}'"
@@ -340,12 +397,23 @@ module Bricolage
       when 'csv'
         %q(delimiter ',')
       when 'json'
-
+        "json '#{json_param(jsonpath)}'"
       else
         raise ParameterError, "unsupported format: #{fmt}"
       end
     end
 
+    def json_param(jsonpath)
+      case jsonpath
+      when nil
+        'auto'
+      when %r{\As3://}
+        jsonpath
+      else
+        src_ds.url(jsonpath)
+      end
+    end
+
     def unload(stmt, dest_ds, dest_path, format, opts)
       exec unload_statement(stmt, dest_ds, dest_path, format, opts)
     end
@@ -376,7 +444,7 @@ module Bricolage
     end
 
     def format_query(query)
-      query.gsub(/^--.*/, '').strip.gsub(/[ \t]*\n[ \t]*/, ' ').gsub("'", "\\\\'")
+      query.gsub(/^--.*/, '').strip.gsub(/[ \t]*\n[ \t]*/, ' ').gsub(/\\/,"\\\\\\\\").gsub("'", "\\\\'")
     end
   end
 
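The new `open` / `open_shared_connection` / `clear_connection_pool` trio above adds bounded connection retries and a simple connection pool to the psql data source. Below is a minimal, self-contained sketch of that control flow. `FlakyConnection`, `open_with_retry`, `with_shared_connection`, and `POOL` are illustrative stand-ins rather than names from the gem; the real code probes liveness with `select 1`, rescues `PG::ConnectionBad`/`PG::UnableToSend`, and does not re-raise after the retries are exhausted.

```ruby
# sketch_connection_pool.rb -- illustrative only, not bricolage code.
class FlakyConnection
  @@attempts = 0

  def self.open
    @@attempts += 1
    raise IOError, "connection refused" if @@attempts < 3  # fail twice, then succeed
    new
  end

  def ping; true; end
  def close; end
end

RETRY_LIMIT = 3

# Mirrors PSQLDataSource#open: retry a bounded number of times and verify
# the connection with a cheap probe before handing it out.
def open_with_retry
  retries = RETRY_LIMIT
  begin
    conn = FlakyConnection.open
    conn.ping                       # the gem runs `select 1` here
  rescue IOError => ex
    retries -= 1
    if retries >= 0
      warn "retrying connection: #{ex.message}"
      sleep 0.1
      retry
    end
    raise                           # the sketch re-raises; the gem falls through
  end
  conn
end

# Mirrors #open_shared_connection: reuse a pooled connection when it is still
# alive, otherwise discard it and open a fresh one; always return it to the pool.
POOL = []

def with_shared_connection
  conn =
    if POOL.empty?
      open_with_retry
    else
      begin
        c = POOL.shift
        c.ping
        c
      rescue
        c.close
        open_with_retry
      end
    end
  yield conn
ensure
  POOL.push(conn)
end

with_shared_connection { |c| puts "opened #{c.class} after retries" }
with_shared_connection { |c| puts "reused pooled connection (pool size was 1)" }
```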
data/lib/bricolage/sqlutils.rb CHANGED
@@ -11,7 +11,7 @@ module Bricolage
     alias s sql_string_literal
 
     def escape_sql_string(s)
-      s.gsub(/'/, "''")
+      s.gsub(/'/, "''").gsub(/\\/, '\\\\')
     end
 
     def sql_timestamp_literal(time)
@@ -24,6 +24,48 @@ module Bricolage
       time.strftime('%Y-%m-%d %H:%M:%S')
     end
 
+    def compile_set_expr(values_hash)
+      columns = values_hash.keys.map(&:to_s).join(', ')
+      values = values_hash.values.map{|v| convert_value(v) }.join(', ')
+      return columns, values
+    end
+
+    def convert_value(value)
+      if value == :now
+        'now()'
+      elsif value.nil?
+        "null"
+      elsif value == true or value == false
+        "#{value.to_s}"
+      elsif value.instance_of?(Integer) or value.instance_of?(Float)
+        "#{value.to_s}"
+      elsif value.instance_of?(String) or value.instance_of?(Pathname)
+        "#{s(value.to_s)}"
+      else
+        raise "invalid type for 'value' argument in JobExecution#convert_value: #{value} is #{value.class}"
+      end
+    end
+
+    def compile_where_expr(conds_hash)
+      conds_hash.map{|k,v| convert_cond(k,v) }.join(' and ')
+    end
+
+    def convert_cond(column, cond)
+      if cond.nil?
+        "#{column} is null"
+      elsif cond.instance_of?(Array) # not support subquery
+        in_clause = cond.map{|c| convert_cond(column, c)}.join(' or ')
+        "(#{in_clause})"
+      elsif cond == true or cond == false
+        "#{column} is #{cond.to_s}"
+      elsif cond.instance_of?(Integer) or cond.instance_of?(Float)
+        "#{column} = #{cond}"
+      elsif cond.instance_of?(String) or cond.instance_of?(Pathname)
+        "#{column} = #{s(cond.to_s)}"
+      else
+        raise "invalid type for 'cond' argument in JobExecution#convert_cond: #{cond} is #{cond.class}"
+      end
+    end
   end
 
 end
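These expression helpers exist so the DAO classes added in this release (`dao/job.rb`, `dao/jobnet.rb`, `dao/jobexecution.rb`) can build SQL from plain hashes. A rough, self-contained sketch of that intended usage follows; `quote`, `to_sql_value`, and `where_expr` are simplified local stand-ins for `sql_string_literal`, `convert_value`, and `convert_cond`, and the table and column names are made up for illustration.

```ruby
# Simplified stand-ins for the helpers above -- not the gem's code.
def quote(str)
  "'" + str.gsub("'", "''") + "'"       # the gem now also doubles backslashes
end

def to_sql_value(value)
  case value
  when :now        then 'now()'
  when nil         then 'null'
  when true, false then value.to_s
  when Numeric     then value.to_s
  else                  quote(value.to_s)
  end
end

def where_expr(conds)
  conds.map { |column, cond|
    case cond
    when nil         then "#{column} is null"
    when Array       then '(' + cond.map { |c| "#{column} = #{to_sql_value(c)}" }.join(' or ') + ')'
    when true, false then "#{column} is #{cond}"
    else                  "#{column} = #{to_sql_value(cond)}"
    end
  }.join(' and ')
end

# INSERT built the way compile_set_expr is meant to be used: a column list
# plus a matching literal list derived from one hash.
values = { subsystem: 'sales', job_name: 'load_orders', submitted_at: :now }
columns = values.keys.join(', ')
literals = values.values.map { |v| to_sql_value(v) }.join(', ')
puts "insert into job_executions (#{columns}) values (#{literals})"
# => insert into job_executions (subsystem, job_name, submitted_at)
#    values ('sales', 'load_orders', now())

# WHERE clause built the way compile_where_expr is meant to be used.
puts "... where " + where_expr(subsystem: 'sales', status: ['waiting', 'failed'], finished: false)
# => ... where subsystem = 'sales' and (status = 'waiting' or status = 'failed') and finished is false
```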
data/lib/bricolage/taskqueue.rb CHANGED
@@ -1,11 +1,17 @@
 require 'bricolage/jobnet'
+require 'bricolage/sqlutils'
 require 'bricolage/exception'
+require 'bricolage/dao/job'
+require 'bricolage/dao/jobnet'
+require 'bricolage/dao/jobexecution'
 require 'fileutils'
 require 'pathname'
+require 'pg'
 
 module Bricolage
 
-  class
+  class MemoryTaskQueue
+
     def initialize
       @queue = []
     end
@@ -18,77 +24,90 @@ module Bricolage
       @queue.size
     end
 
-    def queued?
-      not empty?
-    end
-
     def each(&block)
       @queue.each(&block)
     end
 
     def consume_each
-
-
-
-
-      deq
+      while job = @queue.first
+        result = yield job
+        break unless result.success?
+        @queue.shift
       end
-    ensure
-      unlock
     end
 
-    def
-      @queue.push task
+    def restore_jobnet(jobnet)
     end
 
-    def
-
+    def enqueue_jobnet(jobnet)
+      jobnet.sequential_jobs.each do |job|
+        @queue.push job
+      end
     end
 
-    def
-
-      save
-      task
+    def locked?(jobnet)
+      false
     end
 
-    def
+    def unlock_help(jobnet)
+      raise "[BUG] this message must not be shown"
     end
 
-    def
+    def cancel_jobnet(jobnet, message)
+      @queue.clear
     end
 
-
-
+  end
+
+
+  class FileTaskQueue
+
+    def initialize(path:)
+      @path = path
+      @queue = []
     end
 
-    def
+    def empty?
+      @queue.empty?
     end
 
-    def
+    def size
+      @queue.size
     end
 
-    def
-
+    def restore_jobnet(jobnet)
+      return unless File.exist?(@path)
+      File.foreach(@path) do |line|
+        @queue.push Task.deserialize(line)
+      end
     end
-  end
 
-
-
-
-
-    q
+    def enqueue_jobnet(jobnet)
+      jobnet.sequential_jobs.each do |ref|
+        @queue.push Task.new(ref)
+      end
     end
 
-    def
-
-
+    def each
+      @queue.each do |task|
+        yield task.job
+      end
     end
 
-    def
-
+    def consume_each
+      lock
+      save
+      while task = @queue.first
+        task_result = yield task.job
+        break unless task_result.success?
+        @queue.shift
+        save
+      end
+    ensure
+      unlock
     end
 
-    def save
+    private def save
       if empty?
         @path.unlink if @path.exist?
         return
@@ -97,7 +116,7 @@ module Bricolage
       tmpname = "#{@path}.tmp.#{Process.pid}"
       begin
         File.open(tmpname, 'w') {|f|
-          each do |task|
+          @queue.each do |task|
             f.puts task.serialize
           end
         }
@@ -107,48 +126,187 @@ module Bricolage
       end
     end
 
-    def
-      File.foreach(@path) do |line|
-        enq JobTask.deserialize(line)
-      end
-    end
-
-    def locked?
+    def locked?(jobnet)
       lock_file_path.exist?
     end
 
-    def lock
-      FileUtils.touch
+    private def lock
+      FileUtils.touch(lock_file_path)
     end
 
-    def unlock
-      FileUtils.rm_f
+    private def unlock
+      FileUtils.rm_f(lock_file_path)
     end
 
-    def lock_file_path
+    private def lock_file_path
       Pathname.new("#{@path}.LOCK")
     end
 
-    def unlock_help
+    def unlock_help(jobnet)
       "remove the file: #{lock_file_path}"
     end
+
+    def cancel_jobnet(jobnet, message)
+      unlock
+      FileUtils.rm_f(@path)
+      @queue.clear
+    end
+
+    class Task
+      def initialize(job)
+        @job = job
+      end
+
+      attr_reader :job
+
+      def serialize
+        [@job].join("\t")
+      end
+
+      def Task.deserialize(str)
+        job, * = str.strip.split("\t")
+        new(JobNet::Ref.parse(job))
+      end
+    end
+
   end
 
-
-
-
+
+  class DatabaseTaskQueue
+
+    def initialize(datasource:, executor_id:, enable_lock: false)
+      @ds = datasource
+      @executor_id = executor_id
+      @enable_lock = enable_lock
+
+      @queue = []
+      @jobnet_dao = DAO::JobNet.new(@ds)
+      @job_dao = DAO::Job.new(@ds)
+      @jobexecution_dao = DAO::JobExecution.new(@ds)
+      @jobnet = nil
+    end
+
+    def empty?
+      @queue.empty?
+    end
+
+    def size
+      @queue.size
+    end
+
+    private def find_or_create_jobnet(ref)
+      @jobnet_rec ||= @jobnet_dao.find_or_create(ref)
+    end
+
+    def restore_jobnet(jobnet)
+      raise "jobnet is already bound to queue" if @jobnet
+
+      job_executions = @jobexecution_dao.enqueued_jobs(jobnet.ref)
+      unless job_executions.empty?
+        job_executions.each do |job_execution|
+          @queue.push Task.for_job_execution(job_execution)
+        end
+        @jobnet = jobnet
+      end
+    end
+
+    def enqueue_jobnet(jobnet)
+      raise "jobnet is already bound to queue" if @jobnet
+
+      jobnet_rec = find_or_create_jobnet(jobnet.ref)
+      jobnet.sequential_jobs.each_with_index do |job_ref, index|
+        job = @job_dao.find_or_create(jobnet_rec.id, job_ref)
+        job_execution = @jobexecution_dao.enqueue_job(job, index + 1)
+        @queue.push Task.for_job_execution(job_execution)
+      end
+      @jobnet = jobnet
+    end
+
+    def each
+      @queue.each do |task|
+        yield task.job
+      end
     end
 
-
+    def consume_each
+      raise "jobnet is not bound to queue" unless @jobnet
+
+      jobnet_rec = find_or_create_jobnet(@jobnet.ref)
+      @jobnet_dao.lock(jobnet_rec.id, @executor_id) if @enable_lock
+      while task = @queue.first
+        @job_dao.lock(task.job_id, @executor_id) if @enable_lock
+        begin
+          @jobexecution_dao.transition_to_running(task.job_execution_id)
+
+          # Note: fork(2) breaks current connections,
+          # we must close current connections before fork.
+          # (psql datasource forks process)
+          @ds.clear_connection_pool
+
+          job_completed = false
+          begin
+            task_result = yield task.job
 
-
-
+            if task_result.success?
+              @jobexecution_dao.transition_to_succeeded(task.job_execution_id)
+              job_completed = true
+              @queue.shift
+            else
+              @jobexecution_dao.transition_to_failed(task.job_execution_id, task_result.message)
+              job_completed = true
+              break
+            end
+          ensure
+            unless job_completed
+              begin
+                @jobexecution_dao.transition_to_failed(task.job_execution_id, 'unexpected error')
+              rescue => ex
+                $stderr.puts "warning: could not write job state: #{ex.class}: #{ex.message} (this error is ignored)"
+              end
+            end
+          end
+        ensure
+          @job_dao.unlock(task.job_id, @executor_id) if @enable_lock
+        end
+      end
+    ensure
+      @jobnet_dao.unlock(jobnet_rec.id, @executor_id) if @enable_lock
     end
 
-    def
-
-      new(JobNet::Ref.parse(job))
+    def locked?(jobnet)
+      @jobnet_dao.locked?(jobnet.ref)
     end
+
+    def unlock_help(jobnet)
+      jobnet_rec = find_or_create_jobnet(jobnet.ref)
+      locked_jobs = @job_dao.locked_jobs(jobnet_rec.id)
+      "clear executor_id of the jobnet (id: #{jobnet_rec.id}) and/or the jobs (id: #{locked_jobs.map(&:id).join(', ')})"
+    end
+
+    def cancel_jobnet(jobnet, message)
+      @jobexecution_dao.cancel_jobnet(jobnet.ref, message)
+      jobnet_rec = find_or_create_jobnet(jobnet.ref)
+      @jobnet_dao.clear_lock(jobnet_rec.id)
+      @job_dao.clear_lock_all(jobnet_rec.id)
+    end
+
+    class Task
+      def Task.for_job_execution(exec)
+        job_ref = JobNet::JobRef.new(exec.subsystem, exec.job_name, JobNet::Location.dummy)
+        new(job_ref, exec)
+      end
+
+      def initialize(job_ref, job_execution)
+        @job = job_ref
+        @job_id = job_execution.job_id
+        @job_execution_id = job_execution.job_execution_id
+      end
+
+      attr_reader :job
+      attr_reader :job_id
+      attr_reader :job_execution_id
+    end
+
   end
 
 end
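The three queue classes (`MemoryTaskQueue`, `FileTaskQueue`, `DatabaseTaskQueue`) now share one interface: `restore_jobnet`, `enqueue_jobnet`, `consume_each`, `locked?`, `unlock_help`, and `cancel_jobnet`. The sketch below shows how a caller would drive that interface and how `consume_each` stops at the first failed job; `DemoQueue`, `JobNetStub`, and `JobResultStub` are hypothetical stand-ins, and the real wiring lives in `jobnetrunner.rb` (also changed in this release).

```ruby
# Illustrative driver for the shared queue contract -- not bricolage code.
JobResultStub = Struct.new(:success) do
  def success?; success; end
end

JobNetStub = Struct.new(:jobs) do
  def sequential_jobs; jobs; end
end

class DemoQueue                       # same contract as MemoryTaskQueue above
  def initialize; @queue = []; end
  def empty?; @queue.empty?; end

  def restore_jobnet(jobnet); end     # nothing persisted for the in-memory case

  def enqueue_jobnet(jobnet)
    jobnet.sequential_jobs.each { |job| @queue.push job }
  end

  def consume_each
    while job = @queue.first
      break unless yield(job).success?   # failed job stays at the head of the queue
      @queue.shift
    end
  end
end

queue = DemoQueue.new
jobnet = JobNetStub.new(%w[sales/load sales/summary sales/report])

queue.restore_jobnet(jobnet)          # pick up an interrupted run, if any
queue.enqueue_jobnet(jobnet) if queue.empty?
queue.consume_each do |job|
  puts "running #{job}"
  JobResultStub.new(job != 'sales/report')   # pretend the last job fails
end
puts(queue.empty? ? "jobnet finished" : "stopped; failed job stays queued for retry")
```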