bricolage 5.19.1 → 5.20.0
- checksums.yaml +4 -4
- data/jobclass/streaming_load.rb +27 -37
- data/lib/bricolage/version.rb +1 -1
- data/test/home/Gemfile.lock +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bb66877b6bc66492a6091a11eafbd9bb953f95e5
+  data.tar.gz: a31a4383dad5d6b8b1a319fa3a2d68900f57db55
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c2b402f7f2392a0dec1b4ed72753abed5a9683cf3860b3c6a906d86a1901abbccd9f88d5a4c7d6e52bd6e4ba235e4de0e8ce355971c3d91e064946224f8c91b7
+  data.tar.gz: c3cf0419ce2a8dc470ed15b9d9837bf8bf32cbfbc440d655813f167d101d2b9ebcb56d21099bd0653e79e44eba85b4062e189179cf4b4ff00e8b98ee1481c2fe
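For reference, a downloaded copy of 5.20.0 can be checked against the new checksum values with Ruby's standard Digest library. A minimal sketch, assuming metadata.gz and data.tar.gz have been extracted next to checksums.yaml (the paths are placeholders, not part of the gem):

# Minimal verification sketch (not part of the gem): compare the local
# files against the checksums recorded in checksums.yaml.
require 'digest'
require 'yaml'

checksums = YAML.load_file('checksums.yaml')

%w[metadata.gz data.tar.gz].each do |name|
  expected = checksums['SHA512'][name]
  actual   = Digest::SHA512.file(name).hexdigest
  puts "#{name}: #{actual == expected ? 'ok' : 'MISMATCH'}"
end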
data/jobclass/streaming_load.rb
CHANGED
@@ -139,19 +139,19 @@ class StreamingLoadJobClass < RubyJobClass
     end
     create_load_log_file(objects) {|log_url|
       @ds.open {|conn|
-
-
-
-
-
-
+        execute_update conn, "delete #{log_table_wk};"
+        execute_update conn, load_log_copy_stmt(log_table_wk, log_url, @src.credential_string)
+        loaded, not_loaded = partition_loaded_objects(conn, objects, log_table_wk)
+        loaded.each do |obj|
+          obj.dequeue(force: true, noop: @noop)
+        end
       }
     }
   end
 
   def load
     log_basic_info
-    @logger.info 'load
+    @logger.info 'streaming load start'
     objects = @src.queued_objects
     if objects.empty?
       @logger.info 'no target data files; exit'
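In this hunk and in the load path below, the load-log file written to S3 is pulled into the work log table by a Redshift COPY statement built by load_log_copy_stmt. That method's body is not part of this diff; a rough sketch of the assumed shape (argument list and COPY options are assumptions, not the gem's code):

# Illustrative only -- the real load_log_copy_stmt is not shown in this
# diff and may use different COPY options.
def load_log_copy_stmt_sketch(target_table, log_url, credential_string)
  "copy #{target_table} from '#{log_url}' credentials '#{credential_string}' delimiter ',';"
end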
@@ -159,19 +159,28 @@ class StreamingLoadJobClass < RubyJobClass
     end
     create_load_log_file(objects) {|log_url|
       @ds.open {|conn|
-
-
+        execute_update conn, "truncate #{work_table}"
+        conn.transaction {|txn|
+          execute_update conn, "delete #{log_table_wk}"
+          execute_update conn, load_log_copy_stmt(log_table_wk, log_url, @src.credential_string)
+          loaded, not_loaded = partition_loaded_objects(conn, objects, log_table_wk)
           unless @load_only
             loaded.each do |obj|
               obj.dequeue(force: true, noop: @noop)
             end
           end
-
+          if not_loaded.empty?
+            @logger.info "no data files to load" unless @noop
+            txn.truncate_and_commit log_table_wk
+          else
             create_manifest_file(not_loaded) {|manifest_url|
-              init_work_table conn
               execute_update conn, manifest_copy_stmt(work_table, manifest_url)
               @logger.info "load succeeded: #{manifest_url}" unless @noop
-
+              unless @load_only
+                commit_work_table conn, work_table
+                commit_load_log conn, log_table_wk
+              end
+              txn.truncate_and_commit log_table_wk
             }
             unless @load_only
               not_loaded.each do |obj|
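Files not yet loaded are handed to Redshift through an S3 manifest, via the COPY built by manifest_copy_stmt. That statement is also outside this diff; a hedged sketch of the general form, using Redshift's manifest option (signature and options are assumptions):

# Illustrative only -- the real manifest_copy_stmt may differ. The
# "manifest" option tells COPY to read its list of data files from the
# manifest file rather than treating the URL as a key prefix.
def manifest_copy_stmt_sketch(work_table, manifest_url, credential_string)
  "copy #{work_table} from '#{manifest_url}' credentials '#{credential_string}' manifest;"
end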
@@ -184,24 +193,11 @@ class StreamingLoadJobClass < RubyJobClass
     }
   end
 
-  def commit(conn, work_table, tmp_log_table)
-    @end_time = Time.now   # commit_load_log writes this, generate before that
-    transaction(conn) {
-      commit_work_table conn, work_table
-      commit_load_log conn, tmp_log_table
-    }
-  end
-
   private
 
-  def init_work_table(conn)
-    execute_update conn, "truncate #{work_table};"
-  end
-
   def commit_work_table(conn, work_table)
     insert_stmt = @sql ? @sql.source : "insert into #{@table} select * from #{work_table};"
     execute_update conn, insert_stmt
-    # keep work table records for tracing
   end
 
   def create_manifest_file(objects)
@@ -243,8 +239,11 @@ class StreamingLoadJobClass < RubyJobClass
     csv = make_load_log_csv(objects)
     @logger.info "load_log:\n" + csv
     url = @src.put_control_file(log_name, csv, noop: @noop)
-
-
+    begin
+      yield url
+    ensure
+      @src.remove_control_file(File.basename(url), noop: @noop)
+    end
   end
 
   def make_load_log_csv(objects)
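The begin/ensure added around yield url guarantees the temporary control file is removed even when the block raises. The same pattern in a generic, self-contained form (local temp files stand in for the gem's S3 control files; nothing here is bricolage API):

# Generic illustration of the ensure-based cleanup pattern used above.
require 'tmpdir'

def with_temp_file(name)
  path = File.join(Dir.mktmpdir, name)
  File.write(path, 'example')
  begin
    yield path
  ensure
    File.delete(path) if File.exist?(path)   # runs even if the block raised
  end
end

with_temp_file('demo.csv') {|path| puts File.read(path) }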
@@ -270,10 +269,6 @@ class StreamingLoadJobClass < RubyJobClass
   LoadLogRecord = Struct.new(:job_process_id, :start_time, :end_time, :target_table, :data_file)
 
   def create_tmp_log_table(conn, log_url)
-    target_table = log_table_wk
-    execute_update conn, "truncate #{target_table};"
-    execute_update conn, load_log_copy_stmt(target_table, log_url, @src.credential_string)
-    yield target_table
   end
 
   def log_table_wk
@@ -313,6 +308,7 @@ class StreamingLoadJobClass < RubyJobClass
   end
 
   def commit_load_log(conn, tmp_table_name)
+    @end_time = Time.now
     conn.execute(<<-EndSQL)
       insert into #{@log_table}
       select
@@ -342,12 +338,6 @@ class StreamingLoadJobClass < RubyJobClass
     %Q('#{escaped}')
   end
 
-  def transaction(conn)
-    execute_update conn, 'begin transaction'
-    yield
-    execute_update conn, 'commit'
-  end
-
   def execute_update(conn, sql)
     if @noop
       log_query(sql)
data/lib/bricolage/version.rb
CHANGED
data/test/home/Gemfile.lock
CHANGED