bricolage 5.29.2 → 5.30.0
- checksums.yaml +4 -4
- data/README.md +4 -0
- data/RELEASE.md +4 -0
- data/jobclass/create.rb +1 -1
- data/jobclass/createview.rb +1 -1
- data/jobclass/load.rb +6 -6
- data/jobclass/rebuild-drop.rb +4 -4
- data/jobclass/rebuild-rename.rb +4 -4
- data/jobclass/sql.rb +1 -1
- data/jobclass/streaming_load.rb +8 -7
- data/lib/bricolage/psqldatasource.rb +0 -1
- data/lib/bricolage/version.rb +1 -1
- metadata +3 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cd34b3e85926b0f85f76d05415341066025a5117c77d1b3b644ac9ce3358f0d2
+  data.tar.gz: 067eed98758a6de824a0ab9133830a637f3cc3790e26974a946d39eaa54032fc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d4601475e0c9cbd70e2f54a68ddb06e417288fc52230b3ac553ff8a3f1ef79dafa6d83a8e543470d9c5cfd9d040a98df66217e2cd7a39aec6e1cd4b342640b0f
+  data.tar.gz: aa8d9bf658e8f36a84eacd18c3d680a18178d7612c995fbdbb8dfd695720331f953607614c1ad2d467c8a211e6fd6bf29f853257455b72700fe93172846375e3
data/README.md
CHANGED
data/RELEASE.md
CHANGED
data/jobclass/create.rb
CHANGED
data/jobclass/createview.rb
CHANGED
data/jobclass/load.rb
CHANGED
@@ -35,9 +35,9 @@ JobClass.define('load') {
       task.transaction {
         task.drop_force '${dest_table}'
         task.exec params['table-def']
-        task.load params['src-ds'], params['src-file'],
+        task.load params['src-ds'], params['src-file'], '$dest_table',
                   params['format'], params['jsonpath'], params['options']
-        task.grant_if params['grant'],
+        task.grant_if params['grant'], '$dest_table'
       }
       # ANALYZE, VACUUM is needless for newly loaded table, skip always.
 
@@ -49,9 +49,9 @@ JobClass.define('load') {
 
       task.truncate_if params['truncate']
       task.transaction {
-        task.load params['src-ds'], params['src-file'],
+        task.load params['src-ds'], params['src-file'], '$dest_table',
                   params['format'], params['jsonpath'], params['options']
-        task.grant_if params['grant'],
+        task.grant_if params['grant'], '$dest_table'
       }
       # ANALYZE, VACUUM is needless for newly loaded table, skip always.
 
@@ -59,9 +59,9 @@ JobClass.define('load') {
       # load only pattern
 
       task.transaction {
-        task.load params['src-ds'], params['src-file'],
+        task.load params['src-ds'], params['src-file'], '$dest_table',
                   params['format'], params['jsonpath'], params['options']
-        task.grant_if params['grant'],
+        task.grant_if params['grant'], '$dest_table'
        task.analyze_if params['analyze']
       }
       # We cannot execute VACUUM in transaction
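The thread running through all three hunks is that each PSQL task call now names its destination table explicitly through the '$dest_table' job variable instead of relying on an implicit default. As a rough sketch of the resulting load-only branch (meaningful only inside a Bricolage job class, where `task` is the object yielded by `script.task`; the calls are taken from the hunk above, only the comments are added):

task.transaction {
  # COPY from the source data source/file into the table named by $dest_table
  task.load params['src-ds'], params['src-file'], '$dest_table',
            params['format'], params['jsonpath'], params['options']
  # optional GRANT, now pointed at the same explicit table
  task.grant_if params['grant'], '$dest_table'
  task.analyze_if params['analyze']
}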
data/jobclass/rebuild-drop.rb
CHANGED
@@ -23,19 +23,19 @@ JobClass.define('rebuild-drop') {
     script.task(params['data-source']) {|task|
       task.transaction {
         # CREATE
-        task.drop_force
+        task.drop_force '$dest_table'
         task.exec params['table-def']
 
         # INSERT
         task.exec params['sql-file']
 
         # GRANT
-        task.grant_if params['grant'],
+        task.grant_if params['grant'], '$dest_table'
       }
 
       # VACUUM, ANALYZE
-      task.vacuum_if params['vacuum'], params['vacuum-sort'],
-      task.analyze_if params['analyze'],
+      task.vacuum_if params['vacuum'], params['vacuum-sort'], '$dest_table'
+      task.analyze_if params['analyze'], '$dest_table'
     }
   }
 }
data/jobclass/rebuild-rename.rb
CHANGED
@@ -21,6 +21,7 @@ JobClass.define('rebuild-rename') {
 
   script {|params, script|
     script.task(params['data-source']) {|task|
+      dest_table = '$dest_table'
       prev_table = '${dest_table}_old'
       work_table = '${dest_table}_wk'
 
@@ -43,10 +44,9 @@ JobClass.define('rebuild-rename') {
 
       # RENAME
      task.transaction {
-        task.create_dummy_table
-        dest_table
-        task.rename_table
-        task.rename_table "#{dest_table}_wk", dest_table.name
+        task.create_dummy_table dest_table
+        task.rename_table dest_table, prev_table
+        task.rename_table work_table, dest_table
      }
    }
  }
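With the new `dest_table` variable alongside `prev_table` and `work_table`, the RENAME transaction spells out the table swap: the live table is parked under the `_old` suffix and the freshly built `_wk` table takes its place. A sketch of that sequence (again only meaningful inside a Bricolage job class; the calls are from the hunk above, the comments are added and the one on create_dummy_table is an assumption about its purpose):

dest_table = '$dest_table'
prev_table = '${dest_table}_old'
work_table = '${dest_table}_wk'

task.transaction {
  task.create_dummy_table dest_table        # presumably ensures the destination exists so the rename below has something to move
  task.rename_table dest_table, prev_table  # park the current table as <dest>_old
  task.rename_table work_table, dest_table  # promote the rebuilt <dest>_wk table
}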
data/jobclass/sql.rb
CHANGED
@@ -25,7 +25,7 @@ JobClass.define('sql') {
       task.exec params['sql-file']
       task.vacuum_if params['vacuum'], params['vacuum-sort']
       task.analyze_if params['analyze']
-      task.grant_if params['grant'],
+      task.grant_if params['grant'], '$dest_table'
     }
   }
 }
data/jobclass/streaming_load.rb
CHANGED
@@ -18,6 +18,7 @@ class StreamingLoadJobClass < RubyJobClass
         optional: true, default: Bricolage::PSQLLoadOptions.new,
         value_handler: lambda {|value, ctx, vars| Bricolage::PSQLLoadOptions.parse(value) })
     params.add Bricolage::DataSourceParam.new('s3', 's3-ds', 'S3 data source.')
+    params.add Bricolage::DataSourceParam.new('s3', 'ctl-ds', 'S3 data source for control files. (default: $s3-ds)', optional: true)
     params.add Bricolage::StringParam.new('ctl-prefix', 'S3_PREFIX', 'S3 object key prefix for control files. (default: ${queue-path}/ctl)', optional: true)
     params.add Bricolage::OptionalBoolParam.new('keep-ctl', 'Does not delete control files if true.')
     params.add Bricolage::StringParam.new('queue-path', 'S3_PATH', 'S3 path for data file queue.')
@@ -59,8 +60,6 @@ class StreamingLoadJobClass < RubyJobClass
 
   def make_loader(params)
     ds = params['redshift-ds']
-    load_opts = params['load-options']
-    load_opts.provide_defaults(params['s3-ds'])
     RedshiftStreamingLoader.new(
       data_source: ds,
       queue: make_s3_queue(params),
@@ -68,7 +67,7 @@ class StreamingLoadJobClass < RubyJobClass
       table: string(params['dest-table']),
       work_table: string(params['work-table']),
       log_table: string(params['log-table']),
-      load_options: load_opts,
+      load_options: params['load-options'],
       sql: params['sql-file'],
       logger: ds.logger,
       noop: params['noop'],
@@ -81,6 +80,7 @@ class StreamingLoadJobClass < RubyJobClass
     ds = params['s3-ds']
     S3Queue.new(
       data_source: ds,
+      ctl_ds: (params['ctl-ds'] || params['s3-ds']),
       ctl_prefix: (params['ctl-prefix'] || "#{params['queue-path']}/ctl"),
       queue_path: params['queue-path'],
       persistent_path: params['persistent-path'],
@@ -362,8 +362,9 @@ class StreamingLoadJobClass < RubyJobClass
   class S3Queue
     extend Forwardable
 
-    def initialize(data_source:, ctl_prefix:, queue_path:, persistent_path:, file_name:, logger:)
+    def initialize(data_source:, ctl_ds:, ctl_prefix:, queue_path:, persistent_path:, file_name:, logger:)
       @ds = data_source
+      @ctl_ds = ctl_ds
       @ctl_prefix = ctl_prefix
       @queue_path = queue_path
       @persistent_path = persistent_path
@@ -388,18 +389,18 @@ class StreamingLoadJobClass < RubyJobClass
     end
 
     def control_file_url(name)
-      @ds.url(control_file_path(name))
+      @ctl_ds.url(control_file_path(name))
     end
 
     def put_control_file(name, data, noop: false)
       @logger.info "s3 put: #{control_file_url(name)}"
-      @ds.object(control_file_path(name)).put(body: data) unless noop
+      @ctl_ds.object(control_file_path(name)).put(body: data) unless noop
       control_file_url(name)
     end
 
     def remove_control_file(name, noop: false)
       @logger.info "s3 delete: #{control_file_url(name)}"
-      @ds.object(control_file_path(name)).delete unless noop
+      @ctl_ds.object(control_file_path(name)).delete unless noop
     end
 
     def control_file_path(name)
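Taken together, these hunks let control files live on a separate S3 data source: the optional ctl-ds parameter falls back to s3-ds when omitted, is threaded through make_s3_queue into S3Queue, and the control-file helpers resolve against @ctl_ds instead of the queue's @ds. A minimal standalone Ruby sketch of that split, using hypothetical class and variable names; only the params['ctl-ds'] || params['s3-ds'] fallback and the url call mirror the diff:

# Hypothetical stand-in for an S3 data source; the real object is a Bricolage data source.
FakeS3DataSource = Struct.new(:bucket) do
  def url(key)
    "s3://#{bucket}/#{key}"
  end
end

# Illustrates the split introduced above: data files resolve against the primary
# data source, control files against ctl_ds, which defaults to the primary one.
class ControlFileStore
  def initialize(data_ds:, ctl_ds: nil)
    @ds = data_ds
    @ctl_ds = ctl_ds || data_ds   # same fallback as (params['ctl-ds'] || params['s3-ds'])
  end

  def data_file_url(key)
    @ds.url(key)
  end

  def control_file_url(key)
    @ctl_ds.url(key)
  end
end

store = ControlFileStore.new(
  data_ds: FakeS3DataSource.new("queue-bucket"),
  ctl_ds:  FakeS3DataSource.new("ctl-bucket")
)
puts store.data_file_url("queue/00001.json")      # => s3://queue-bucket/queue/00001.json
puts store.control_file_url("queue/ctl/start")    # => s3://ctl-bucket/queue/ctl/start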
data/lib/bricolage/psqldatasource.rb
CHANGED
@@ -328,7 +328,6 @@ module Bricolage
       unless src_ds.redshift_loader_source?
         raise ParameterError, "input data source does not support redshift as bulk loading source: #{src_ds.name}"
       end
-      opts.provide_defaults(src_ds)
       buf = StringIO.new
       buf.puts "copy #{dest_table}"
       buf.puts "from '#{src_ds.url(src_path)}'"
data/lib/bricolage/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: bricolage
 version: !ruby/object:Gem::Version
-  version: 5.29.2
+  version: 5.30.0
 platform: ruby
 authors:
 - Minero Aoki
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2019-05-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: pg
@@ -185,8 +185,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-
-rubygems_version: 2.7.6
+rubygems_version: 3.0.3
 signing_key:
 specification_version: 4
 summary: SQL Batch Framework