etna 0.1.19 → 0.1.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/etna.completion +78 -3
- data/lib/commands.rb +39 -4
- data/lib/etna.rb +1 -0
- data/lib/etna/clients/magma/formatting/models_csv.rb +280 -271
- data/lib/etna/clients/magma/models.rb +51 -0
- data/lib/etna/clients/magma/workflows/add_project_models_workflow.rb +8 -19
- data/lib/etna/clients/magma/workflows/create_project_workflow.rb +20 -14
- data/lib/etna/clients/magma/workflows/crud_workflow.rb +2 -2
- data/lib/etna/clients/magma/workflows/json_validators.rb +8 -3
- data/lib/etna/clients/magma/workflows/model_synchronization_workflow.rb +1 -1
- data/lib/etna/clients/magma/workflows/update_attributes_from_csv_workflow.rb +75 -7
- data/lib/etna/csvs.rb +159 -0
- data/lib/etna/errors.rb +6 -0
- data/lib/etna/logger.rb +9 -1
- data/lib/etna/spec/vcr.rb +1 -0
- data/lib/helpers.rb +9 -6
- metadata +3 -2
data/lib/etna/clients/magma/models.rb

@@ -368,12 +368,56 @@ module Etna
           Attributes.new(raw['attributes'] ||= {})
         end

+        def dictionary
+          Dictionary.new(raw['dictionary'] ||= {})
+        end
+
+        def build_dictionary
+          Dictionary.new(raw['dictionary'] ||= {})
+        end
+
         def all_linked_model_names
           models = [ self.parent, ] + build_attributes.all.map { |v| v.link_model_name }
           models.select { |m| !m.nil? }.uniq
         end
       end

+      class Dictionary
+        attr_reader :raw
+
+        def initialize(raw = {})
+          @raw = raw
+        end
+
+        def dictionary_keys
+          raw.keys
+        end
+
+        def dictionary_model
+          raw['dictionary_model']
+        end
+
+        def dictionary_model=(val)
+          @raw['dictionary_model'] = val
+        end
+
+        def model_name
+          raw['model_name']
+        end
+
+        def model_name=(val)
+          @raw['model_name'] = val
+        end
+
+        def attributes
+          raw['attributes']
+        end
+
+        def attributes=(val)
+          @raw['attributes'] = val
+        end
+      end
+
       class Attributes
         attr_reader :raw

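The new Dictionary class is a thin wrapper over the raw hash Magma returns, mirroring the other model classes in models.rb. A minimal usage sketch (the payload below is hypothetical, not taken from the diff):

# Hypothetical dictionary payload as it might appear in a template's raw hash.
raw_dictionary = { 'dictionary_model' => 'flags', 'model_name' => 'subject' }

dict = Etna::Clients::Magma::Dictionary.new(raw_dictionary)
dict.model_name                     # => "subject"
dict.dictionary_model = 'codes'     # writes through to the wrapped hash
raw_dictionary['dictionary_model']  # => "codes"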
@@ -436,6 +480,12 @@ module Etna
           @raw['attribute_type'] && AttributeType.new(@raw['attribute_type'])
         end

+        def is_project_name_reference?(model_name)
+          return true if model_name == 'project' && attribute_type == AttributeType::IDENTIFIER
+          return true if link_model_name == 'project'
+          false
+        end
+
         def attribute_type=(val)
           val = val.to_s if val
           @raw['attribute_type'] = val
@@ -557,6 +607,7 @@ module Etna
         CHILD = AttributeType.new("child")
         COLLECTION = AttributeType.new("collection")
         FILE = AttributeType.new("file")
+        FILE_COLLECTION = AttributeType.new("file_collection")
         FLOAT = AttributeType.new("float")
         IDENTIFIER = AttributeType.new("identifier")
         IMAGE = AttributeType.new("image")
data/lib/etna/clients/magma/workflows/add_project_models_workflow.rb

@@ -28,27 +28,18 @@ module Etna
               next unless (validation = attribute&.validation)
               next unless (value = validation['value'])

-              if validation['type'] == 'Array' && value&.first.start_with?(ModelsCsv::COPY_OPTIONS_SENTINEL)
-                digest = value&.first.slice((ModelsCsv::COPY_OPTIONS_SENTINEL.length)..-1)
+              if validation['type'] == 'Array' && value&.first.start_with?(Etna::Clients::Magma::ModelsCsv::COPY_OPTIONS_SENTINEL)
+                digest = value&.first.slice((Etna::Clients::Magma::ModelsCsv::COPY_OPTIONS_SENTINEL.length)..-1)
                 attribute.validation = { 'type' => 'Array', 'value' => changeset.matrix_constants[digest] }
               end
             end
           end
         end

-        def prepare_changeset_from_csv(
-
-
-          changeset = csv_lines.inject(ModelsCsv::ModelsChangeset.new) do |acc, n|
-            line_no += 1
-            ModelsCsv.apply_csv_row(acc, n) do |err|
-              err_block.call("Error detected on line #{line_no + 1}: #{err}")
-              return
-            end
-          end
-
+        def prepare_changeset_from_csv(filename: nil, io: nil, &err_block)
+          importer = ModelsCsv::Importer.new
+          changeset = importer.prepare_changeset(filename: filename, input_io: io, &err_block)
           self.class.validate_changeset(changeset, &err_block)
-
           changeset
         end

@@ -64,13 +55,11 @@ module Etna
           validator.errors.each(&err_block)
         end

-        def write_models_template_csv(
+        def write_models_template_csv(project_name, target_model = 'project', filename: nil, io: nil)
           models = magma_client.retrieve(RetrievalRequest.new(project_name: project_name, model_name: 'all')).models
           descendants = models.to_directed_graph.descendants(target_model)
-
-
-            csv << row
-          end
+          exporter = ModelsCsv::Exporter.new
+          exporter.write_models(models, [target_model] + descendants.keys, filename: filename, output_io: io)
         end
       end
     end
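Both entry points now delegate to the ModelsCsv Importer/Exporter pair (see models_csv.rb in the file list above). A rough usage sketch; the AddProjectModelsWorkflow name and its magma_client member are inferred from the file name and are not shown in this hunk:

# Assumed constructor; only the two method signatures come from the diff.
workflow = Etna::Clients::Magma::AddProjectModelsWorkflow.new(magma_client: magma_client)

# Dump the model tree rooted at 'project' into a template CSV.
workflow.write_models_template_csv('example_project', 'project', filename: 'models_template.csv')

# Read an edited CSV back into a changeset, reporting problems through the block.
changeset = workflow.prepare_changeset_from_csv(filename: 'models_template.csv') do |err|
  $stderr.puts(err)
end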
data/lib/etna/clients/magma/workflows/create_project_workflow.rb

@@ -74,6 +74,20 @@ module Etna
           promote_to_administrator(user['email'])
           update_magma_client_token!

+          puts "Done with setting up the project in Janus!"
+        end
+
+        def setup_magma_project!
+          puts "Creating the project in Magma."
+          create_magma_project!
+          puts "Done! Adding your new project record."
+          create_magma_project_record!
+        end
+
+        def create!
+          setup_janus_project!
+          setup_magma_project!
+
           while true
             puts "Add more users? Y/n"
             break unless STDIN.gets.chomp == 'Y'
@@ -87,22 +101,14 @@ module Etna
             puts "Confirm? Y/n"
             break unless STDIN.gets.chomp == 'Y'

-
+            if role == 'administrator'
+              add_janus_user(email, name, 'editor')
+              promote_to_administrator(email)
+            else
+              add_janus_user(email, name, role)
+            end
           end

-          puts "Done with setting up the project in Janus!"
-        end
-
-        def setup_magma_project!
-          puts "Creating the project in Magma."
-          create_magma_project!
-          puts "Done! Adding your new project record."
-          create_magma_project_record!
-        end
-
-        def create!
-          setup_janus_project!
-          setup_magma_project!
           puts "All complete!"
           puts "You need to visit Janus to refresh your token."
           puts "You can now log into any app to manage your data."
data/lib/etna/clients/magma/workflows/crud_workflow.rb

@@ -47,7 +47,7 @@ module Etna

         # Todo: Introduce associative concatenation operations for response objects and return
         # one response that munges the batched responses together.
-        def update_records
+        def update_records(method: :update)
           @recorded_updates ||= UpdateRequest.new(project_name: project_name)

           request = UpdateRequest.new(project_name: project_name)
@@ -72,7 +72,7 @@ module Etna
           responses = []
           revisions.to_a.each_slice(batch_size) do |batch|
             request.revisions = batch.to_h
-            magma_client.
+            magma_client.send(method, request) unless read_only
             responses << @recorded_updates.revisions.update(request.revisions)
           end

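update_records now takes a method: keyword so a batch can be routed through a different magma_client call; the CSV workflow later in this diff passes :update_json. A sketch, assuming magma_crud is an already-built crud workflow and using a hypothetical model and revision:

revision = { 'notes' => 'processed' }   # hypothetical attribute values

magma_crud.update_records(method: :update_json) do |update_request|
  update_request.update_revision('subject', 'EX-001', revision)
end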
data/lib/etna/clients/magma/workflows/json_validators.rb

@@ -24,6 +24,10 @@ module Etna
           @errors << "Invalid empty #{key} for #{label}: \"#{raw[key]}\"." if raw.dig(key) && nil_or_empty?(raw[key])
         end

+        def check_key_empty(label, raw, key)
+          @errors << "Invalid key for #{label}: \"#{key}\"." if raw.dig(key)
+        end
+
         def check_in_set(label, raw, key, valid_values)
           @errors << "Invalid #{key} for #{label}: \"#{raw[key]}\".\nShould be one of #{valid_values}." if raw.dig(key) && !valid_values.include?(raw[key])
         end
@@ -139,11 +143,12 @@ module Etna
           model.template.attributes.attribute_keys.each do |attribute_name|
             attribute = model.template.attributes.attribute(attribute_name)

-
+            reciprocal = @models.find_reciprocal(model: model, attribute: attribute)
+            if attribute_name == model.template.identifier && reciprocal&.attribute_type != AttributeType::TABLE
               attribute_types = [AttributeType::IDENTIFIER]
             elsif attribute_name == model.template.parent
               attribute_types = [AttributeType::PARENT]
-            elsif
+            elsif reciprocal&.attribute_type == AttributeType::PARENT
               attribute_types = AttributeValidator.valid_parent_link_attribute_types
             else
               attribute_types = AttributeValidator.valid_add_row_attribute_types
@@ -184,7 +189,7 @@ module Etna

         def link_attributes
           @model.template.attributes.all.select do |attribute|
-            attribute.
+            attribute.link_model_name
           end
         end
       end
data/lib/etna/clients/magma/workflows/model_synchronization_workflow.rb

@@ -85,7 +85,7 @@ module Etna
           if action.parent_link_type != Etna::Clients::Magma::AttributeType::TABLE
             template.build_attributes.build_attribute(template.identifier).tap do |attr|
               attr.attribute_name = template.identifier
-              attr.attribute_type = Etna::Clients::Magma::AttributeType::
+              attr.attribute_type = Etna::Clients::Magma::AttributeType::IDENTIFIER
             end
           end
         when RenameAttributeAction
data/lib/etna/clients/magma/workflows/update_attributes_from_csv_workflow.rb

@@ -5,7 +5,7 @@ require_relative './crud_workflow'
 module Etna
   module Clients
     class Magma
-      class UpdateAttributesFromCsvWorkflowBase < Struct.new(:magma_crud, :project_name, :filepath, :model_name, keyword_init: true)
+      class UpdateAttributesFromCsvWorkflowBase < Struct.new(:magma_crud, :project_name, :filepath, :model_name, :json_values, :hole_value, keyword_init: true)
         def initialize(opts)
           super(**{}.update(opts))
         end
@@ -37,7 +37,8 @@ module Etna
         end

         def update_attributes
-
+          method = json_values ? :update_json : :update
+          magma_crud.update_records(method: method) do |update_request|
             each_revision do |model_name, record_name, revision|
               update_request.update_revision(model_name, record_name, revision)
             end
@@ -53,7 +54,12 @@ module Etna

       class RowBase
         def stripped_value(attribute_value)
-          attribute_value
+          attribute_value = attribute_value&.strip
+
+          if attribute_value && @workflow.json_values && attribute_value != @workflow.hole_value
+            attribute_value = JSON.parse(attribute_value)
+          end
+          attribute_value
         end

         def nil_or_empty?(value)
@@ -74,6 +80,7 @@ module Etna

         class Row < RowBase
           attr_reader :model_name, :record_name
+
           def initialize(raw, workflow)
             # Assumes rows are in pairs, where
             # [0] = model_name
@@ -112,9 +119,70 @@ module Etna
                   raise "Invalid attribute name: \"#{attribute_name}\"." if nil_or_empty?(attribute_name)
                   attribute_name.strip!

-
+                  unless (attribute = @workflow.find_attribute(model_name, attribute_name))
+                    raise "Invalid attribute #{attribute_name} for model #{model_name}."
+                  end
+
+                  stripped = stripped_value(@raw[index + 1])
+                  unless @workflow.hole_value.nil?
+                    next if stripped == @workflow.hole_value
+                  end
+
+                  if attribute.is_project_name_reference?(model_name)
+                    stripped&.downcase!
+                  end
+
+                  attributes[attribute_name] = stripped
+                end
+              end
+            end
+          end
+        end
+      end
+
+      class SimpleFileLinkingWorkflow < Struct.new(:metis_client, :project_name, :bucket_name, :folder, :extension, :attribute_name, :regex, :file_collection, keyword_init: true)
+        def write_csv_io(filename: nil, output_io: nil)
+          exporter = Etna::CsvExporter.new([:identifier, attribute_name.to_sym])
+          exporter.with_row_writeable(filename: filename, output_io: output_io) do |row_writeable|
+            find_matching_revisions.each do |identifier, value|
+              row_writeable << { identifier: identifier, attribute_name.to_sym => value.to_json }
+            end
+          end
+        end
+
+        def find_matching_revisions
+          {}.tap do |revisions|
+            metis_client.find(
+                Etna::Clients::Metis::FindRequest.new(
+                    project_name: project_name,
+                    bucket_name: bucket_name,
+                    params: [Etna::Clients::Metis::FindParam.new(
+                        attribute: 'name',
+                        predicate: 'glob',
+                        value: "#{folder}/**/*.#{extension}",
+                        type: 'file'
+                    )]
+                )).files.all.each do |file|
+              puts "Checking #{file.file_path}"
+              match = regex.match(file.file_path)
+              if match
+                match_map = match.names.zip(match.captures).to_h
+                if !match_map.include?('identifier')
+                  raise "Regex #{regex.source} does not include a ?<identifier> named matcher, please add one to regulate how identifiers are created."
+                end

-
+                puts "Found match"
+
+                revision = { 'path' => "metis://#{project_name}/#{bucket_name}/#{file.file_path}", 'original_filename' => "#{File.basename(file.file_path)}" }
+                if file_collection
+                  collection = revisions[match_map['identifier']] ||= []
+                  collection << revision
+                else
+                  record = revisions[match_map['identifier']] ||= {}
+                  unless record.empty?
+                    raise "Multiple files match #{match_map['identifier']}, found #{record['path']} and #{revision['path']}"
+                  end
+                  record.update(revision)
                end
              end
            end
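SimpleFileLinkingWorkflow scans a Metis bucket with a glob, pulls an identifier out of each matching path via a ?<identifier> named capture, and writes a two-column CSV that the update workflow can consume. A usage sketch (the bucket, folder, attribute, and regex are hypothetical):

workflow = Etna::Clients::Magma::SimpleFileLinkingWorkflow.new(
  metis_client: metis_client,
  project_name: 'example_project',
  bucket_name: 'data',
  folder: 'fastqs',
  extension: 'fastq.gz',
  attribute_name: 'raw_fastqs',
  regex: /fastqs\/(?<identifier>[^\/]+)\//,
  file_collection: true   # accumulate an array of file revisions per identifier
)
workflow.write_csv_io(filename: 'file_links.csv')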
@@ -125,7 +193,7 @@ module Etna
       class UpdateAttributesFromCsvWorkflowSingleModel < UpdateAttributesFromCsvWorkflowBase
         def initialize(opts)
           super(**{}.update(opts))
-          raise "Single Model
+          raise "Single Model invocation must include keyword :model_name." if !opts[:model_name]
           raise "Invalid model #{model_name} for project #{project_name}." unless model_exists?(model_name)
         end

@@ -139,6 +207,7 @@ module Etna

         class Row < RowBase
           attr_reader :record_name
+
           def initialize(raw, model_name, workflow)
             # Assumes CSV includes a column header to identify the attribute_name
             # Assumes index 0 is the record_name
@@ -160,7 +229,6 @@ module Etna
             row_hash = @raw.to_h
             row_keys = row_hash.keys
             row_keys[1..row_keys.length - 1].each do |attribute_name|
-
               raise "Invalid attribute name: \"#{attribute_name}\"." if nil_or_empty?(attribute_name)

               attribute_name_clean = attribute_name.strip
data/lib/etna/csvs.rb (ADDED)

@@ -0,0 +1,159 @@
+module Etna
+  class CsvImporter
+    def initialize(
+        strip: true,
+        filter_empties: true,
+        &row_formatter
+    )
+      # Removes any columns from a row that are empty strings. Allows for some simpler 'empty' processing. This occurs
+      # post stripping as well.
+      @filter_empties = filter_empties
+      @row_formatter = row_formatter
+    end
+
+    COLUMN_AS_BOOLEAN = -> (s) { ['true', 't', 'y', 'yes'].include?(s&.downcase) }
+
+    def each_csv_row(filename: nil, input_io: nil, &block)
+      if input_io.nil?
+        unless filename.nil?
+          File.open(filename, 'r') do |io|
+            return each_csv_row(input_io: io, &block)
+          end
+        end
+      end
+
+      lineno = 1
+      CSV.parse(input_io, headers: true, header_converters: :symbol) do |row|
+        lineno += 1
+        row = row.to_hash
+        row.keys.each { |k| row[k].strip! if row[k] =~ /^\s+$/ } if @strip
+        row.select! { |k, v| !v.empty? } if @filter_empties
+        @row_formatter.call(row) unless @row_formatter.nil?
+        yield row, lineno if block_given?
+      end
+    end
+
+    def replace_row_column(row, column, &block)
+      if !row[column].nil? || block.arity == 0
+        row[column] = yield row[column]
+      end
+    end
+
+    class ImportError < StandardError
+      attr_accessor :lineno
+
+      def initialize(msg, lineno = nil)
+        @lineno = lineno
+        super(msg)
+      end
+
+      def message
+        "line #{lineno}: #{super}"
+      end
+    end
+
+    class NestedRowProcessor
+      attr_reader :row, :lineno, :context, :errors
+
+      def initialize(row, lineno, context)
+        @row = row
+        @lineno = lineno
+        @context = context
+        @errors = []
+
+        # If a parent context changes, all child contexts are invalidated. But since parent contexts are changed
+        # before the relationship of child contexts are declared, we have to track that so that when a child context
+        # dependency is declared we can clear it based on wether parents have changed.
+        @changed = {}
+      end
+
+      def process(column, *parents, &block)
+        if parents.any? { |p| @changed.include?(p) }
+          @changed[column] = true
+          @context[column] = nil
+        end
+
+        return self if (next_val = row[column]).nil?
+        @changed[column] = true
+
+        parent_values = parents.map do |p|
+          if @context[p].nil?
+            raise ImportError.new("Found a #{column} value, but no previous #{p} had been given!", @lineno)
+          end
+
+          @context[p]
+        end
+
+        begin
+          next_val = yield next_val, *parent_values, self if block_given?
+        rescue ImportError => e
+          e.lineno = @lineno
+          raise e
+        end
+
+        @context[column] = next_val
+        self
+      end
+    end
+  end
+
+  class CsvExporter
+    # column_headers should be an array of symbols, mapping the column heading names and ordering to export
+    # column_serializer is an optional block that takes column (string), column_value (string) and should
+    # return a string representation of column_value to write to the csv. By default, when nil, the exporter
+    # will attempt to convert the value to a string via to_s or simply write an empty string for nil.
+
+    attr_reader :column_headers
+    def initialize(column_headers, &column_serializer)
+      @column_headers = column_headers
+      @column_serializer = column_serializer
+    end
+
+    def header_row
+      @column_headers.map(&:to_s)
+    end
+
+    def map_column_value(column, column_value)
+      @column_serializer&.call(column, column_value) || column_value&.to_s || ''
+    end
+
+    def row_from_columns(columns)
+      @column_headers.map { |c| self.map_column_value(c, columns[c] || '') }
+    end
+
+    def with_row_writeable(filename: nil, output_io: nil, &block)
+      if output_io.nil? && !filename.nil?
+        File.open(filename, 'w') do |io|
+          return with_row_writeable(output_io: io, &block)
+        end
+      end
+
+      writeable = self.class::RowWriteable.new(self, CSV.new(output_io))
+      yield writeable
+      writeable.ensure_headers
+      nil
+    end
+
+    class RowWriteable
+      def initialize(exporter, csv)
+        @exporter = exporter
+        @csv = csv
+        @written_headers = false
+      end
+
+      def <<(columns)
+        self.ensure_headers
+        @csv << @exporter.row_from_columns(columns)
+      end
+
+      def write(**columns)
+        self.<<(columns)
+      end
+
+      def ensure_headers
+        @csv << @exporter.header_row unless @written_headers
+        @written_headers = true
+      end
+    end
+  end
+end
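A hedged sketch of the two new helpers in use (the CSV layout and column names are made up): CsvImporter#each_csv_row yields symbol-keyed rows plus a line number, NestedRowProcessor carries parent values forward across sparse rows, and CsvExporter#with_row_writeable writes the header row lazily before the first data row.

require 'csv'
require 'etna'

# Import: iterate rows of a hypothetical models.csv with 'model' and
# 'attribute_name' columns, carrying the model context across sparse rows.
context = {}
importer = Etna::CsvImporter.new
importer.each_csv_row(filename: 'models.csv') do |row, lineno|
  processor = Etna::CsvImporter::NestedRowProcessor.new(row, lineno, context)
  processor.process(:model) { |model_name| model_name.downcase }
  processor.process(:attribute_name, :model) do |attribute_name, model_name|
    puts "#{model_name}.#{attribute_name}"
    attribute_name
  end
end

# Export: the header row is written once, before the first data row.
exporter = Etna::CsvExporter.new([:identifier, :value])
exporter.with_row_writeable(filename: 'out.csv') do |rows|
  rows << { identifier: 'EX-001', value: 'hello' }
  rows.write(identifier: 'EX-002', value: 'world')
end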