gd_bam 0.0.1
- data/README.md +169 -0
- data/bin/bam +218 -0
- data/lib/bam/version.rb +3 -0
- data/lib/bam.rb +8 -0
- data/lib/dsl/project_dsl.rb +259 -0
- data/lib/graphs/docentize.grf +47 -0
- data/lib/graphs/dummy.grf +46 -0
- data/lib/graphs/load_history.grf +579 -0
- data/lib/graphs/process_account.grf +47 -0
- data/lib/graphs/process_activity.grf +222 -0
- data/lib/graphs/process_activity_dim.grf +88 -0
- data/lib/graphs/process_activity_owner.grf +48 -0
- data/lib/graphs/process_opportunity.grf +46 -0
- data/lib/graphs/process_opportunity_line_item.grf +179 -0
- data/lib/graphs/process_opportunity_snapshot.grf +94 -0
- data/lib/graphs/process_owner.grf +48 -0
- data/lib/graphs/process_stage.grf +51 -0
- data/lib/graphs/process_stage_history.grf +184 -0
- data/lib/graphs/process_velocity_duration.grf +140 -0
- data/lib/nodes/clover_gen.rb +1283 -0
- data/lib/nodes/dependency.rb +96 -0
- data/lib/nodes/nodes.rb +371 -0
- data/lib/repo/1_config.json +8 -0
- data/lib/repository/repo.rb +21 -0
- data/lib/runtime.rb +517 -0
- data/templates/dataset.json.erb +13 -0
- data/templates/flow.rb.erb +12 -0
- data/templates/params.json.erb +7 -0
- data/templates/project.erb +18 -0
- data/templates/source.json.erb +22 -0
- data/templates/tap.json.erb +16 -0
- data/templates/update_dataset.script.erb +4 -0
- data/templates/update_dataset_dry.script.erb +3 -0
- data/templates/workspace.prm.erb +25 -0
- metadata +412 -0
data/lib/nodes/clover_gen.rb
@@ -0,0 +1,1283 @@
module Enumerable
  def uniq_by
    seen = Hash.new { |h, k| h[k] = true; false }
    reject { |v| seen[yield(v)] }
  end
end

class Hash
  # Pass a single key or a list of keys; they are deleted in place and the
  # remaining hash is returned.
  def remove!(*keys)
    keys.each { |key| self.delete(key) }
    self
  end

  # Non-destructive version.
  def remove(*keys)
    self.dup.remove!(*keys)
  end
end
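# A quick sketch of the two helpers above (hypothetical values, not part of
# the gem):
#
#   [{:name => "Id"}, {:name => "Id"}, {:name => "Name"}].uniq_by { |f| f[:name] }
#   # => [{:name => "Id"}, {:name => "Name"}]
#
#   {:a => 1, :b => 2}.remove(:b)  # => {:a => 1}; the receiver is untouched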

module GoodData
  module CloverGenerator

    # CTL transformation that fills every field of the output record with an
    # N/A value appropriate for its type.
    ADD_NA_TRANSFORMATION = <<-TRANSF
    function integer generate() {
      integer number_of_fields = length($out.0);
      integer i = 0;
      string type = "";
      for (i; i < number_of_fields; ++i) {
        type = getFieldType($out.0, i);
        switch(type) {
          case "string": setStringValue($out.0, i, 'N/A'); break;
          case "number": setNumValue($out.0, i, 0); break;
          case "date": setDateValue($out.0, i, null); break;
        }
      };

      return OK;
    }
    TRANSF

    def self.parse_json(file)
      begin
        JSON.parse(File.read(file), :symbolize_names => true)
      rescue JSON::ParserError => e
        puts "Error parsing \"#{file}\": #{e.inspect}"
      end
    end


    # Writes a Clover <Record> metadata element described by the given hash.
    def self.metadata(builder, description)
      builder.Record({
        :fieldDelimiter => description[:fieldDelimiter],
        :name => description[:name],
        :recordDelimiter => description[:recordDelimiter],
        :type => description[:type]
      }) do |record|
        description[:fields].each do |field|
          builder.Field :name => field[:name], :type => field[:type], :nullable => "true"
        end
      end
    end

    # Same as metadata() but preset for comma-delimited CSV records.
    def self.csv_metadata(builder, description)
      sf_description = description.merge({
        :fieldDelimiter => ",",
        :recordDelimiter => "\\n",
        :type => "delimited"
      })
      metadata(builder, sf_description)
    end
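# For illustration (hypothetical description, not from the gem's templates):
#   csv_metadata(builder, {:name => "account", :fields => [{:name => "Id", :type => "string"}]})
# emits a delimited record element roughly like
#   <Record fieldDelimiter="," name="account" recordDelimiter="\n" type="delimited">
#     <Field name="Id" nullable="true" type="string"/>
#   </Record>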


    def self.sf_connection(builder, data)
      builder.Connection({
        :clientId => "${SFDC_CLIENT_ID}",
        :id => "SFDC",
        :loginHostname => "${SFDC_LOGIN_HOSTNAME}",
        :name => "${SFDC_NAME}",
        :password => "${SFDC_PASSWORD}",
        :passwordEncrypted => "false",
        :token => "${SFDC_TOKEN}",
        :type => "SFDC",
        :username => "${SFDC_USERNAME}"
      })
    end

    def self.external_metadata_link(builder, data)
      builder.Metadata({
        :fileURL => data[:fileURL],
        :id => data[:id]
      })
    end

    def self.property(builder, data)
      builder.Property({
        :id => data[:id],
        :name => data[:name] || data[:id],
        :value => data[:value]
      })
    end


    def self.property_file(builder, data)
      builder.Property({
        :id => data[:id],
        :fileURL => data[:fileURL]
      })
    end

    def self.graph2(file, stuff)
      metadata = stuff[:metadata]
      nodes = stuff[:nodes]
      connections = stuff[:connections]
      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Goodsales Downloader"
        }) do
          builder.Global do
            metadata.each do |m|
              build_metadata2(builder, m)
            end
            connections.each do |conn|
              build_node2(builder, conn)
            end
          end
          builder.Phase(:number => 0) do
            nodes.each do |node|
              build_node2(builder, node)
            end
          end
        end
      end
    end

    def self.build_node2(builder, node)
      if node[:type] == GoodData::CloverGenerator::Nodes::EDGE
        builder.tag!("Edge", node)
      elsif node[:type] == GoodData::CloverGenerator::Nodes::SF_CONNECTION
        builder.tag!("Connection", node)
      elsif node[:type] == GoodData::CloverGenerator::Nodes::FILE_LIST
        builder.tag!("Node", node.remove(:transformation)) do |xml|
          xml.attr({:name => "outputMapping"}) do |attr|
            transformation = node[:transformation]
            attr.cdata! transformation
          end
        end
      elsif node[:type] == GoodData::CloverGenerator::Nodes::REFORMAT || node[:type] == GoodData::CloverGenerator::Nodes::EXT_HASH_JOIN
        builder.tag!("Node", node.remove(:transformation)) do |xml|
          xml.attr({:name => "transform"}) do |attr|
            transformation = node[:transformation]
            attr.cdata! transformation
          end
        end
      elsif node[:type] == GoodData::CloverGenerator::Nodes::DATA_GENERATOR
        builder.tag!("Node", node.remove(:transformation)) do |xml|
          xml.attr({:name => "generate"}) do |attr|
            transformation = node[:generate]
            attr.cdata! transformation
          end
        end
      elsif node[:type] == GoodData::CloverGenerator::Nodes::PERSISTENT_LOOKUP || node[:type] == GoodData::CloverGenerator::Nodes::GD_LOOKUP
        builder.tag!("LookupTable", node)
      else
        builder.tag!("Node", node)
      end
    end
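# A sketch of the dispatch above (hypothetical node hash): an edge node such as
#   {:type => Nodes::EDGE, :id => "e1", :fromNode => "a:0", :toNode => "b:0"}
# is serialized with all hash pairs as XML attributes, <Edge .../>, while
# transformation-carrying nodes get their CTL source wrapped in a CDATA <attr>.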

    def self.run_ctl(builder, ctl)
      id1 = rand(10000000000)
      id2 = rand(10000000000)
      id3 = rand(10000000000)

      build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => "#{id1}", :id => "#{id1}", :generate => ctl}))
      build_node2(builder, GoodData::CloverGenerator::Nodes.trash2({:name => "#{id2}", :id => "#{id2}"}))
      build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata", :id => "#{id3}"}))
    end

    def self.build_metadata2(builder, node)
      builder.Metadata({:id => node[:id]}) do
        csv_metadata(builder, {
          :name => "account_sf_reformat",
          :fields => translate(node[:fields])
        })
      end
    end

    def self.sf_field_mapping(stuff)
      fields = stuff["fields"]
      mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
      add = fields.map do |f|
        "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:name]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
      end
      mapping + add.join(",") + "]}}"
    end
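# Sketch (hypothetical input): for stuff = {"fields" => [{:name => "Id"}]}
# the mapping JSON comes out as
#   {"xmlFieldsMapping":{"xmlFields":[
#     {"xmlFieldMapping":{"name":"Id","label":"Id","xmlPath":"Id","metadataField":"Id"}}
#   ]}}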

    def self.validate_gd_datasets(datasets, gd_datasets)
      dataset_names = datasets.map {|d| d[:gd_name] || d[:id]}
      gd_dataset_names = gd_datasets.map {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "")}.find_all {|d| d.index(".dt").nil?}
      {
        :not_in_gd => dataset_names - gd_dataset_names,
        :not_loaded => gd_dataset_names - dataset_names
      }
    end

    def self.validate_gd_datasets_metadata(datasets, gd_datasets)
      init = {
        :datasets => {}
      }

      datasets.reduce(init) do |memo, dataset|
        gd_name = dataset[:gd_name] || dataset[:id]
        gd_dataset = gd_datasets.find {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "") == gd_name}
        gd_dataset_names = gd_datasets.map {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "")}.find_all {|d| d.index(".dt").nil?}

        next(memo) if gd_dataset.nil?

        refs = dataset[:fields].find_all {|f| f[:type] == "reference"}
        refs.each do |ref|
          (memo[:datasets][gd_name] ||= []) << ref unless gd_dataset_names.include?(ref[:schema])
          ref_attr_identifier = "attr.#{ref[:schema]}.#{ref[:ref]}"
          (memo[:datasets][gd_name] ||= []) << ref if GoodData::MdObject[ref_attr_identifier].nil?
        end

        labels = dataset[:fields].find_all {|f| f[:type] == "label"}
        gd_labels = gd_dataset['dataSet']['content']['dataLoadingColumns'].map {|label| GoodData.get(label)}
        labels.each do |label|
          label_name = label[:name]
          label_identifier = "label.#{gd_name}.#{label[:for]}.#{label_name}"
          (memo[:datasets][gd_name] ||= []) << label if GoodData::MdObject[label_identifier].nil?
        end

        facts = dataset[:fields].find_all {|f| f[:type] == "fact"}
        gd_facts = gd_dataset['dataSet']['content']['facts'].map {|fact| GoodData.get(fact)}
        facts.each do |fact|
          fact_name = fact[:name]
          unless gd_facts.any? {|a| a['fact']['meta']['identifier'] == "fact.#{gd_name}.#{fact_name}"}
            (memo[:datasets][gd_name] ||= []) << fact
          end
        end

        attributes = dataset[:fields].find_all {|f| f[:type] == "attribute" || f[:type] == "connection_point"}
        gd_attributes = gd_dataset['dataSet']['content']['attributes'].map {|attr| GoodData.get(attr)}
        attributes.each do |attr|
          attr_name = attr[:name]
          unless gd_attributes.any? {|a| a['attribute']['meta']['identifier'] == "attr.#{gd_name}.#{attr_name}"}
            (memo[:datasets][gd_name] ||= []) << attr
          end
        end
        memo
      end
    end


    def self.validate_sf_metadata(sf_client, sources)
      sources.reduce({}) do |memo, source|
        puts "Checking #{source[:object]}"
        sf_object = source[:object]
        u = sf_client.describe(sf_object)
        sf_fields = u[:describeSObjectResponse][:result][:fields].map {|field| field[:name]}
        fields_to_validate = source[:fields].map {|field| field[:name]}
        memo[sf_object] = (fields_to_validate - sf_fields)
        pp fields_to_validate - sf_fields
        memo
      end
    end

    def self.get_sf_client(params)
      Salesforce::Client.new(params[:sf_login], params[:sf_password] + params[:sf_token], :server => params[:sf_server])
    end

    def self.download_metadata(downloaders_spec)
      mods = []
      c = Salesforce::Client.new(downloaders_spec["login"], downloaders_spec["password"] + downloaders_spec["token"])
      downloaders_spec["input"].each do |mod|
        sf_module = mod["module"]
        output = mod["file"]
        fields = mod["fields"]

        u = c.describe(sf_module)
        sf_fields = u[:describeSObjectResponse][:result][:fields]
        mods << mod.merge({
          "fields" => fields.map do |f|
            found_field = sf_fields.find {|sf| sf[:name] == f["name"]}
            if found_field.nil?
              fail "Field \"#{f["name"]}\" was not found in the \"#{sf_module}\" module"
            end
            f.merge(found_field)
          end
        })
      end
      downloaders_spec.merge({"input" => mods})
    end

    def self.save_metadata(filename, data)
      File.open(filename, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        csv_metadata(builder, data)
      end
    end

    def self.translate_field(what)
      dict = {
        nil => "string",
        "id" => "string",
        "string" => "string",
        "textarea" => "string",
        "email" => "string",
        "phone" => "string",
        "boolean" => "string",
        "picklist" => "string",
        "reference" => "string",
        "datetime" => "date",
        "date" => "date",
        "currency" => "string",
        "percent" => "string",
        "int" => "string",
        "multipicklist" => "string",
        "combobox" => "string",
        "attribute" => "string",
        "fact" => "string"
      }
      fail "#{what} is not included in the translation dictionary from SF to Clover types" unless dict.include?(what)
      dict[what]
    end

    # Currently an identity mapping; the commented body shows how fields would
    # be translated via translate_field.
    def self.translate(fields)
      fields
      # fields.map do |f|
      #   f.merge({
      #     :type => translate_field(f[:type])
      #   })
      # end
    end



    def self.find_by_name(fields, name)
      fields.find do |m|
        xname = m["module"]
        output = m["file"] || xname
        output == name
      end
    end

    def self.find_by_dataset(fields, name)
      fields.find do |m|
        md = m["module"]
        file = m["file"]
        dataset = m["dataset"]
        (dataset || file || md) == name
      end
    end


    def self.merge_modules(spec, base_spec)
      modules = spec["input"]
      resolved_modules = modules.map do |mod|
        mod_name = mod["file"] || mod["module"]
        fields = mod["fields"]
        default = find_by_name(base_spec["input"], mod_name)
        if default.nil?
          mod
        else
          clashing_fields = ((default["fields"] || []) & (mod["fields"]))
          unless clashing_fields.empty?
            fail "There are fields \"#{clashing_fields.join(', ')}\" that are clashing with the default definition"
          end
          master_fields = default["fields"]
          redefinitions = fields.find_all {|f| f.has_key?(:acts_as)}
          exclusions = redefinitions.reduce([]) {|memo, e| memo.concat([*e[:acts_as]])}
          mod.merge({
            "fields" => (master_fields.reject {|mf| exclusions.include? mf["name"]}) + fields
          })
        end
      end
      spec.merge({
        "input" => resolved_modules
      })
    end

    def self.generate_incremental_select(spec)
      if spec[:condition].nil? || spec[:condition].empty?
        spec[:condition] = "SystemModstamp > ${#{spec[:id]}_START} AND SystemModstamp <= ${#{spec[:id]}_END}"
      else
        spec[:condition] += " AND SystemModstamp > ${#{spec[:id]}_START} AND SystemModstamp <= ${#{spec[:id]}_END}"
      end
      generate_select(spec)
    end

    def self.generate_select(spec)
      fields = spec[:fields].map do |f|
        f.has_key?(:multi_currency) ? "convertCurrency(#{f[:name]})" : f[:name]
      end
      condition = spec[:condition].nil? ? "" : "WHERE #{spec[:condition]}"
      limit = spec[:limit].nil? ? "" : "LIMIT #{spec[:limit]}"
      "SELECT #{fields.join(', ')} FROM #{spec[:object]} #{condition} #{limit}"
    end
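# Sketch (hypothetical spec, not from the gem's templates):
#
#   generate_select(:object => "Opportunity",
#                   :fields => [{:name => "Id"}, {:name => "Amount", :multi_currency => true}],
#                   :condition => "IsDeleted = false")
#   # => "SELECT Id, convertCurrency(Amount) FROM Opportunity WHERE IsDeleted = false" (modulo spacing)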

    def self.normalize_module(spec)
      fields = spec.has_key?("fields") ? spec["fields"].map {|f| f.kind_of?(Hash) ? f : {"name" => f} } : []
      spec.merge({
        "fields" => fields
      })
    end
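# Sketch (hypothetical spec): bare string fields are wrapped into hashes, so
#   normalize_module("module" => "Account", "fields" => ["Id", {"name" => "Name"}])
#   # => {"module" => "Account", "fields" => [{"name" => "Id"}, {"name" => "Name"}]}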

    def self.normalize_description(spec)
      spec.merge({
        "input" => spec["input"].map {|f| normalize_module(f) }
      })
    end

    def self.sf_metadata_for_module(spec)
      sf_metadata_for_fields(spec[:fields])
    end

    def self.sf_metadata_for_fields(spec)
      spec.reduce([]) {|acc, f| acc.concat([{
        :name => f[:name],
        :type => f[:type] || "string"
      }])}
    end

    def self.clover_metadata_for_module(spec)
      clover_metadata_for_fields(spec[:fields])
    end


    def self.clover_metadata_for_fields(spec)
      spec.reduce([]) do |acc, f|
        transformation = f[:acts_as] ? f[:acts_as].map {|i| {:type => f[:type] || "string", :name => i}} : [{:type => f[:type] || "string", :name => f[:name]}]
        acc.concat(transformation)
      end
    end

    def self.transformation_acts_as(spec)
      spec[:fields].reduce([]) do |acc, f|
        transformation = f[:acts_as] ? f[:acts_as].map {|i| [f[:name], i]} : [[f[:name], f[:name]]]
        acc.concat(transformation)
      end
    end
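# Sketch (hypothetical field): {:name => "Name", :acts_as => ["Name", "SortName"]}
# expands to the renaming pairs [["Name", "Name"], ["Name", "SortName"]], i.e. one
# source column feeding two output columns in the generated reformat.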

    # def self.to_port(node, port)
    #   "#{to_id(node)}:#{port.to_s}"
    # end

    def self.to_id(node)
      node[:id]
    end

    # Process-wide counter used to mint unique node/edge ids.
    $id = 0
    def self.get_id()
      $id += 1
    end

    def self.create_trash_meta(builder)
      builder.Metadata({:id => "trash_metadata"}) do |builder|
        csv_metadata(builder, {
          :name => "trash_metadata",
          :fields => [{:type => "string", :name => "all"}]
        })
      end
    end

    def self.create_lookup_meta(builder)
      builder.Metadata({:id => "lookup_metadata"}) do |builder|
        csv_metadata(builder, {
          :name => "lookup_metadata",
          :fields => [{:type => "string", :name => "key"}, {:type => "string", :name => "value"}]
        })
      end
    end


    def self.create_run_graph(file, options={})
      subgraphs = options[:subgraphs]

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Run graph"
        }) do
          builder.Global do
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
            property_file(builder, {:id => "params_params", :fileURL => "params.txt"})
            create_trash_meta(builder)
            create_lookup_meta(builder)
          end
          phase = 0
          subgraphs.each do |subgraph|
            builder.Phase(:number => phase + 1) do
              id1 = get_id
              id2 = get_id
              ctl = "function integer generate() {$out.0.all = \"FLOW=#{subgraph[:flow]}\";return OK;}"
              build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => id1, :id => id1, :generate => ctl}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id2}", :fileURL => "params.txt", :outputFieldNames => "false", :quotedStrings => "false"}))
            end
            builder.Phase(:number => phase + 2) do
              id1 = get_id
              id2 = get_id
              ctl = "function integer generate() {$out.0.all = \"NAME=#{subgraph[:name]}\";return OK;}"
              build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => id1, :id => id1, :generate => ctl}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id2}", :fileURL => "params.txt", :outputFieldNames => "false", :append => "true", :quotedStrings => "false"}))
            end

            builder.Phase(:number => phase + 3) do
              build_node2(builder, GoodData::CloverGenerator::Nodes.run_graph2({:guiName => subgraph[:name], :name => subgraph[:name], :id => subgraph[:flow], :graphName => subgraph[:file]}))
            end
            phase += 4
          end
        end
      end
    end
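# Shape of the generated run graph (for any subgraph list): each flow gets
# three phases -- write "FLOW=..." to params.txt, append "NAME=...", then
# execute the subgraph file via a RunGraph node -- so params.txt is always
# populated before the child graph starts.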

    def self.create_incremental_downloader_run_graph(file, sources, options={})
      # subgraphs = options[:subgraphs]

      merged_sources = sources.reduce([]) do |memo, source|
        merged_source = memo.find {|s| s[:object] == source[:object]}
        if merged_source
          merged_source[:fields] = (merged_source[:fields] + source[:fields]).uniq_by {|f| f[:name]}
        else
          memo.push(source)
        end
        memo
      end

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Run graph"
        }) do
          builder.Global do
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
            property_file(builder, {:id => "params_params", :fileURL => "params.txt"})
            create_trash_meta(builder)
            create_lookup_meta(builder)
            build_node2(builder, GoodData::CloverGenerator::Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => GoodData::CloverGenerator::Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
          end
          phase = 0

          merged_sources.each do |source|
            module_name = source[:object]
            file = source[:id] || module_name
            dataset = file || module_name

            normalize_code = <<HEREDOC
boolean done = false;
function integer count() {

  if (indexOf($in.0.key, "#{dataset}_LAST_RUN") != -1) {
    return 4;
  }
  else {
    return 0;
  }
}

string last_run = null;
string end_date = null;

function integer transform(integer idx) {
  if (last_run == null) {
    last_run = $in.0.value;
  }
  if (end_date == null) {
    end_date = jodaDate2str(today(), "yyyy-MM-dd'T'HH:mm:ss.SSSZZ", "en_US", 'UTC');
  }

  if (idx == 1) {
    $out.0.all = "#{dataset}_TRUNCATE_DATE=" + jodaDate2str(jodaStr2date(last_run, ["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC'), "yyyy-MM-dd HH:mm:ss", 'en_US', 'UTC');
  } else if (idx == 2) {
    $out.0.all = "#{dataset}_START=" + last_run;
  } else if (idx == 3) {
    $out.0.all = "#{dataset}_END=" + end_date;
  } else {
    $out.0.all = "#{dataset}_LAST_RUN=" + end_date;
  }

  return OK;
}
HEREDOC

            builder.Phase(:number => phase += 1) do
              generate_func = <<HEREDOC
function integer generate() {
  $out.0.key = "#{dataset}_LAST_RUN";
  $out.0.value = "1970-01-01T00:00:00.000+00:00";
  return OK;
}
HEREDOC

              join_func = <<HEREDOC
function integer transform() {
  $out.0.key = nvl2($in.1.value, $in.1.key, $in.0.key);
  $out.0.value = nvl2($in.1.value, $in.1.value, $in.0.value);
  return OK;
}
HEREDOC

              build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => "generator_#{dataset}", :id => "generator_#{dataset}", :generate => generate_func}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.lookup_reader_writer2({:lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{dataset}"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.hash_join2({:id => "join_#{dataset}", :joinType => "leftOuter", :joinKey => "$key=$key", :transformation => join_func}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "join_#{dataset}:0", :fromNode => "generator_#{dataset}:0", :metadata => "lookup_metadata", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "join_#{dataset}:1", :fromNode => "gd_lookup_reader_#{dataset}:0", :metadata => "lookup_metadata", :id => get_id()}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.normalizer2({:name => "normalizer_#{dataset}", :id => "normalizer_#{dataset}", :normalize => normalize_code}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "normalizer_#{dataset}:0", :fromNode => "join_#{dataset}:0", :metadata => "lookup_metadata", :id => get_id()}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:quotedStrings => "false", :name => "params_writer_#{dataset}", :id => "params_writer_#{dataset}", :fileURL => "params.txt", :outputFieldNames => "false", :append => "true"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "params_writer_#{dataset}:0", :fromNode => "normalizer_#{dataset}:0", :metadata => "trash_metadata", :id => get_id()}))
            end
          end
          builder.Phase(:number => phase += 1) do
            build_node2(builder, GoodData::CloverGenerator::Nodes.run_graph2({:guiName => "incremental", :name => "incremental_downloaders", :id => "downloaders", :graphName => "graphs/incremental.grf"}))
          end
        end
      end
    end



    def self.build_attribute_df(dataset, attribute)
      {
        :gd_dataset_attribute_display_form => {
          :id => "label.#{dataset[:gd_name] || dataset[:id]}.#{attribute[:name]}",
          :title => "label",
          :prettyId => "label_#{dataset[:gd_name] || dataset[:id]}_#{attribute[:name]}",
          :referenceKey => true,
          :assignedMetadataField => attribute[:meta] || attribute[:name]
        }
      }
    end

    def self.build_label_df(dataset, attribute, label)
      {
        :gd_dataset_attribute_display_form => {
          :id => "label.#{dataset[:gd_name] || dataset[:id]}.#{attribute[:name]}.#{label[:name]}",
          :title => "label",
          :prettyId => "label_#{dataset[:gd_name] || dataset[:id]}_#{attribute[:name]}_#{label[:name]}",
          :referenceKey => false,
          :assignedMetadataField => label[:meta] || label[:name]
        }
      }
    end
672
|
+
|
673
|
+
def self.build_gd_dataset_loader_json(dataset)
|
674
|
+
# binding.pry
|
675
|
+
{
|
676
|
+
:gd_dataset => {
|
677
|
+
:attributes => dataset[:fields].find_all {|d| d[:type] == "attribute" || d[:type] == "connection_point"}.map do |attribute|
|
678
|
+
{
|
679
|
+
:gd_dataset_attribute => {
|
680
|
+
:id => "attr.#{dataset[:gd_name] || dataset[:id]}.#{attribute[:name]}",
|
681
|
+
:title => "XXX",
|
682
|
+
:prettyId => "attr_#{dataset[:gd_name] || dataset[:id]}_#{attribute[:name]}",
|
683
|
+
:selectedDisplayForm => build_attribute_df(dataset, attribute),
|
684
|
+
:assignedMetadataField => attribute[:meta] || attribute[:name],
|
685
|
+
:displayForms => dataset[:fields].find_all {|f| f[:type] == "label" && f[:for] == attribute[:name]}.map do |label|
|
686
|
+
build_label_df(dataset, attribute, label)
|
687
|
+
end.concat([build_attribute_df(dataset, attribute)])
|
688
|
+
}
|
689
|
+
}
|
690
|
+
end,
|
691
|
+
:simpleFacts => dataset[:fields].find_all {|d| d[:type] == "fact"}.map do |fact|
|
692
|
+
{
|
693
|
+
:gd_dataset_fact => {
|
694
|
+
:id => "fact.#{dataset[:gd_name] || dataset[:id]}.#{fact[:name]}",
|
695
|
+
:title => "FFF",
|
696
|
+
:prettyId => "fact_#{dataset[:gd_name] || dataset[:id]}_#{fact[:name]}",
|
697
|
+
:type => "DECIMAL",
|
698
|
+
:assignedMetadataField => fact[:meta]
|
699
|
+
}
|
700
|
+
}
|
701
|
+
end,
|
702
|
+
:dateFacts => dataset[:fields].find_all {|d| d[:type] == "date"}.map do |date_attribute|
|
703
|
+
{
|
704
|
+
:gd_dataset_fact => {
|
705
|
+
:id => "dt.#{dataset[:gd_name] || dataset[:id]}.#{date_attribute[:fact] || date_attribute[:name]}",
|
706
|
+
:title => "TTT",
|
707
|
+
:prettyId => "dt_#{dataset[:gd_name] || dataset[:id]}_#{date_attribute[:fact] || date_attribute[:name]}",
|
708
|
+
:type => "DATE",
|
709
|
+
:assignedMetadataField => nil
|
710
|
+
}
|
711
|
+
}
|
712
|
+
end,
|
713
|
+
:dateAttributes => dataset[:fields].find_all {|d| d[:type] == "date"}.map do |date_attribute|
|
714
|
+
{
|
715
|
+
:gd_dataset_date_attribute => {
|
716
|
+
:id => "#{date_attribute[:dd]}.date",
|
717
|
+
:title => "DDD",
|
718
|
+
:prettyId => "#{date_attribute[:name]}",
|
719
|
+
:assignedMetadataField => date_attribute[:meta],
|
720
|
+
:associatedFact => {
|
721
|
+
:gd_dataset_fact => {
|
722
|
+
:id => "dt.#{dataset[:gd_name] || dataset[:id]}.#{date_attribute[:name]}",
|
723
|
+
:title => "TTT",
|
724
|
+
:prettyId => "dt_#{dataset[:gd_name] || dataset[:id]}_#{date_attribute[:name]}",
|
725
|
+
:type => "DATE",
|
726
|
+
:assignedMetadataField => nil
|
727
|
+
}
|
728
|
+
},
|
729
|
+
:displayForms => [
|
730
|
+
{
|
731
|
+
:gd_dataset_attribute_display_form => {
|
732
|
+
:id => "#{date_attribute[:dd]}.date.yyyymmdd",
|
733
|
+
:title => "yyyy-mm-dd (#{date_attribute[:dd]})",
|
734
|
+
:prettyId => "#{date_attribute[:dd]}_date_yyyymmdd",
|
735
|
+
:referenceKey => true,
|
736
|
+
:assignedMetadataField => nil
|
737
|
+
}
|
738
|
+
}
|
739
|
+
],
|
740
|
+
:selectedDisplayForm => {
|
741
|
+
:gd_dataset_attribute_display_form => {
|
742
|
+
:id => "#{date_attribute[:dd]}.date.yyyymmdd",
|
743
|
+
:title => "yyyy-mm-dd (#{date_attribute[:dd]})",
|
744
|
+
:prettyId => "#{date_attribute[:dd]}_date_yyyymmdd",
|
745
|
+
:referenceKey => true,
|
746
|
+
:assignedMetadataField => nil
|
747
|
+
}
|
748
|
+
}
|
749
|
+
}
|
750
|
+
}
|
751
|
+
end,
|
752
|
+
:referencedAttributes => dataset[:fields].find_all {|d| d[:type] == "reference"}.map do |ref_attribute|
|
753
|
+
{
|
754
|
+
:gd_dataset_attribute => {
|
755
|
+
:id => "attr.#{ref_attribute[:schema]}.#{ref_attribute[:ref]}",
|
756
|
+
:title => "attr.#{ref_attribute[:schema]}.#{ref_attribute[:ref]}",
|
757
|
+
:prettyId => "attr_#{ref_attribute[:schema]}_#{ref_attribute[:ref]}",
|
758
|
+
:displayForms => [],
|
759
|
+
:selectedDisplayForm => {
|
760
|
+
:gd_dataset_attribute_display_form => {
|
761
|
+
:id => "label.#{ref_attribute[:schema]}.#{ref_attribute[:ref]}",
|
762
|
+
:title => "attr.#{ref_attribute[:schema]}.#{ref_attribute[:ref]}",
|
763
|
+
:prettyId => "attr_#{ref_attribute[:schema]}_#{ref_attribute[:ref]}",
|
764
|
+
:referenceKey => false,
|
765
|
+
:assignedMetadataField => nil
|
766
|
+
}
|
767
|
+
},
|
768
|
+
:assignedMetadataField => ref_attribute[:meta]
|
769
|
+
}
|
770
|
+
}
|
771
|
+
end,
|
772
|
+
:fieldsWithMetadataConflict => [],
|
773
|
+
:entitiesWithoutMetadata => [],
|
774
|
+
:entitiesWithoutValue => [],
|
775
|
+
:datesWithFactConflict => [],
|
776
|
+
:unassigneddateFacts => [],
|
777
|
+
:datasetTitle => dataset[:gd_name] || dataset[:id],
|
778
|
+
:datasetId => "dataset.#{dataset[:gd_name] || dataset[:id]}"
|
779
|
+
}
|
780
|
+
}
|
781
|
+
end

    def self.create_es_write_json(spec)
      {
        :entityName => spec[:id] || spec[:object],
        :fieldsMapping => spec[:fields].reduce({}) do |memo, f|
          if f.has_key?(:acts_as)
            f[:acts_as].each do |a|
              type = case a
              when "Id"
                "recordid"
              when "timestamp"
                "timestamp"
              else
                f[:type] || "attribute"
              end
              memo[a] = {
                :name => a,
                :type => type
              }
            end
          else
            type = case f[:name]
            when "Id"
              "recordid"
            when "timestamp"
              "timestamp"
            else
              f[:type] || "attribute"
            end

            memo[f[:name]] = {
              :name => f[:name],
              :type => type
            }
          end
          memo
        end
      }
    end
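# Sketch (hypothetical spec): for :fields => [{:name => "Id"}, {:name => "Amount", :type => "fact"}]
# the mapping comes out as
#   {"Id" => {:name => "Id", :type => "recordid"},
#    "Amount" => {:name => "Amount", :type => "fact"}}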


    def self.create_moving_graph(file, options={})
      source = options[:source]
      target = options[:target]
      operation = options[:operation]
      force = options[:force] || false

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "File Copy"
        }) do
          builder.Global do
            builder.Metadata({:id => "list_metadata"}) do |builder|
              csv_metadata(builder, {
                :name => "list_metadata",
                :fields => [{:name => "filePath", :type => "string"}]
              })
            end
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
          end
          builder.Phase(:number => 0) do
            transformation_source = "function integer transform() {\n" + ([["filePath", "filePath"]].map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
            build_node2(builder, GoodData::CloverGenerator::Nodes.file_list2(:baseURL => target, :id => "file_list", :transformation => transformation_source))

            build_node2(builder, GoodData::CloverGenerator::Nodes.file_delete2(:baseURL => "${filePath}", :id => "file_delete"))
            build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "file_delete:0", :fromNode => "file_list:0", :metadata => "list_metadata", :id => get_id()}))
          end
          builder.Phase(:number => 1) do
            build_node2(builder, GoodData::CloverGenerator::Nodes.file_copy2({:sourcePath => source, :targetPath => target, :operation => operation, :id => "file_copy"}))
          end
        end
      end
    end

    def self.create_uploading_graph(file, options={})
      metadata = options[:metadata]
      dataset_infos = [options[:datasets]]
      input_file = options[:input_file]

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Goodsales Downloader"
        }) do
          builder.Global do
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
            dataset_infos.each do |dataset_info|
              dataset = dataset_info[:id]
              builder.Metadata({:id => "#{dataset}_load"}) do |builder|
                csv_metadata(builder, metadata)
              end
            end
          end
          builder.Phase(:number => 0) do
            dataset_infos.each do |dataset_info|
              dataset = dataset_info[:id]
              gd_dataset = dataset_info[:gd_name] || dataset_info[:id]
              to_svinstvo = build_gd_dataset_loader_json(dataset_info)
              build_node2(builder, GoodData::CloverGenerator::Nodes.reader2({:name => "#{dataset} CSV Loader", :id => "#{dataset}_loader", :fileURL => "${PROJECT}/data/#{dataset}.csv"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{dataset}_load:0", :fromNode => "#{dataset}_loader:0", :metadata => "#{dataset}_load", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.gd_loader2({:name => "#{dataset} Loader", :id => "#{dataset}_load", :dataset => "dataset.#{gd_dataset}", :datasetFieldMappings => to_svinstvo.to_json}))
            end
          end
        end
      end
    end

    # Note: currently a verbatim copy of create_uploading_graph.
    def self.create_es_uploading_graph(file, options={})
      metadata = options[:metadata]
      dataset_infos = [options[:datasets]]
      input_file = options[:input_file]

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Goodsales Downloader"
        }) do
          builder.Global do
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
            dataset_infos.each do |dataset_info|
              dataset = dataset_info[:id]
              builder.Metadata({:id => "#{dataset}_load"}) do |builder|
                csv_metadata(builder, metadata)
              end
            end
          end
          builder.Phase(:number => 0) do
            dataset_infos.each do |dataset_info|
              dataset = dataset_info[:id]
              gd_dataset = dataset_info[:gd_name] || dataset_info[:id]
              to_svinstvo = build_gd_dataset_loader_json(dataset_info)
              build_node2(builder, GoodData::CloverGenerator::Nodes.reader2({:name => "#{dataset} CSV Loader", :id => "#{dataset}_loader", :fileURL => "${PROJECT}/data/#{dataset}.csv"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{dataset}_load:0", :fromNode => "#{dataset}_loader:0", :metadata => "#{dataset}_load", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.gd_loader2({:name => "#{dataset} Loader", :id => "#{dataset}_load", :dataset => "dataset.#{gd_dataset}", :datasetFieldMappings => to_svinstvo.to_json}))
            end
          end
        end
      end
    end

    def self.create_metadata(mod)
      module_name = mod[:object]
      file = mod["file"] || module_name
      # dataset = mod["dataset"] || file || module_name
      {
        :name => file,
        :fields => translate(clover_metadata_for_module(mod))
      }
    end


    def self.create_sf_downloading_graph(file, sources, options={})
      metadata = options[:metadata]
      s3_backup = true && options[:s3_backup]

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Goodsales Downloader"
        }) do
          builder.Global do
            sources.each do |mod|
              module_name = mod[:object]
              file = mod[:id] || module_name
              dataset = file || module_name

              builder.Metadata({:id => "#{file}_sf_metadata"}) do |builder|
                csv_metadata(builder, {
                  :name => "#{file}_sf_metadata",
                  :fields => translate(sf_metadata_for_module(mod))
                })
              end
              builder.Metadata({:id => "#{file}_clover_metadata"}) do |builder|
                csv_metadata(builder, {
                  :name => "#{file}_clover_metadata",
                  :fields => translate(clover_metadata_for_module(mod))
                })
              end
            end

            sf_connection(builder, {})
            property(builder, {:id => "SFDC_CLIENT_ID", :value => "gooddata/gooddata/"})
            property(builder, {:id => "SFDC_LOGIN_HOSTNAME", :value => options[:sf_server] || "login.salesforce.com"})
            property(builder, {:id => "SFDC_NAME", :value => "Salesforce connection"})
            property(builder, {:id => "SFDC_PASSWORD", :value => options[:password]})
            property(builder, {:id => "SFDC_TOKEN", :value => options[:token]})
            property(builder, {:id => "SFDC_USERNAME", :value => options[:login]})
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
          end

          builder.Phase(:number => 0) do
            sources.each do |mod|
              module_name = mod[:object]
              file = mod[:id] || module_name
              dataset = file || module_name

              fields = mod[:fields]
              mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
              add = fields.map do |f|
                "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:label]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
              end

              stuff = mapping + add.join(",") + "]}}"

              build_node2(builder, GoodData::CloverGenerator::Nodes.sfdc_reader2({:name => "#{file} SF Writer", :id => "#{file}_sf", :soql => generate_select(mod), :sfdcConnection => "SFDC", :fieldsMapping => stuff}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_reformat:0", :fromNode => "#{file}_sf:0", :metadata => "#{file}_sf_metadata", :id => get_id()}))

              transformation_source = "function integer transform() {\n" + (transformation_acts_as(mod).map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
              build_node2(builder, GoodData::CloverGenerator::Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_reformat", :transformation => transformation_source}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_reformat:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "data/#{dataset.downcase}.csv", :outputFieldNames => "true"}))
              if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "bucket", :outputFieldNames => true, :quotedStrings => false})) end
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
              if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_clover_metadata", :id => get_id()})) end
            end
          end
        end
      end
    end

    def self.create_es_downloading_graph(file, sources, options={})
      metadata = options[:metadata]
      s3_backup = true && options[:s3_backup]

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({
          :name => "Goodsales Downloader"
        }) do
          builder.Global do
            sources.each do |mod|
              module_name = mod[:object]
              file = mod[:id] || module_name
              dataset = file || module_name

              es_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
                :name => "#{file}_es_metadata",
                :fields => clover_metadata_for_module(mod)
              })

              builder.Metadata({:id => "#{file}_es_metadata"}) do |builder|
                csv_metadata(builder, es_metadata.change do |m|
                  m.remove("timestamp")
                end.to_hash)
              end
            end

            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
          end

          builder.Phase(:number => 0) do
            sources.each do |mod|
              module_name = mod[:object]
              file = mod[:id] || module_name
              dataset = file || module_name

              es_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
                :name => "#{file}_es_metadata",
                :fields => clover_metadata_for_module(mod)
              })
              es_metadata = es_metadata.change do |m|
                m.remove("timestamp")
              end.to_hash

              fields = es_metadata[:fields]

              e = Es::Entity.new("x", {
                :file => "none",
                :fields => fields.map do |f|
                  name = f[:name]
                  if name == "Id"
                    Es::Field.new('Id', 'recordid')
                  else
                    Es::Field.new(name, 'attribute')
                  end
                end,
                :timeframe => Es::Timeframe::parse("latest")
              })

              stuff = {
                :entityName => dataset,
                :fieldsMapping => fields.inject({}) do |memo, field|
                  name = field[:name]
                  memo[name] = name
                  memo
                end,
                :eventStoreFieldToTypeMapping => fields.inject({}) do |memo, field|
                  name = field[:name]
                  if name == "Id"
                    memo[name] = "recordid"
                  else
                    memo[name] = "attribute"
                  end
                  memo
                end,
                :outputMetadataName => "#{file}_es_metadata"
              }

              readmap = {
                :columns => e.to_extract_fragment('pid')["readTask"]["readMap"].first[:columns],
                :populates => e.to_extract_fragment('pid')["readTask"]["readMap"].first[:populates]
              }

              # binding.pry
              build_node2(builder, GoodData::CloverGenerator::Nodes.es_reader2({:name => "#{file} ES Reader", :id => "#{file}_es", :entityFieldsMapping => stuff.to_json, :readMap => readmap.to_json}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_es:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "${PROJECT}/data/#{dataset.downcase}.csv", :outputFieldNames => "true", :makeDirs => "true"}))
              if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "bucket", :outputFieldNames => true, :quotedStrings => false})) end
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
              if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_es_metadata", :id => get_id()})) end
            end
          end
        end
      end
    end

    def self.create_incremental_downloading_graph(file, sources, options={})
      metadata = options[:metadata]
      store = options[:store] || "${GDC_EVENTSTORE}"

      merged_sources = sources.reduce([]) do |memo, source|
        merged_source = memo.find {|s| s[:object] == source[:object]}
        if merged_source
          merged_source[:fields] = (merged_source[:fields] + source[:fields]).uniq_by {|f| f[:name]}
        else
          memo.push(source)
        end
        memo
      end

      File.open(file, "w") do |file|
        builder = Builder::XmlMarkup.new(:target => file, :indent => 2)
        builder.instruct! :xml, :version => "1.0", :encoding => "UTF-8"
        builder.Graph({:name => "Goodsales incremental Downloader"}) do
          builder.Global do
            property_file(builder, {:id => "params_params", :fileURL => "params.txt"})
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})

            create_lookup_meta(builder)
            merged_sources.each do |mod|
              module_name = mod[:object]
              file = mod[:id] || module_name

              sf_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
                :name => "#{file}_sf_metadata",
                :fields => sf_metadata_for_module(mod)
              })

              clover_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
                :name => "#{file}_clover_metadata",
                :fields => clover_metadata_for_module(mod)
              })

              builder.Metadata({:id => "#{file}_sf_metadata"}) do |builder|
                csv_metadata(builder, sf_metadata.to_hash)
              end

              builder.Metadata({:id => "#{file}_clover_metadata"}) do |builder|
                csv_metadata(builder, clover_metadata.to_hash)
              end

              begin
                builder.Metadata({:id => "#{file}_es_metadata"}) do |builder|
                  csv_metadata(builder, clover_metadata.change do |m|
                    m.remove("timestamp")
                    m.add(:name => "timestamp", :type => "date")
                  end.to_hash)
                end
              rescue GoodData::CloverGenerator::DSL::RemoveMetadataFieldError => e
                exit_now!("Removing field \"#{e.field}\" failed from metadata \"#{e.metadata.name}\" for source \"#{file}\".")
              end
              # build_node2(builder, GoodData::CloverGenerator::Nodes.lookup2({:lookupTable => "params_lookup", :fileURL => "${PROJECT}/params.txt", :id => "params_lookup_id", :key => "key", :metadata => "lookup_metadata", :name => "params_lookup"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => GoodData::CloverGenerator::Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
            end

            sf_connection(builder, {})
            property(builder, {:id => "SFDC_CLIENT_ID", :value => "gooddata/gooddata/"})
            property(builder, {:id => "SFDC_LOGIN_HOSTNAME", :value => options[:sf_server] || "login.salesforce.com"})
            property(builder, {:id => "SFDC_NAME", :value => "Salesforce connection"})
            property(builder, {:id => "SFDC_PASSWORD", :value => options[:password]})
            property(builder, {:id => "SFDC_TOKEN", :value => options[:token]})
            property(builder, {:id => "SFDC_USERNAME", :value => options[:login]})
            property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
          end

          phase = 1

          merged_sources.each do |mod|
            module_name = mod[:object]
            file = mod[:id] || module_name
            dataset = file || module_name
            s3_backup = true

            builder.Phase(:number => phase += 1) do
              build_node2(builder, GoodData::CloverGenerator::Nodes.es_truncate2({:guiName => dataset, :store => store, :entity => dataset, :timestamp => "${#{dataset}_TRUNCATE_DATE}", :name => "#{module_name} es truncate", :id => "#{module_name}_es_truncate"}))
            end

            builder.Phase(:number => phase += 1) do
              fields = mod[:fields]
              mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
              add = fields.map do |f|
                "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:label]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
              end

              stuff = mapping + add.join(",") + "]}}"

              build_node2(builder, GoodData::CloverGenerator::Nodes.sfdc_reader2({:name => "#{file} SF Writer", :id => "#{file}_sf", :soql => generate_incremental_select(mod), :sfdcConnection => "SFDC", :fieldsMapping => stuff, :mandatoryFields => fields.map {|f| f[:name] + ";"}.join}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_reformat:0", :fromNode => "#{file}_sf:0", :metadata => "#{file}_sf_metadata", :id => get_id()}))

              transformation_source = "function integer transform() {\n" + (transformation_acts_as(mod).map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
              es_transformation_source = "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.timestamp = str2date($in.0.timestamp,\"joda:yyyy-MM-dd'T'HH:mm:ss.SSSZZ\");\nreturn OK;\n}"

              build_node2(builder, GoodData::CloverGenerator::Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_reformat", :transformation => transformation_source}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_reformat:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:enabled => "disabled", :name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "data/#{dataset.downcase}.csv", :outputFieldNames => "true"}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_es_reformat", :transformation => es_transformation_source}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_es_reformat:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_clover_metadata", :id => get_id()}))

              if s3_backup then
                build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:enabled => "disabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}", :outputFieldNames => true, :quotedStrings => false}))
              end
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
              if s3_backup then
                build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_clover_metadata", :id => get_id()}))
              end

              build_node2(builder, GoodData::CloverGenerator::Nodes.sort2({:sortKey => "timestamp(a)", :name => "#{file} es Sort", :id => "#{file}_es_sort"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_es_sort:0", :fromNode => "#{file}_es_reformat:0", :metadata => "#{file}_es_metadata", :id => get_id()}))

              build_node2(builder, GoodData::CloverGenerator::Nodes.es_writer2({:name => "#{file} es Writer", :id => "#{file}_es", :store => store, :entityFieldsMapping => create_es_write_json(mod).to_json}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_es:0", :fromNode => "#{file}_es_sort:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
            end

            builder.Phase(:number => phase += 1) do
              generate_func = <<HEREDOC
function integer generate() {
  date time_end = jodaStr2date("${#{dataset}_END}",["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC');
  $out.0.key = "#{dataset}_LAST_RUN";
  $out.0.value = jodaDate2str(time_end,"yyyy-MM-dd'T'HH:mm:ss.SSSZZ", 'en_US', 'UTC');
  return OK;
}
HEREDOC

              build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:guiName => dataset, :name => "generator_#{phase}", :id => "generator_#{phase}", :generate => generate_func}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.lookup_reader_writer2({:guiName => dataset, :lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{phase}"}))
              build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "gd_lookup_reader_#{phase}:0", :fromNode => "generator_#{phase}:0", :metadata => "lookup_metadata", :id => get_id()}))
            end
          end

          # builder.Phase(:number => phase += 1) do
          #   build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:guiName => "generator1", :name => "generator1", :id => "generator1", :generate => generate_func}))
          #   build_node2(builder, GoodData::CloverGenerator::Nodes.lookup_reader_writer2({:lookupTable => "gdLookup0", :id => "gd_lookup_reader", :graphName => "incremental.grf"}))
          #   build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "gd_lookup_reader:0", :fromNode => "generator1:0", :metadata => "lookup_metadata", :id => get_id()}))
          # end
        end
      end
    end
  end
end