gd_bam 0.0.15 → 0.1.0

This diff shows the content changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
Files changed (81)
  1. data/README.md +313 -5
  2. data/bin/bam +126 -48
  3. data/lib/bam/version.rb +1 -1
  4. data/lib/bam.rb +51 -0
  5. data/lib/base/errors.rb +15 -0
  6. data/lib/base/flow.rb +37 -0
  7. data/lib/base/graph.rb +23 -0
  8. data/lib/base/metadata.rb +107 -0
  9. data/lib/base/project.rb +95 -0
  10. data/lib/base/repo.rb +35 -0
  11. data/lib/base/sink.rb +44 -0
  12. data/lib/base/step.rb +47 -0
  13. data/lib/base/tap.rb +167 -0
  14. data/lib/base/taps.rb +19 -0
  15. data/lib/cloud_connect/dsl/cc.rb +42 -0
  16. data/lib/cloud_connect/dsl/es_helpers.rb +49 -0
  17. data/lib/cloud_connect/dsl/helpers.rb +199 -0
  18. data/lib/{nodes → cloud_connect/dsl}/nodes.rb +106 -16
  19. data/lib/cloud_connect/dsl/sf_helpers.rb +39 -0
  20. data/lib/cloud_connect/dsl/structure_helpers.rb +94 -0
  21. data/lib/commands/commands.rb +110 -0
  22. data/lib/commands/deployment.rb +217 -0
  23. data/lib/commands/docs_commands.rb +41 -0
  24. data/lib/commands/gd_commands.rb +95 -0
  25. data/lib/commands/scaffold_commands.rb +103 -0
  26. data/lib/commands/sf_commands.rb +37 -0
  27. data/lib/commands/validators.rb +19 -0
  28. data/lib/compatibility.rb +19 -0
  29. data/lib/compiler/compiler.rb +76 -0
  30. data/lib/compiler/etl_visitor.rb +165 -0
  31. data/lib/dsl/dsl.rb +125 -0
  32. data/lib/generators/downloaders.rb +449 -0
  33. data/lib/generators/etl.rb +261 -0
  34. data/lib/generators/validators.rb +445 -0
  35. data/lib/graphs/docentize.grf +1 -1
  36. data/lib/graphs/dummy.grf +1 -1
  37. data/lib/graphs/goodsales_v2/docentize.grf +47 -0
  38. data/lib/graphs/goodsales_v2/dummy.grf +46 -0
  39. data/lib/graphs/goodsales_v2/load_history.grf +579 -0
  40. data/lib/graphs/goodsales_v2/process_account.grf +47 -0
  41. data/lib/graphs/goodsales_v2/process_activity.grf +222 -0
  42. data/lib/graphs/goodsales_v2/process_activity_dim.grf +88 -0
  43. data/lib/graphs/goodsales_v2/process_activity_owner.grf +48 -0
  44. data/lib/graphs/goodsales_v2/process_forecast.grf +20 -0
  45. data/lib/graphs/goodsales_v2/process_opp_records.grf +84 -0
  46. data/lib/graphs/goodsales_v2/process_opportunity.grf +46 -0
  47. data/lib/graphs/goodsales_v2/process_opportunity_line_item.grf +171 -0
  48. data/lib/graphs/goodsales_v2/process_opportunity_snapshot.grf +94 -0
  49. data/lib/graphs/goodsales_v2/process_owner.grf +48 -0
  50. data/lib/graphs/goodsales_v2/process_stage.grf +51 -0
  51. data/lib/graphs/goodsales_v2/process_stage_history.grf +184 -0
  52. data/lib/graphs/goodsales_v2/process_velocity_duration.grf +140 -0
  53. data/lib/graphs/process_account.grf +1 -1
  54. data/lib/graphs/process_activity.grf +1 -1
  55. data/lib/graphs/process_activity_dim.grf +1 -1
  56. data/lib/graphs/process_activity_owner.grf +1 -1
  57. data/lib/graphs/process_forecast.grf +1 -1
  58. data/lib/graphs/process_opp_records.grf +1 -1
  59. data/lib/graphs/process_opportunity.grf +1 -1
  60. data/lib/graphs/process_opportunity_line_item.grf +1 -1
  61. data/lib/graphs/process_opportunity_snapshot.grf +1 -1
  62. data/lib/graphs/process_owner.grf +1 -1
  63. data/lib/graphs/process_stage.grf +1 -1
  64. data/lib/graphs/process_stage_history.grf +1 -1
  65. data/lib/graphs/process_velocity_duration.grf +1 -1
  66. data/lib/nodes/clover_gen.rb +59 -946
  67. data/lib/nodes/dependency.rb +95 -96
  68. data/lib/runtime.rb +7 -648
  69. data/lib/utils/utils.rb +66 -0
  70. data/templates/flow.rb.erb +7 -6
  71. data/templates/join_template.grf.erb +1 -1
  72. data/templates/reformat_template.grf.erb +1 -1
  73. data/templates/sink.json.erb +28 -0
  74. data/templates/tap.json.erb +3 -5
  75. data/templates/workspace.prm.erb +4 -0
  76. metadata +50 -8
  77. data/lib/contract_checkers/contract_checkers.rb +0 -53
  78. data/lib/dsl/project_dsl.rb +0 -259
  79. data/lib/repo/1_config.json +0 -8
  80. data/templates/dataset.json.erb +0 -13
  81. data/templates/source.json.erb +0 -22
@@ -1,27 +1,8 @@
- module Enumerable
- def uniq_by
- seen = Hash.new { |h,k| h[k] = true; false }
- reject { |v| seen[yield(v)] }
- end
- end
-
- class Hash
- #pass single or array of keys, which will be removed, returning the remaining hash
- def remove!(*keys)
- keys.each{|key| self.delete(key) }
- self
- end
-
- #non-destructive version
- def remove(*keys)
- self.dup.remove!(*keys)
- end
- end
-
-
  module GoodData
  module CloverGenerator

+ include GoodData::Bam
+
  ADD_NA_TRANSFORMATION = <<-trasnf
  function integer generate() {
  integer number_of_fields = length($out.0);
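Note: the Enumerable#uniq_by and Hash#remove monkey-patches removed above duplicate behavior that core Ruby already provides. A minimal sketch of drop-in equivalents (illustrative, not taken from the gem):

    # uniq with a block replaces uniq_by (Array#uniq has accepted a block since Ruby 1.9.2)
    fields = [{ :name => "Id" }, { :name => "Id" }, { :name => "Amount" }]
    fields.uniq { |f| f[:name] }                  # => [{:name=>"Id"}, {:name=>"Amount"}]

    # Hash#reject covers the non-destructive remove; Hash#delete covers remove!
    node = { :id => "n1", :transformation => "..." }
    node.reject { |k, _| k == :transformation }   # => {:id=>"n1"}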
@@ -40,53 +21,6 @@ module GoodData
  }
  trasnf

- def self.parse_json(file)
- begin
- JSON.parse(File.read(file), :symbolize_names => true)
- rescue RuntimeError => e
- puts "Error parsing \"#{file}\": #{e.inspect}"
- end
- end
-
-
- def self.metadata(builder, description)
- builder.Record({
- :fieldDelimiter => description[:fieldDelimiter],
- :name => description[:name],
- :recordDelimiter => description[:recordDelimiter],
- :type => description[:type]
- }) do |record|
- description[:fields].each do |field|
- builder.Field :name => field[:name], :type => field[:type], :nullable => "true"
- end
- end
- end
-
- def self.csv_metadata(builder, description)
-
- sf_description = description.merge({
- :fieldDelimiter => ",",
- :recordDelimiter => "\\n",
- :type => "delimited",
- })
- metadata(builder, sf_description)
- end
-
-
- def self.sf_connection(builder, data)
- builder.Connection({
- :clientId => "${SFDC_CLIENT_ID}",
- :id => "SFDC",
- :loginHostname => "${SFDC_LOGIN_HOSTNAME}",
- :name => "${SFDC_NAME}",
- :password => "${SFDC_PASSWORD}",
- :passwordEncrypted => "true",
- :token => "${SFDC_TOKEN}",
- :type => "SFDC",
- :username => "${SFDC_USERNAME}",
- :passwordEncrypted => "false"
- })
- end

  def self.external_metadata_link(builder, data)
  builder.Metadata({
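Note: an illustrative sketch (not from the package) of what the removed metadata/csv_metadata helpers emitted: a CloverETL <Record> element built with the builder gem, one <Field> per described field, with the CSV delimiters merged in by csv_metadata:

    require "builder"

    description = { :name => "account", :fields => [{ :name => "Id", :type => "string" }] }
    xml = Builder::XmlMarkup.new(:indent => 2)
    xml.Record(:name => description[:name], :type => "delimited",
               :fieldDelimiter => ",", :recordDelimiter => "\\n") do
      description[:fields].each do |f|
        xml.Field :name => f[:name], :type => f[:type], :nullable => "true"
      end
    end
    puts xml.target!   # => <Record ...><Field name="Id" type="string" nullable="true"/></Record>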
@@ -95,83 +29,6 @@ module GoodData
  })
  end

- def self.property(builder, data)
- builder.Property({
- :id => data[:id],
- :name => data[:name] || data[:id],
- :value => data[:value]
- })
- end
-
-
- def self.property_file(builder, data)
- builder.Property({
- :id => data[:id],
- :fileURL => data[:fileURL]
- })
- end
-
- def graph2(file, stuff)
- metadata = stuff[:metadata]
- nodes = stuff[:nodes]
- connections = stuff[:connections]
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "Goodsales Downloader"
- }) do
- builder.Global do
- metadata.each do |m|
- build_metadata2(builder, m)
- end
- connections.each do |conn|
- build_node2(builder, conn)
- end
-
- end
- builder.Phase(:number => 0) do
- nodes.each do |node|
- build_node2(builder, node)
- end
- end
- end
- end
- end
-
- def self.build_node2(builder, node)
- if node[:type] == GoodData::CloverGenerator::Nodes::EDGE
- builder.tag!("Edge", node)
- elsif node[:type] == GoodData::CloverGenerator::Nodes::SF_CONNECTION
- builder.tag!("Connection", node)
- elsif node[:type] == GoodData::CloverGenerator::Nodes::FILE_LIST
- builder.tag!("Node", node.remove(:transformation)) do |xml|
- xml.attr({:name => "outputMapping"}) do |attr|
- transformation = node[:transformation]
- attr.cdata! transformation
- end
- end
- elsif node[:type] == GoodData::CloverGenerator::Nodes::REFORMAT || node[:type] == GoodData::CloverGenerator::Nodes::EXT_HASH_JOIN
- builder.tag!("Node", node.remove(:transformation)) do |xml|
- xml.attr({:name => "transform"}) do |attr|
- transformation = node[:transformation]
- attr.cdata! transformation
- end
- end
- elsif node[:type] == GoodData::CloverGenerator::Nodes::DATA_GENERATOR
- builder.tag!("Node", node.remove(:transformation)) do |xml|
- xml.attr({:name => "generate"}) do |attr|
- transformation = node[:generate]
- attr.cdata! transformation
- end
- end
- elsif node[:type] == GoodData::CloverGenerator::Nodes::PERSISTENT_LOOKUP || node[:type] == GoodData::CloverGenerator::Nodes::GD_LOOKUP
- builder.tag!("LookupTable", node)
- else
- builder.tag!("Node", node)
- end
- end
-
  def self.run_ctl(builder, ctl)
  id1 = rand(10000000000)
  id2 = rand(10000000000)
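Note: the removed build_node2 dispatched on node type and wrote transformation bodies as CDATA child attributes rather than XML attributes. A reduced sketch of that pattern (hypothetical node hash; builder gem):

    require "builder"

    node = { :id => "r1", :type => "REFORMAT",
             :transformation => "function integer transform() { return OK; }" }
    xml = Builder::XmlMarkup.new(:indent => 2)
    xml.tag!("Node", node.reject { |k, _| k == :transformation }) do |n|
      n.attr(:name => "transform") { |a| a.cdata! node[:transformation] }
    end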
@@ -182,15 +39,6 @@ module GoodData
  build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata", :id => "#{id3}"}))
  end

- def build_metadata2(builder, node)
- builder.Metadata({:id => node[:id]}) do
- csv_metadata(builder, {
- :name => "account_sf_reformat",
- :fields => translate(node[:fields])
- })
- end
- end
-
  def sf_field_mapping(stuff)
  fields = stuff["fields"]
  mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
@@ -210,70 +58,57 @@ module GoodData
  }
  end

- def self.validate_gd_datasets_metadata(datasets, gd_datasets)
-
- init = {
- :datasets => {}
- }
-
- datasets.reduce(init) do |memo, dataset|
- gd_name = dataset[:gd_name] || dataset[:id]
- gd_dataset = gd_datasets.find {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "") == gd_name}
- gd_dataset_names = gd_datasets.map {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "")}.find_all {|d| d.index(".dt").nil?}
-
- next(memo) if gd_dataset.nil?
-
- refs = dataset[:fields].find_all {|f| f[:type] == "reference"}
- refs.each do |ref|
- (memo[:datasets][gd_name] ||= []) << ref unless gd_dataset_names.include?(ref[:schema])
- ref_attr_indentifier = "attr.#{ref[:schema]}.#{ref[:ref]}"
- (memo[:datasets][gd_name] ||= []) << ref if GoodData::MdObject[ref_attr_indentifier].nil?
- end
-
- labels = dataset[:fields].find_all {|f| f[:type] == "label"}
- gd_labels = gd_dataset['dataSet']['content']['dataLoadingColumns'].map {|label| GoodData.get(label)}
- labels.each do |label|
- label_name = label[:name]
- label_identifier = "label.#{gd_name}.#{label[:for]}.#{label_name}"
- (memo[:datasets][gd_name] ||= []) << label if GoodData::MdObject[label_identifier].nil?
- end
-
- facts = dataset[:fields].find_all {|f| f[:type] == "fact"}
- gd_facts = gd_dataset['dataSet']['content']['facts'].map {|fact| GoodData.get(fact)}
- facts.each do |fact|
- fact_name = fact[:name]
- unless gd_facts.any? {|a| a['fact']['meta']['identifier'] == "fact.#{gd_name}.#{fact_name}"}
- (memo[:datasets][gd_name] ||= []) << fact
- end
- end
-
- attributes = dataset[:fields].find_all {|f| f[:type] == "attribute" || f[:type] == "connection_point"}
- gd_attributes = gd_dataset['dataSet']['content']['attributes'].map {|attr| GoodData.get(attr)}
- attributes.each do |attr|
- attr_name = attr[:name]
- unless gd_attributes.any? {|a| a['attribute']['meta']['identifier'] == "attr.#{gd_name}.#{attr_name}"}
- (memo[:datasets][gd_name] ||= []) << attr
- end
- end
- memo
- end
- end
-
+ # def self.validate_gd_datasets_metadata(datasets, gd_datasets)
+ #
+ # init = {
+ # :datasets => {}
+ # }
+ #
+ # datasets.reduce(init) do |memo, dataset|
+ # gd_name = dataset[:gd_name] || dataset[:id]
+ # gd_dataset = gd_datasets.find {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "") == gd_name}
+ # gd_dataset_names = gd_datasets.map {|d| d["dataSet"]["meta"]["identifier"].gsub("dataset.", "")}.find_all {|d| d.index(".dt").nil?}
+ #
+ # next(memo) if gd_dataset.nil?
+ #
+ # refs = dataset[:fields].find_all {|f| f[:type] == "reference"}
+ # refs.each do |ref|
+ # (memo[:datasets][gd_name] ||= []) << ref unless gd_dataset_names.include?(ref[:schema])
+ # ref_attr_indentifier = "attr.#{ref[:schema]}.#{ref[:ref]}"
+ # (memo[:datasets][gd_name] ||= []) << ref if GoodData::MdObject[ref_attr_indentifier].nil?
+ # end
+ #
+ # labels = dataset[:fields].find_all {|f| f[:type] == "label"}
+ # gd_labels = gd_dataset['dataSet']['content']['dataLoadingColumns'].map {|label| GoodData.get(label)}
+ # labels.each do |label|
+ # label_name = label[:name]
+ # label_identifier = "label.#{gd_name}.#{label[:for]}.#{label_name}"
+ # (memo[:datasets][gd_name] ||= []) << label if GoodData::MdObject[label_identifier].nil?
+ # end
+ #
+ # facts = dataset[:fields].find_all {|f| f[:type] == "fact"}
+ # gd_facts = gd_dataset['dataSet']['content']['facts'].map {|fact| GoodData.get(fact)}
+ # facts.each do |fact|
+ # fact_name = fact[:name]
+ # unless gd_facts.any? {|a| a['fact']['meta']['identifier'] == "fact.#{gd_name}.#{fact_name}"}
+ # (memo[:datasets][gd_name] ||= []) << fact
+ # end
+ # end
+ #
+ # attributes = dataset[:fields].find_all {|f| f[:type] == "attribute" || f[:type] == "connection_point"}
+ # gd_attributes = gd_dataset['dataSet']['content']['attributes'].map {|attr| GoodData.get(attr)}
+ # attributes.each do |attr|
+ # attr_name = attr[:name]
+ # unless gd_attributes.any? {|a| a['attribute']['meta']['identifier'] == "attr.#{gd_name}.#{attr_name}"}
+ # (memo[:datasets][gd_name] ||= []) << attr
+ # end
+ # end
+ # memo
+ # end
+ # end

- def self.validate_sf_metadata(sf_client, sources)
- sources.reduce({}) do |memo, source|
- sf_object = source[:object]
- u = sf_client.describe(sf_object)
- sf_fields = u[:describeSObjectResponse][:result][:fields].map {|field| field[:name]}
- fields_to_validate = source[:fields].map {|field| field[:name]}
- memo[sf_object] = (fields_to_validate - sf_fields)
- memo
- end
- end

- def self.get_sf_client(params)
- Salesforce::Client.new(params[:sf_login], params[:sf_password] + params[:sf_token], :server => params[:sf_server])
- end
+

  def self.download_metadata(downloaders_spec)
  mods = []
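Note: the removed validate_sf_metadata reported, per Salesforce object, which spec fields are missing from the live schema. A self-contained sketch of the same set difference (the sf_fields_by_object hash stands in for the sf_client.describe call):

    def missing_sf_fields(sf_fields_by_object, sources)
      sources.reduce({}) do |memo, source|
        available = sf_fields_by_object[source[:object]] || []
        memo.merge(source[:object] => source[:fields].map { |f| f[:name] } - available)
      end
    end

    missing_sf_fields({ "Opportunity" => ["Id", "Amount"] },
                      [{ :object => "Opportunity",
                         :fields => [{ :name => "Id" }, { :name => "StageName" }] }])
    # => {"Opportunity"=>["StageName"]}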
@@ -299,14 +134,6 @@ module GoodData
  downloaders_spec.merge({"input" => mods})
  end

- def self.save_metadata(filename, data)
- File.open(filename, "w") do |file|
- builder = Builder::XmlMarkup.new(:target => file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- csv_metadata(builder, data)
- end
- end
-
  def self.translate_field(what)
  dict = {
  nil => "string",
@@ -332,79 +159,6 @@ module GoodData
  dict[what]
  end

- def self.translate(fields)
- fields
- # fields.map do |f|
- # f.merge({
- # :type => translate_field(f[:type])
- # })
- # end
- end
-
-
-
- def self.find_by_name(fields, name)
- fields.find do |m|
- xname = m["module"]
- output = m["file"] || xname
- output == name
- end
- end
-
- def self.find_by_dataset(fields, name)
- fields.find do |m|
- md = m["module"]
- file = m["file"]
- dataset = m["dataset"]
- (dataset || file || md) == name
- end
- end
-
-
- def self.merge_modules(spec, base_spec)
- modules = spec["input"]
- resolved_modules = modules.map do |mod|
- mod_name = mod["file"] || mod["module"]
- fields = mod["fields"]
- default = find_by_name(base_spec["input"], mod_name)
- if default.nil?
- mod
- else
- clashing_fields = ((default["fields"] || []) & (mod["fields"]))
- unless clashing_fields.empty?
- fail "There are fields \"#{clashing_fields.join(', ')}\" that are clashing with the default definition"
- end
- master_fields = default["fields"]
- redefinitions = fields.find_all {|f| f.has_key?(:acts_as)}
- exclusions = redefinitions.reduce([]) {|memo, e| memo.concat([*e[:acts_as]])}
- mod.merge({
- "fields" => (master_fields.reject {|mf| exclusions.include? mf["name"]}) + fields
- })
- end
- end
- spec.merge({
- "input" => resolved_modules
- })
- end
-
- def self.generate_incremental_select(spec)
- if spec[:condition].nil? || spec[:condition].empty?
- spec[:condition] = "SystemModstamp > ${#{spec[:id]}_START} AND SystemModstamp <= ${#{spec[:id]}_END}"
- else
- spec[:condition] += " AND SystemModstamp > ${#{spec[:id]}_START} AND SystemModstamp <= ${#{spec[:id]}_END}"
- end
- generate_select(spec)
- end
-
- def self.generate_select(spec)
- fields = spec[:fields].map do |f|
- f.has_key?(:multi_currency) ? "convertCurrency(#{f[:name]})" : f[:name]
- end
- condition = spec[:condition].nil?() ? "" : "WHERE #{spec[:condition]}"
- limit = spec[:limit].nil?() ? "" : "LIMIT #{spec[:limit]}"
- "SELECT #{fields.join(', ')} FROM #{spec[:object]} #{condition} #{limit}"
- end
-
  def self.normalize_module(spec)
  fields = spec.has_key?("fields") ? spec["fields"].map {|f| f.kind_of?(Hash) ? f : {"name" => f} } : []
  spec.merge({
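Note: for reference, the removed generate_select/generate_incremental_select pair built SOQL like the following (illustrative spec; the ${..._START}/${..._END} placeholders are CloverETL graph parameters resolved at run time):

    spec = { :id => "opportunity", :object => "Opportunity",
             :fields => [{ :name => "Id" }, { :name => "Amount", :multi_currency => true }] }
    fields = spec[:fields].map { |f| f.has_key?(:multi_currency) ? "convertCurrency(#{f[:name]})" : f[:name] }
    condition = "SystemModstamp > ${#{spec[:id]}_START} AND SystemModstamp <= ${#{spec[:id]}_END}"
    "SELECT #{fields.join(', ')} FROM #{spec[:object]} WHERE #{condition}"
    # => "SELECT Id, convertCurrency(Amount) FROM Opportunity
    #     WHERE SystemModstamp > ${opportunity_START} AND SystemModstamp <= ${opportunity_END}"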
@@ -434,20 +188,13 @@ module GoodData
  end


- def self.clover_metadata_for_fields(spec)
- spec.reduce([]) do |acc, f|
+ def self.clover_metadata_for_fields(fields)
+ fields.reduce([]) do |acc, f|
  transformation = f[:acts_as] ? f[:acts_as].map {|i| {:type => f[:type] || "string", :name => i}} : [{:type => f[:type] || "string", :name => f[:name]}]
  acc.concat(transformation)
  end
  end

- def self.transformation_acts_as(spec)
- spec[:fields].reduce([]) do |acc, f|
- transformation = f[:acts_as] ? f[:acts_as].map {|i| [f[:name], i]} : [[f[:name], f[:name]]]
- acc.concat(transformation)
- end
- end
-
  # def self.to_port(node, port)
  # "#{to_id(node)}:#{port.to_s}"
  # end
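Note: the rename above (spec to fields) only corrects the parameter name; behavior is unchanged. An illustration of the :acts_as expansion the method performs:

    fields = [{ :name => "Id", :acts_as => ["Id", "OwnerId"] },
              { :name => "Amount", :type => "decimal" }]
    fields.reduce([]) do |acc, f|
      acc.concat(f[:acts_as] ? f[:acts_as].map { |i| { :type => f[:type] || "string", :name => i } }
                             : [{ :type => f[:type] || "string", :name => f[:name] }])
    end
    # => [{:type=>"string", :name=>"Id"}, {:type=>"string", :name=>"OwnerId"},
    #     {:type=>"decimal", :name=>"Amount"}]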
@@ -456,192 +203,11 @@ module GoodData
  node[:id]
  end

- $id = 0
- def self.get_id()
- $id += 1
- end
-
- def self.create_trash_meta(builder)
- builder.Metadata({:id => "trash_metadata"}) do |builder|
- csv_metadata(builder, {
- :name => "trash_metadata",
- :fields => [{:type => "string", :name => "all"}]
- })
- end
- end

- def self.create_lookup_meta(builder)
- builder.Metadata({:id => "lookup_metadata"}) do |builder|
- csv_metadata(builder, {
- :name => "lookup_metadata",
- :fields => [{:type => "string", :name => "key"}, {:type => "string", :name => "value"}]
- })
- end
- end
-
-
- def self.create_run_graph(file, options={})
- subgraphs = options[:subgraphs]

- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "Run graph"
- }) do
- builder.Global do
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- property_file(builder, {:id => "params_params", :fileURL => "params.txt"})
- create_trash_meta(builder)
- create_lookup_meta(builder)
-
- end
- phase = 0
- subgraphs.each do |subgraph|
- builder.Phase(:number => phase+1) do
- id1 = get_id
- id2 = get_id
- ctl = "function integer generate() {$out.0.all = \"FLOW=#{subgraph[:flow]}\";return OK;}"
- build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => id1, :id => id1, :generate => ctl}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id2}", :fileURL => "params.txt", :outputFieldNames => "false", :quotedStrings => "false"}))
- end
- builder.Phase(:number => phase+2) do
-
- id1 = get_id
- id2 = get_id
- ctl = "function integer generate() {$out.0.all = \"NAME=#{subgraph[:name]}\";return OK;}"
- build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => id1, :id => id1, :generate => ctl}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id2}", :fileURL => "params.txt", :outputFieldNames => "false", :append => "true", :quotedStrings => "false"}))
- end
-
- builder.Phase(:number => phase+3) do
- build_node2(builder, GoodData::CloverGenerator::Nodes.run_graph2({:guiName => subgraph[:name], :name => subgraph[:name], :id => subgraph[:flow], :graphName => subgraph[:file]}))
- end
- phase += 4
- end
- end
- end
- end

- def self.create_incremental_downloader_run_graph(file, sources, options={})
- # subgraphs = options[:subgraphs]
-
- merged_sources = sources.reduce([]) do |memo, source|
- merged_source = memo.find {|s| s[:object] == source[:object]}
- if merged_source
- merged_source[:fields] = (merged_source[:fields] + source[:fields]).uniq_by {|f| f[:name]}
- else
- memo.push(source)
- end
- memo
- end
-
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "Run graph"
- }) do
- builder.Global do
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- property_file(builder, {:id => "params_params", :fileURL => "params.txt"})
- create_trash_meta(builder)
- create_lookup_meta(builder)
- build_node2(builder, GoodData::CloverGenerator::Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => GoodData::CloverGenerator::Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
-
- end
- phase = 0
-
-
- merged_sources.each do |source|
- module_name = source[:object]
- file = source[:id] || module_name
- dataset = file || module_name
-
-
- normalize_code = <<HEREDOC
- boolean done = false;
- function integer count() {
-
- if (indexOf($in.0.key, "#{dataset}_LAST_RUN") != -1) {
- return 4;
- }
- else {
- return 0;
- }
- }
-
- string last_run = null;
- string end_date = null;
-
- function integer transform(integer idx) {
- if (last_run == null) {
- last_run = $in.0.value;
- }
- if (end_date == null) {
- end_date = jodaDate2str(today(), "yyyy-MM-dd'T'HH:mm:ss.SSSZZ", "en_US", 'UTC');
- }
-
-
- if (idx == 1) {
- $out.0.all = "#{dataset}_TRUNCATE_DATE=" + jodaDate2str(jodaStr2date(last_run, ["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC'), "yyyy-MM-dd HH:mm:ss", 'en_US', 'UTC');
- } else if (idx == 2) {
- $out.0.all = "#{dataset}_START=" + last_run;
- } else if (idx == 3) {
- $out.0.all = "#{dataset}_END=" + end_date;
- } else {
- $out.0.all = "#{dataset}_LAST_RUN=" + end_date;
- }
-
- return OK;
- }
-
- HEREDOC
-
-
-
-
- builder.Phase(:number => phase += 1) do
- generate_func = <<HEREDOC
- function integer generate() {
- $out.0.key = "#{dataset}_LAST_RUN";
- $out.0.value = "1970-01-01T00:00:00.000+00:00";
- return OK;
- }
- HEREDOC
-
- join_func = <<HEREDOC
- function integer transform() {
- $out.0.key = nvl2($in.1.value, $in.1.key, $in.0.key);
- $out.0.value = nvl2($in.1.value, $in.1.value, $in.0.value);
- return OK;
- }
- HEREDOC
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:name => "generator_#{dataset}", :id => "generator_#{dataset}", :generate => generate_func}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.lookup_reader_writer2({:lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{dataset}" }))
- build_node2(builder, GoodData::CloverGenerator::Nodes.hash_join2({:id => "join_#{dataset}", :joinType => "leftOuter", :joinKey => "$key=$key", :transformation => join_func}))
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "join_#{dataset}:0", :fromNode => "generator_#{dataset}:0", :metadata => "lookup_metadata", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "join_#{dataset}:1", :fromNode => "gd_lookup_reader_#{dataset}:0", :metadata => "lookup_metadata", :id => get_id()}))
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.normalizer2({:name => "normalizer_#{dataset}", :id => "normalizer_#{dataset}", :normalize => normalize_code }))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "normalizer_#{dataset}:0", :fromNode => "join_#{dataset}:0", :metadata => "lookup_metadata", :id => get_id()}))
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:quotedStrings => "false", :name => "params_writer_#{dataset}", :id => "params_writer_#{dataset}", :fileURL => "params.txt", :outputFieldNames => "false", :append => "true"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "params_writer_#{dataset}:0", :fromNode => "normalizer_#{dataset}:0", :metadata => "trash_metadata", :id => get_id()}))
- end
- end
- builder.Phase(:number => phase += 1) do
- build_node2(builder, GoodData::CloverGenerator::Nodes.run_graph2({:guiName => "incremental", :name => "incremental_downloaders", :id => "downlaoders", :graphName => "graphs/incremental.grf"}))
- end
-
- end
- end
- end
-
+
+


  def self.build_attribute_df(dataset, attribute)
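Note: the removed run-graph generators stage state through params.txt as KEY=VALUE rows (FLOW=..., NAME=..., <dataset>_LAST_RUN=...) that later phases read back as graph parameters. The global $id counter behind the removed get_id could be replaced without a global; an illustrative alternative:

    ids = Enumerator.new do |y|
      n = 0
      loop { y << (n += 1) }
    end
    ids.next   # => 1
    ids.next   # => 2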
@@ -777,120 +343,8 @@ HEREDOC
  }
  end

- def self.create_es_write_json(spec)
- {
- :entityName => spec[:id] || spec[:object],
- :fieldsMapping => spec[:fields].reduce({}) do |memo, f|
-
- if f.has_key?(:acts_as)
-
- f[:acts_as].each do |a|
- type = case a
- when "Id"
- "recordid"
- when "timestamp"
- "timestamp"
- else
- f[:type] || "attribute"
- end
- memo[a] = {
- :name => a,
- :type => type
- }
- end
- else
- type = case f[:name]
- when "Id"
- "recordid"
- when "timestamp"
- "timestamp"
- else
- f[:type] || "attribute"
- end
-
- memo[f[:name]] = {
- :name => f[:name],
- :type => (f[:name] == "Id" ? "recordid" : f[:type] || "attribute")
- }
- end
- memo
- end
-
-
- }
- end
-
-
- def self.create_moving_graph(file, options={})
- source = options[:source]
- target = options[:target]
- operation = options[:operation]
- force = options[:force] || false
-
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "File Copy"
- }) do
- builder.Global do
- builder.Metadata({:id => "list_metadata"}) do |builder|
- csv_metadata(builder, {
- :name => "list_metadata",
- :fields => [{:name=>"filePath", :type=>"string"}]
- })
- end
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- end
- builder.Phase(:number => 0) do
-
- transformation_source = "function integer transform() {\n" + ([["filePath", "filePath"]].map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
- build_node2(builder, GoodData::CloverGenerator::Nodes.file_list2(:baseURL => target, :id => "file_list", :transformation => transformation_source))

- build_node2(builder, GoodData::CloverGenerator::Nodes.file_delete2(:baseURL => "${filePath}", :id => "file_delete"))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "file_delete:0", :fromNode => "file_list:0", :metadata => "list_metadata", :id => get_id()}))
- end
- builder.Phase(:number => 1) do
- build_node2(builder, GoodData::CloverGenerator::Nodes.file_copy2({:sourcePath => source, :targetPath => target, :operation => operation, :id => "file_copy"}))
- end
- end
- end
- end

- def self.create_uploading_graph(file, options={})
- metadata = options[:metadata]
- dataset_infos = [options[:datasets]]
- input_file = options[:input_file]
-
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "Goodsales Downloader"
- }) do
- builder.Global do
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- dataset_infos.each do |dataset_info|
- dataset = dataset_info[:id]
- builder.Metadata({:id => "#{dataset}_load"}) do |builder|
- csv_metadata(builder, metadata)
- end
- end
- end
- builder.Phase(:number => 0) do
- dataset_infos.each do |dataset_info|
- dataset = dataset_info[:id]
- gd_dataset = dataset_info[:gd_name] || dataset_info[:id]
- to_svinstvo = build_gd_dataset_loader_json(dataset_info)
- build_node2(builder, GoodData::CloverGenerator::Nodes.reader2({:name => "#{dataset} CSV Loader", :id => "#{dataset}_loader", :fileURL => "${PROJECT}/data/#{dataset}.csv"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{dataset}_load:0", :fromNode => "#{dataset}_loader:0", :metadata => "#{dataset}_load", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.gd_loader2({:name => "#{dataset} Loader", :id => "#{dataset}_load", :dataset => "dataset.#{gd_dataset}", :datasetFieldMappings => to_svinstvo.to_json}))
- end
- end
- end
- end
- end
-
  def self.create_es_uploading_graph(file, options={})
  metadata = options[:metadata]
  dataset_infos = [options[:datasets]]
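Note: an illustrative run of the mapping logic in the removed create_es_write_json: "Id" maps to recordid, "timestamp" to timestamp, and anything else defaults to attribute:

    spec = { :object => "Opportunity",
             :fields => [{ :name => "Id" }, { :name => "timestamp" }, { :name => "StageName" }] }
    mapping = spec[:fields].reduce({}) do |memo, f|
      type = case f[:name]
             when "Id" then "recordid"
             when "timestamp" then "timestamp"
             else f[:type] || "attribute"
             end
      memo.merge(f[:name] => { :name => f[:name], :type => type })
    end
    { :entityName => spec[:id] || spec[:object], :fieldsMapping => mapping }
    # => {:entityName=>"Opportunity",
    #     :fieldsMapping=>{"Id"=>{:name=>"Id", :type=>"recordid"}, ...}}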
@@ -920,361 +374,20 @@ HEREDOC
  build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{dataset}_load:0", :fromNode => "#{dataset}_loader:0", :metadata => "#{dataset}_load", :id => get_id()}))
  build_node2(builder, GoodData::CloverGenerator::Nodes.gd_loader2({:name => "#{dataset} Loader", :id => "#{dataset}_load", :dataset => "dataset.#{gd_dataset}", :datasetFieldMappings => to_svinstvo.to_json}))
  end
- end
+ end
  end
  end
  end

  def self.create_metadata(mod)
- module_name = mod[:object]
- file = mod["file"] || module_name
+ module_name = mod.object
+ file = mod.file || module_name
  # dataset = mod["dataset"] || file || module_name
  {
  :name => file,
- :fields => translate(clover_metadata_for_module(mod))
+ :fields => clover_metadata_for_module(mod)
  }
  end

-
- def self.create_sf_downloading_graph(file, sources, options={})
- metadata = options[:metadata]
- s3_backup = true && options[:s3_backup]
-
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "Goodsales Downloader"
- }) do
- builder.Global do
- sources.each do |mod|
- module_name = mod[:object]
- file = mod[:id] || module_name
- dataset = file || module_name
-
- builder.Metadata({:id => "#{file}_sf_metadata"}) do |builder|
- csv_metadata(builder, {
- :name => "#{file}_sf_metadata",
- :fields => translate(sf_metadata_for_module(mod))
- })
- end
- builder.Metadata({:id => "#{file}_clover_metadata"}) do |builder|
- csv_metadata(builder, {
- :name => "#{file}_clover_metadata",
- :fields => translate(clover_metadata_for_module(mod))
- })
- end
- end
-
- sf_connection(builder, {})
- property(builder, {:id => "SFDC_CLIENT_ID", :value => "gooddata/gooddata/"})
- property(builder, {:id => "SFDC_LOGIN_HOSTNAME", :value => options[:sf_server] || "login.salesforce.com"})
- property(builder, {:id => "SFDC_NAME", :value => "Salesforce connection"})
- property(builder, {:id => "SFDC_PASSWORD", :value => options[:password]})
- property(builder, {:id => "SFDC_TOKEN", :value => options[:token]})
- property(builder, {:id => "SFDC_USERNAME", :value => options[:login]})
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- end
-
- builder.Phase(:number => 0) do
- sources.each do |mod|
- module_name = mod[:object]
- file = mod[:id] || module_name
- dataset = file || module_name
-
- fields = mod[:fields]
- mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
- add = fields.map do |f|
- "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:label]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
- end
-
- stuff = mapping + add.join(",") + "]}}"
- mandatory_fields = fields.reject {|f| f[:is_mandatory] == false }.map {|f| f[:name] + ";"}.join
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.sfdc_reader2({:name => "#{file} SF Writer", :id => "#{file}_sf", :soql => generate_select(mod), :sfdcConnection => "SFDC", :fieldsMapping => stuff, :mandatoryFields => mandatory_fields}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_reformat:0", :fromNode => "#{file}_sf:0", :metadata => "#{file}_sf_metadata", :id => get_id()}))
-
- transformation_source = "function integer transform() {\n" + (transformation_acts_as(mod).map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
- build_node2(builder, GoodData::CloverGenerator::Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_reformat", :transformation => transformation_source}))
-
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_reformat:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "data/#{dataset.downcase}.csv", :outputFieldNames => "true"}))
- if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "bucket", :outputFieldNames => true, :quotedStrings => false})) end
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
- if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_clover_metadata", :id => get_id()})) end
- end
- end
- end
- end
- end
-
- def self.create_es_downloading_graph(file, sources, options={})
- metadata = options[:metadata]
- s3_backup = true && options[:s3_backup]
-
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({
- :name => "Goodsales Downloader"
- }) do
- builder.Global do
- sources.each do |mod|
- module_name = mod[:object]
- file = mod[:id] || module_name
- dataset = file || module_name
-
- es_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
- :name => "#{file}_es_metadata",
- :fields => clover_metadata_for_module(mod)
- })
-
- builder.Metadata({:id => "#{file}_es_metadata"}) do |builder|
- csv_metadata(builder, es_metadata.change do |m|
- m.remove("timestamp")
- end.to_hash)
- end
- end
-
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- end
-
- builder.Phase(:number => 0) do
- sources.each do |mod|
- module_name = mod[:object]
- file = mod[:id] || module_name
- dataset = file || module_name
-
- es_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
- :name => "#{file}_es_metadata",
- :fields => clover_metadata_for_module(mod)
- })
- es_metadata = es_metadata.change do |m|
- m.remove("timestamp")
- end.to_hash
-
- fields = es_metadata[:fields]
-
- e = Es::Entity.new("x", {
- :file => "none",
- :fields => fields.map do |f|
- name = f[:name]
- if name == "Id"
- Es::Field.new('Id', 'recordid')
- else
- Es::Field.new(name, 'attribute')
- end
- end,
- :timeframe => Es::Timeframe::parse("latest")
- })
-
-
- stuff = {
- :entityName => dataset,
- :fieldsMapping => fields.inject({}) do |memo, field|
- name = field[:name]
- memo[name] = name
- memo
- end,
- :eventStoreFieldToTypeMapping => fields.inject({}) do |memo, field|
- name = field[:name]
- if name == "Id"
- memo[name] = "recordid"
- else
- memo[name] = "attribute"
- end
-
- memo
- end,
- :outputMetadataName => "#{file}_es_metadata"
- }
-
- readmap = {
- :columns => e.to_extract_fragment('pid')["readTask"]["readMap"].first[:columns],
- :populates => e.to_extract_fragment('pid')["readTask"]["readMap"].first[:populates]
- }
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.es_reader2({:name => "#{file} ES Reader", :id => "#{file}_es", :entityFieldsMapping => stuff.to_json , :readMap => readmap.to_json}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_es:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "${PROJECT}/data/#{dataset.downcase}.csv", :outputFieldNames => "true", :makeDirs => "true"}))
- if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "bucket", :outputFieldNames => true, :quotedStrings => false})) end
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
- if s3_backup then build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_es_metadata", :id => get_id()})) end
- end
- end
- end
- end
- end
-
- def self.create_incremental_downloading_graph(file, sources, options={})
- metadata = options[:metadata]
- store = options[:store] || "${GDC_EVENTSTORE}"
- s3_backup = true && options[:s3_backup]
-
- merged_sources = sources.reduce([]) do |memo, source|
- merged_source = memo.find {|s| s[:object] == source[:object]}
- if merged_source
- merged_source[:fields] = (merged_source[:fields] + source[:fields]).uniq_by {|f| f[:name]}
- else
- memo.push(source)
- end
- memo
- end
-
- File.open(file, "w") do |file|
- builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
- builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
- builder.Graph({:name => "Goodsales incremental Downloader"}) do
- builder.Global do
- property_file(builder, {:id => "params_params", :fileURL => "params.txt"})
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
-
- create_lookup_meta(builder)
- merged_sources.each do |mod|
- module_name = mod[:object]
- file = mod[:id] || module_name
-
- sf_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
- :name => "#{file}_sf_metadata",
- :fields => sf_metadata_for_module(mod)
- })
-
- clover_metadata = GoodData::CloverGenerator::DSL::Metadata.new({
- :name => "#{file}_clover_metadata",
- :fields => clover_metadata_for_module(mod)
- })
-
-
- builder.Metadata({:id => "#{file}_sf_metadata"}) do |builder|
- csv_metadata(builder, sf_metadata.to_hash)
- end
-
- builder.Metadata({:id => "#{file}_clover_metadata"}) do |builder|
- csv_metadata(builder, clover_metadata.to_hash)
- end
-
- begin
- builder.Metadata({:id => "#{file}_es_metadata"}) do |builder|
- csv_metadata(builder, clover_metadata.change do |m|
- m.remove("timestamp")
- m.add(:name => "timestamp", :type => "date")
- end.to_hash)
- end
- rescue GoodData::CloverGenerator::DSL::RemoveMetadataFieldError => e
- exit_now!("Removing field \"#{e.field}\" failed from metadata \"#{e.metadata.name}\" for source \"#{file}\".")
- end
- # build_node2(builder, GoodData::CloverGenerator::Nodes.lookup2({:lookupTable => "params_lookup", :fileURL => "${PROJECT}/params.txt", :id => "params_lookup_id", :key => "key", :metadata => "lookup_metadata", :name => "params_lookup"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => GoodData::CloverGenerator::Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
-
- end
-
- sf_connection(builder, {})
- property(builder, {:id => "SFDC_CLIENT_ID", :value => "gooddata/gooddata/"})
- property(builder, {:id => "SFDC_LOGIN_HOSTNAME", :value => options[:sf_server] || "login.salesforce.com"})
- property(builder, {:id => "SFDC_NAME", :value => "Salesforce connection"})
- property(builder, {:id => "SFDC_PASSWORD", :value => options[:password]})
- property(builder, {:id => "SFDC_TOKEN", :value => options[:token]})
- property(builder, {:id => "SFDC_USERNAME", :value => options[:login]})
- property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
- end
-
-
-
-
- phase = 1
-
- merged_sources.each do |mod|
-
- module_name = mod[:object]
- file = mod[:id] || module_name
- dataset = file || module_name
-
-
- builder.Phase(:number => phase += 1) do
- build_node2(builder, GoodData::CloverGenerator::Nodes.es_truncate2({:guiName => dataset, :store => store, :entity => dataset, :timestamp => "${#{dataset}_TRUNCATE_DATE}", :name => "#{module_name} es truncate", :id => "#{module_name}_es_truncate"}))
- end
-
- builder.Phase(:number => phase += 1) do
-
-
- fields = mod[:fields]
- mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
- add = fields.map do |f|
- "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:label]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
- end
-
- stuff = mapping + add.join(",") + "]}}"
- mandatory_fields = fields.reject {|f| f[:is_mandatory] == false }.map {|f| f[:name] + ";"}.join
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.sfdc_reader2({:name => "#{file} SF Writer", :id => "#{file}_sf", :soql => generate_incremental_select(mod), :sfdcConnection => "SFDC", :fieldsMapping => stuff, :mandatoryFields => mandatory_fields}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_reformat:0", :fromNode => "#{file}_sf:0", :metadata => "#{file}_sf_metadata", :id => get_id()}))
-
- transformation_source = "function integer transform() {\n" + (transformation_acts_as(mod).map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
- es_transformation_source = "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.timestamp = str2date($in.0.timestamp,\"joda:yyyy-MM-dd'T'HH:mm:ss.SSSZZ\");;\nreturn OK;\n}"
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_reformat", :transformation => transformation_source}))
-
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_reformat:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:enabled => "disabled", :name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "data/#{dataset.downcase}.csv", :outputFieldNames => "true"}))
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_es_reformat", :transformation => es_transformation_source}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_es_reformat:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_clover_metadata", :id => get_id()}))
-
- if s3_backup then
- build_node2(builder, GoodData::CloverGenerator::Nodes.writer2({:enabled => "enabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}/#{file}_\`date2long(today())\`", :outputFieldNames => true, :quotedStrings => false}))
- end
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_clover_metadata", :id => get_id()}))
- if s3_backup then
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_clover_metadata", :id => get_id()}))
- end
-
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.sort2({:sortKey => "timestamp(a)",:name => "#{file} es Sort", :id => "#{file}_es_sort"}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_es_sort:0", :fromNode => "#{file}_es_reformat:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.es_writer2({:name => "#{file} es Writer", :id => "#{file}_es", :store => store, :entityFieldsMapping => create_es_write_json(mod).to_json}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "#{file}_es:0", :fromNode => "#{file}_es_sort:0", :metadata => "#{file}_es_metadata", :id => get_id()}))
-
-
- end
-
- builder.Phase(:number => phase += 1) do
- generate_func = <<HEREDOC
- function integer generate() {
- date time_end = jodaStr2date("${#{dataset}_END}",["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC');
- $out.0.key = "#{dataset}_LAST_RUN";
- $out.0.value = jodaDate2str(time_end,"yyyy-MM-dd'T'HH:mm:ss.SSSZZ", 'en_US', 'UTC');
- return OK;
- }
- HEREDOC
-
- build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:guiName => dataset, :name => "generator_#{phase}", :id => "generator_#{phase}", :generate => generate_func}))
- build_node2(builder, GoodData::CloverGenerator::Nodes.lookup_reader_writer2({:guiName => dataset, :lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{phase}" }))
- build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "gd_lookup_reader_#{phase}:0", :fromNode => "generator_#{phase}:0", :metadata => "lookup_metadata", :id => get_id()}))
-
-
- end
-
- end
-
-
-
- # builder.Phase(:number => phase += 1) do
- # build_node2(builder, GoodData::CloverGenerator::Nodes.data_generator2({:guiName => "generator1", :name => "generator1", :id => "generator1", :generate => generate_func}))
- # build_node2(builder, GoodData::CloverGenerator::Nodes.lookup_reader_writer2({:lookupTable => "gdLookup0", :id => "gd_lookup_reader", :graphName => "incremental.grf"}))
- # build_node2(builder, GoodData::CloverGenerator::Nodes.edge2({:toNode => "gd_lookup_reader:0", :fromNode => "generator1:0", :metadata => "lookup_metadata", :id => get_id()}))
-
- # end
-
-
- end
- end
- end
-
  end
  end
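Note: the incremental downloader removed above maintains a per-dataset time window through the gdLookup0 lookup and params.txt; a sketch of that bookkeeping (made-up dataset name and timestamps):

    require "time"

    dataset  = "opportunity"
    last_run = "2013-05-01T00:00:00.000+00:00"   # read back from the lookup; 1970-01-01... on first run
    end_date = Time.now.utc.iso8601(3)           # this run's upper bound
    window = {
      "#{dataset}_START"    => last_run,         # SOQL: SystemModstamp > START
      "#{dataset}_END"      => end_date,         # SOQL: SystemModstamp <= END
      "#{dataset}_LAST_RUN" => end_date          # becomes START of the next run
    }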