gd_bam 0.1.3 → 0.1.5

@@ -0,0 +1,663 @@
1
+ module GoodData
2
+ module Bam
3
+ module Generators
4
+ module Downloaders
5
+
6
+ include GoodData::Bam
7
+ include GoodData::CloudConnect
8
+
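+ # Builds main.grf, the CloudConnect "run" graph for one incremental download pass:
+ # for every Salesforce tap it refreshes the <dataset>_TRUNCATE_DATE/_START/_END/_LAST_RUN
+ # parameters in params.prm from the GoodData lookup and then runs graphs/incremental.grf;
+ # for every file tap it builds the list of files to fetch and hands it to the
+ # per-tap loop graph (graphs/<id>_loop.grf).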
9
+ def self.create_incremental_downloader_run_graph(file, taps, options={})
10
+
11
+ File.open(file, "w") do |file|
12
+ builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
13
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
14
+ builder.Graph({
15
+ :name => "Run graph"
16
+ }) do
17
+ builder.Global do
18
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
19
+ Helpers::property_file(builder, {:id => "params_params", :fileURL => "params.prm"})
20
+ Helpers::create_trash_meta(builder)
21
+ Helpers::create_lookup_meta(builder)
22
+ Helpers::create_file_list_meta(builder)
23
+ Helpers::create_run_graph_failure_metadata(builder)
24
+
25
+ Core::build_node2(builder, Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
26
+
27
+ end
28
+ phase = 0
29
+ sf_taps = Taps.get_salesforce(taps)
30
+
31
+ sf_taps.each do |tap|
32
+ module_name = tap[:object]
33
+ file = tap[:id] || module_name
34
+ dataset = file || module_name
35
+
36
+
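+ # CTL normalizer: only the lookup row whose key is "<dataset>_LAST_RUN" survives
+ # (count() returns 4 for it, 0 for everything else) and is fanned out into four
+ # params.prm lines: <dataset>_TRUNCATE_DATE and _START (both from the stored last run),
+ # plus _END and a fresh _LAST_RUN (both set to "now").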
37
+ normalize_code = <<HEREDOC
38
+ boolean done = false;
39
+ function integer count() {
40
+
41
+ if (indexOf($in.0.key, "#{dataset}_LAST_RUN") != -1) {
42
+ return 4;
43
+ }
44
+ else {
45
+ return 0;
46
+ }
47
+ }
48
+
49
+ string last_run = null;
50
+ string end_date = null;
51
+
52
+ function integer transform(integer idx) {
53
+ if (last_run == null) {
54
+ last_run = $in.0.value;
55
+ }
56
+ if (end_date == null) {
57
+ end_date = jodaDate2str(today(), "yyyy-MM-dd'T'HH:mm:ss.SSSZZ", "en_US", 'UTC');
58
+ }
59
+
60
+
61
+ if (idx == 1) {
62
+ $out.0.all = "#{dataset}_TRUNCATE_DATE=" + jodaDate2str(jodaStr2date(last_run, ["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC'), "yyyy-MM-dd HH:mm:ss", 'en_US', 'UTC');
63
+ } else if (idx == 2) {
64
+ $out.0.all = "#{dataset}_START=" + last_run;
65
+ } else if (idx == 3) {
66
+ $out.0.all = "#{dataset}_END=" + end_date;
67
+ } else {
68
+ $out.0.all = "#{dataset}_LAST_RUN=" + end_date;
69
+ }
70
+
71
+ return OK;
72
+ }
73
+
74
+ HEREDOC
75
+
76
+
77
+
78
+
79
+ builder.Phase(:number => phase += 1) do
80
+ generate_func = <<HEREDOC
81
+ function integer generate() {
82
+ $out.0.key = "#{dataset}_LAST_RUN";
83
+ $out.0.value = "1970-01-01T00:00:00.000+00:00";
84
+ return OK;
85
+ }
86
+ HEREDOC
87
+
88
+ join_func = <<HEREDOC
89
+ function integer transform() {
90
+ $out.0.key = nvl2($in.1.value, $in.1.key, $in.0.key);
91
+ $out.0.value = nvl2($in.1.value, $in.1.value, $in.0.value);
92
+ return OK;
93
+ }
94
+ HEREDOC
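+ # The generator seeds an epoch default ("1970-01-01...") for <dataset>_LAST_RUN and the
+ # left-outer hash join prefers the value already stored in the GD lookup (nvl2 on $in.1),
+ # so the very first run downloads full history while later runs resume from LAST_RUN.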
95
+
96
+ Core::build_node2(builder, Nodes.data_generator2({:name => "generator_#{dataset}", :id => "generator_#{dataset}", :generate => generate_func}))
97
+ Core::build_node2(builder, Nodes.lookup_reader_writer2({:lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{dataset}" }))
98
+ Core::build_node2(builder, Nodes.hash_join2({:id => "join_#{dataset}", :joinType => "leftOuter", :joinKey => "$key=$key", :transformation => join_func}))
99
+
100
+ Core::build_node2(builder, Nodes.edge2({:toNode => "join_#{dataset}:0", :fromNode => "generator_#{dataset}:0", :metadata => "lookup_metadata"}))
101
+ Core::build_node2(builder, Nodes.edge2({:toNode => "join_#{dataset}:1", :fromNode => "gd_lookup_reader_#{dataset}:0", :metadata => "lookup_metadata"}))
102
+
103
+ Core::build_node2(builder, Nodes.normalizer2({:name => "normalizer_#{dataset}", :id => "normalizer_#{dataset}", :normalize => normalize_code }))
104
+ Core::build_node2(builder, Nodes.edge2({:toNode => "normalizer_#{dataset}:0", :fromNode => "join_#{dataset}:0", :metadata => "lookup_metadata"}))
105
+
106
+ Core::build_node2(builder, Nodes.writer2({:quotedStrings => "false", :name => "params_writer_#{dataset}", :id => "params_writer_#{dataset}", :fileURL => "params.prm", :outputFieldNames => "false", :append => "true"}))
107
+ Core::build_node2(builder, Nodes.edge2({:toNode => "params_writer_#{dataset}:0", :fromNode => "normalizer_#{dataset}:0", :metadata => "trash_metadata"}))
108
+ end
109
+ end
110
+ builder.Phase(:number => phase += 1) do
111
+ if !sf_taps.empty?
112
+ Core::build_node2(builder, Nodes.run_graph2({:guiName => "incremental", :name => "incremental_downloaders", :id => "downloaders", :graphName => "graphs/incremental.grf"}))
113
+ end
114
+ end
115
+
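+ # File-based taps: list the files under the tap's source URL, rewrite them into
+ # authenticated WebDAV URLs, persist the list to data/<id>_files_to_read.csv together
+ # with a <id>_SKIP_LINES=0 parameter, then run graphs/<id>_loop.grf over the list;
+ # a failing run is routed through a reformat that raises "Loop failed".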
116
+ file_taps = Taps.get_file(taps)
117
+
118
+ file_taps.each do |tap|
119
+ source = tap[:source]
120
+ id = tap[:id]
121
+
122
+
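+ # CTL reformat: injects ${GD_LOGIN}:${GD_PASSWORD} into ${GDC_WEBDAV_HOST} file URLs
+ # ("@" and "+" in the login are percent-encoded) so the loop graph can read the files
+ # over authenticated WebDAV.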
123
+ reformat_func = <<HEREDOC
124
+
125
+ function integer transform() {
126
+ $out.0.filePath = replace($in.0.filePath, "${GDC_WEBDAV_HOST}", replace(replace(\"${GD_LOGIN}\",\"@\",\"%40\"),\"\\\\+\",\"%2B\") + ":${GD_PASSWORD}@${GDC_WEBDAV_HOST}");
127
+ $out.0.fileName = $in.0.fileName;
128
+ return ALL;
129
+ }
130
+ HEREDOC
131
+
132
+ builder.Phase(:number => phase += 1) do
133
+ Core::build_node2(builder, Nodes.file_list2({:id => "#{id}_file_list", :name => "#{id}_file_list", :dataPolicy => "Strict", :baseURL => "#{tap[:source]}", :output_mapping => Nodes::MAP_ALL}))
134
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{id} Reformat", :id => "#{id}_reformat", :transformation => reformat_func}))
135
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_reformat:0", :fromNode => "#{id}_file_list:0", :metadata => "file_list"}))
136
+ Core::build_node2(builder, Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id}_writer", :fileURL => "data/#{id}_files_to_read.csv"}))
137
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_writer:0", :fromNode => "#{id}_reformat:0", :metadata => "file_list"}))
138
+ end
139
+
140
+ builder.Phase(:number => phase += 1) do
141
+ ctl = "function integer generate() {$out.0.all = \"#{id}_SKIP_LINES=0\";return OK;}"
142
+ Core::build_node2(builder, Nodes.data_generator2({:name => "#{id}_generator", :id => "#{id}_generator", :generate => ctl}))
143
+ Core::build_node2(builder, Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id}_csv_writer", :fileURL => "#{id}_counter.prm", :outputFieldNames => "false", :quotedStrings => "false"}))
144
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_csv_writer:0", :fromNode => "#{id}_generator:0", :metadata => "trash_metadata"}))
145
+ end
146
+
147
+ subgraph_reformat_func = <<HEREDOC
148
+
149
+ function integer transform() {
150
+ $out.0.all = "graphs/#{id}_loop.grf";
151
+ return ALL;
152
+ }
153
+ HEREDOC
154
+
155
+ fail_reformat = <<HEREDOC
156
+ function integer transform() {
157
+ raiseError("Loop failed");
158
+ }
159
+ HEREDOC
160
+
161
+
162
+ builder.Phase(:number => phase += 1) do
163
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{id} Reformat graph", :id => "#{id}_reformat_graph", :transformation => subgraph_reformat_func}))
164
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_reformat_graph:0", :fromNode => "#{id}_reformat:1", :metadata => "file_list"}))
165
+ Core::build_node2(builder, Nodes.run_graph2({
166
+ :guiName => id,
167
+ :name => id,
168
+ :id => "#{id}_run_graph"
169
+ }))
170
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_run_graph:0", :fromNode => "#{id}_reformat_graph:0", :metadata => "trash_metadata"}))
171
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{id} Reformat fail", :id => "#{id}_reformat_fail", :transformation => fail_reformat}))
172
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_reformat_fail:0", :fromNode => "#{id}_run_graph:1", :metadata => "run_graph_failure_metadata"}))
173
+ Core::build_node2(builder, Nodes.trash2({:name => "#{id}_trash", :id => "#{id}_trash", :debugPrint => true}))
174
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id}_trash:0", :fromNode => "#{id}_reformat_fail:0", :metadata => "run_graph_failure_metadata"}))
175
+ end
176
+ end
177
+
178
+
179
+ end
180
+ end
181
+ end
182
+
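+ # Builds graphs/incremental.grf for Salesforce taps. Per tap: truncate the event store
+ # back to <dataset>_TRUNCATE_DATE, read changed rows with an incremental SOQL select,
+ # fan the stream out to a (disabled) CSV writer, an optional S3 backup and the event
+ # store (sorted by Timestamp), and finally store the new <dataset>_LAST_RUN (taken from
+ # <dataset>_END) back into the GD lookup.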
183
+ def self.create_incremental_sf_downloading_graph(file, taps, options={})
184
+ metadata = options[:metadata]
185
+ store = options[:store] || "${GDC_EVENTSTORE}"
186
+ s3_backup = options[:s3_backup]
187
+
188
+ File.open(file, "w") do |file|
189
+ builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
190
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
191
+ builder.Graph({:name => "Goodsales incremental Downloader"}) do
192
+ builder.Global do
193
+ Helpers::property_file(builder, {:id => "params_params", :fileURL => "params.prm"})
194
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
195
+
196
+ Helpers::create_lookup_meta(builder)
197
+ taps.each do |tap|
198
+ module_name = tap[:object]
199
+ file = tap[:id] || module_name
200
+
201
+ builder.Metadata({:id => "#{file}_s3_metadata"}) do |builder|
202
+ Helpers::csv_metadata(builder, GoodData::Bam::Metadata.add_field(Metadata.get_target_metadata(Tap.prepare_for_s3_backup(tap)), {:name => "Timestamp"}))
203
+ end
204
+
205
+ tap = Tap.prepare_for_sf_downloader(tap)
206
+ builder.Metadata({:id => "#{file}_source_metadata"}) do |builder|
207
+ Helpers::csv_metadata(builder, Metadata.get_source_metadata(tap))
208
+ end
209
+
210
+ builder.Metadata({:id => "#{file}_es_metadata"}) do |builder|
211
+ Helpers::csv_metadata(builder, Metadata.add_timestamp_as_date(Metadata.get_target_metadata(tap)))
212
+ end
213
+
214
+ Core::build_node2(builder, Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
215
+
216
+ end
217
+
218
+ Helpers::sf_connection(builder, {})
219
+ Helpers::property(builder, {:id => "SFDC_CLIENT_ID", :value => "gooddata/gooddata/"})
220
+ Helpers::property(builder, {:id => "SFDC_LOGIN_HOSTNAME", :value => options[:sf_server] || "login.salesforce.com"})
221
+ Helpers::property(builder, {:id => "SFDC_NAME", :value => "Salesforce connection"})
222
+ Helpers::property(builder, {:id => "SFDC_PASSWORD", :value => options[:sf_password]})
223
+ Helpers::property(builder, {:id => "SFDC_TOKEN", :value => options[:sf_token]})
224
+ Helpers::property(builder, {:id => "SFDC_USERNAME", :value => options[:sf_login]})
225
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
226
+ end
227
+
228
+ phase = 1
229
+
230
+ taps.each do |tap|
231
+ module_name = tap[:object]
232
+ file = tap[:id]
233
+ dataset = file || module_name
234
+ source_file = tap[:source]
235
+
236
+ has_timestamp = Tap.has_output_field?(tap, "Timestamp")
237
+ timestamp_field = Tap.find_output_field(tap, "Timestamp")
238
+ id_field = Tap.find_output_field(tap, "Id")
239
+ tap = Tap.prepare_for_sf_downloader(tap)
240
+ builder.Phase(:number => phase += 1) do
241
+ Core::build_node2(builder, Nodes.es_truncate2({:guiName => dataset, :store => store, :entity => dataset, :timestamp => "${#{dataset}_TRUNCATE_DATE}", :name => "#{module_name} es truncate", :id => "#{module_name}_es_truncate"}))
242
+ end
243
+
244
+ builder.Phase(:number => phase += 1) do
245
+ fields = tap[:fields]
246
+ mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
247
+ add = fields.map do |f|
248
+ "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:label]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
249
+ end
250
+ stuff = mapping + add.join(",") + "]}}"
251
+
252
+ mandatory_fields = fields.reject {|f| f[:is_mandatory] == false }.map {|f| f[:name] + ";"}.join
253
+
254
+ Core::build_node2(builder, Nodes.sfdc_reader2({:name => "#{file} SF Reader", :id => "#{file}_reader", :soql => Helpers::generate_incremental_select(tap), :sfdcConnection => "SFDC", :fieldsMapping => stuff, :mandatoryFields => mandatory_fields}))
255
+
256
+
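+ # Event-store reformat: Id is copied from the tap's Id field; Timestamp comes from the
+ # tap's own Timestamp field when it has one, otherwise from GRAPH_STARTED_TIMESTAMP.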
257
+ es_transformation_source = if has_timestamp
258
+ "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Id = $in.0.#{id_field[:name]};\n$out.0.Timestamp = str2date($in.0.#{timestamp_field[:name]},\"joda:yyyy-MM-dd'T'HH:mm:ss.SSSZZ\");;\nreturn OK;\n}"
259
+ else
260
+ "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Id = $in.0.#{id_field[:name]};\n$out.0.Timestamp = long2date(${GRAPH_STARTED_TIMESTAMP});\nreturn OK;\n}"
261
+ end
262
+
263
+ Core::build_node2(builder, Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
264
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_reader:0", :metadata => "#{file}_source_metadata"}))
265
+ Core::build_node2(builder, Nodes.writer2({:enabled => "disabled", :name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "data/#{dataset.downcase}.csv", :outputFieldNames => "true"}))
266
+
267
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_es_reformat", :transformation => es_transformation_source}))
268
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_es_reformat:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_source_metadata"}))
269
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_source_metadata"}))
270
+
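+ # Optional raw backup to S3: the writer URL embeds ${S3_ACCESS_KEY_ID} and the secret
+ # key (with "/" escaped as %2F) and lands under ${GDC_PROJECT_ID}/<tap>/<tap>_<epoch>;
+ # taps without a Timestamp field get one stamped from GRAPH_STARTED_TIMESTAMP first.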
271
+ if s3_backup then
272
+ if has_timestamp
273
+
274
+ Core::build_node2(builder, Nodes.writer2({:enabled => "enabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}/#{file}_\`date2long(today())\`", :outputFieldNames => true}))
275
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_source_metadata"}))
276
+
277
+ else
278
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_s3_reformat", :transformation => "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Timestamp = toString(${GRAPH_STARTED_TIMESTAMP});\nreturn OK;\n}"}))
279
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3_reformat:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_source_metadata"}))
280
+
281
+ Core::build_node2(builder, Nodes.writer2({:enabled => "enabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}/#{file}_\`date2long(today())\`", :outputFieldNames => true}))
282
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_s3_reformat:0", :metadata => "#{file}_s3_metadata"}))
283
+
284
+ end
285
+ end
286
+
287
+
288
+ Core::build_node2(builder, Nodes.sort2({:sortKey => "Timestamp(a)",:name => "#{file} es Sort", :id => "#{file}_es_sort"}))
289
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_es_sort:0", :fromNode => "#{file}_es_reformat:0", :metadata => "#{file}_es_metadata"}))
290
+
291
+ Core::build_node2(builder, Nodes.es_writer2({:name => "#{file} es Writer", :id => "#{file}_es", :store => store, :entityFieldsMapping => Helpers::create_es_write_json(Tap.add_timestamp_field(tap)).to_json}))
292
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_es:0", :fromNode => "#{file}_es_sort:0", :metadata => "#{file}_es_metadata"}))
293
+
294
+ end
295
+
296
+ builder.Phase(:number => phase += 1) do
297
+ generate_func = <<HEREDOC
298
+ function integer generate() {
299
+ date time_end = jodaStr2date("${#{dataset}_END}",["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC');
300
+ $out.0.key = "#{dataset}_LAST_RUN";
301
+ $out.0.value = jodaDate2str(time_end,"yyyy-MM-dd'T'HH:mm:ss.SSSZZ", 'en_US', 'UTC');
302
+ return OK;
303
+ }
304
+ HEREDOC
305
+
306
+ Core::build_node2(builder, Nodes.data_generator2({:guiName => dataset, :name => "generator_#{phase}", :id => "generator_#{phase}", :generate => generate_func}))
307
+ Core::build_node2(builder, Nodes.lookup_reader_writer2({:guiName => dataset, :lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{phase}" }))
308
+ Core::build_node2(builder, Nodes.edge2({:toNode => "gd_lookup_reader_#{phase}:0", :fromNode => "generator_#{phase}:0", :metadata => "lookup_metadata"}))
309
+
310
+ end
311
+ end
312
+ end
313
+ end
314
+ end
315
+
316
+
317
+
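+ # Builds the history-initialization variant of incremental.grf. It needs a live
+ # Salesforce client (Commands.get_sf_client) to inspect which history objects and
+ # fields exist; for every tap field other than Id and Timestamp it pulls old values
+ # from the available history objects (plus the current values from the object itself),
+ # sorts them by Timestamp and writes them to the event store as Id/Timestamp/Value.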
318
+ def self.create_incremental_sf_history_init_graph(file, taps, options={})
319
+ metadata = options[:metadata]
320
+ store = options[:store] || "${GDC_EVENTSTORE}"
321
+ s3_backup = options[:s3_backup]
322
+
323
+
324
+ client = GoodData::Bam::Commands.get_sf_client(options)
325
+
326
+ File.open(file, "w") do |file|
327
+ builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
328
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
329
+ builder.Graph({:name => "Goodsales incremental Downloader"}) do
330
+ builder.Global do
331
+ Helpers::property_file(builder, {:id => "params_params", :fileURL => "params.prm"})
332
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
333
+
334
+ Helpers::create_lookup_meta(builder)
335
+ builder.Metadata({:id => "data_metadata"}) do |builder|
336
+ Helpers::csv_metadata(builder, {:name => "data_metadata",:fields => [{:name => "Id"}, {:name => "Timestamp"}, {:name => "Value"}]})
337
+ end
338
+
339
+ builder.Metadata({:id => "es_metadata"}) do |builder|
340
+ Helpers::csv_metadata(builder, {:name => "data_metadata",:fields => [{:name => "Id"}, {:name => "Timestamp", :type => "date"}, {:name => "Value"}]})
341
+ end
342
+
343
+ Core::build_node2(builder, Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
344
+
345
+ Helpers::sf_connection(builder, {})
346
+ Helpers::property(builder, {:id => "SFDC_CLIENT_ID", :value => "gooddata/gooddata/"})
347
+ Helpers::property(builder, {:id => "SFDC_LOGIN_HOSTNAME", :value => options[:sf_server] || "login.salesforce.com"})
348
+ Helpers::property(builder, {:id => "SFDC_NAME", :value => "Salesforce connection"})
349
+ Helpers::property(builder, {:id => "SFDC_PASSWORD", :value => options[:sf_password]})
350
+ Helpers::property(builder, {:id => "SFDC_TOKEN", :value => options[:sf_token]})
351
+ Helpers::property(builder, {:id => "SFDC_USERNAME", :value => options[:sf_login]})
352
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
353
+ end
354
+
355
+ phase = 1
356
+
357
+ taps_with_history = []
358
+ taps.each do |tap|
359
+ module_name = tap[:object]
360
+ dataset = tap[:id]
361
+ source_file = tap[:source]
362
+
363
+ has_timestamp = Tap.has_output_field?(tap, "Timestamp")
364
+ timestamp_field = Tap.find_output_field(tap, "Timestamp")
365
+ id_field = Tap.find_output_field(tap, "Id")
366
+ tap = Tap.prepare_for_sf_downloader(tap)
367
+ builder.Phase(:number => phase += 1) do
368
+ # Core::build_node2(builder, Nodes.es_truncate2({:guiName => dataset, :store => store, :entity => dataset, :timestamp => "${#{dataset}_TRUNCATE_DATE}", :name => "#{module_name} es truncate", :id => "#{module_name}_es_truncate"}))
369
+ end
370
+
371
+ fields = tap[:fields]
372
+
373
+ puts tap[:object]
374
+ sf_object = tap[:object]
375
+ id_field = Tap.find_output_field(tap, "Id")
376
+ timestamp_field = Tap.find_output_field(tap, "Timestamp")
377
+
378
+
379
+ objects_to_get = Helpers.objects_for_history(client, tap)
380
+
381
+ (tap[:fields] - [id_field, timestamp_field]).each_with_index do |field, i|
382
+ builder.Phase(:number => phase += 1) do
383
+
384
+ f = field[:name]
385
+
386
+ selects = objects_to_get.reduce([]) do |memo, o|
387
+ fields = client.fields(o)
388
+ generic_field_history = ["NewValue", "OldValue", "ParentId"].all? {|fl| fields.include?(fl)}
389
+ specific_field_history = ["NewValue", "OldValue", "#{sf_object}Id"].all? {|fl| fields.include?(fl)}
390
+ specific_history = ["SystemModstamp", "#{sf_object}Id"].all? {|fl| fields.include?(fl)}
391
+
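+ # Three source shapes are supported: generic field history (NewValue/OldValue/ParentId,
+ # filtered by Field = '<field>'), object-specific field history (<Object>Id instead of
+ # ParentId), and objects that carry the field directly alongside SystemModstamp and
+ # <Object>Id (read in full); anything else raises.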
392
+ select, mapping, mandatory = if generic_field_history
393
+ [
394
+ "SELECT NewValue, CreatedDate, ParentId FROM #{o} WHERE Field = '#{f}'",
395
+ "{\"xmlFieldsMapping\":{\"xmlFields\":[
396
+ {\"xmlFieldMapping\":{\"name\":\"NewValue\",\"label\":\"NewValue\",\"xmlPath\":\"NewValue\",\"metadataField\":\"Value\"}},
397
+ {\"xmlFieldMapping\":{\"name\":\"CreatedDate\",\"label\":\"CreatedDate\",\"xmlPath\":\"CreatedDate\",\"metadataField\":\"Timestamp\"}},
398
+ {\"xmlFieldMapping\":{\"name\":\"ParentId\",\"label\":\"ParentId\",\"xmlPath\":\"ParentId\",\"metadataField\":\"Id\"}}
399
+ ]}}",
400
+ ["ParentId", "NewValue", "CreatedDate"]
401
+ ]
402
+ elsif specific_field_history
403
+ [
404
+
405
+ "SELECT NewValue, CreatedDate, #{sf_object}Id FROM #{o} WHERE Field = '#{f}'",
406
+ "{\"xmlFieldsMapping\":{\"xmlFields\":[
407
+ {\"xmlFieldMapping\":{\"name\":\"NewValue\",\"label\":\"NewValue\",\"xmlPath\":\"NewValue\",\"metadataField\":\"Value\"}},
408
+ {\"xmlFieldMapping\":{\"name\":\"CreatedDate\",\"label\":\"CreatedDate\",\"xmlPath\":\"CreatedDate\",\"metadataField\":\"Timestamp\"}},
409
+ {\"xmlFieldMapping\":{\"name\":\"#{sf_object}Id\",\"label\":\"#{sf_object}Id\",\"xmlPath\":\"#{sf_object}Id\",\"metadataField\":\"Id\"}}
410
+ ]}}",
411
+ ["#{sf_object}Id", "NewValue", "CreatedDate"]
412
+ ]
413
+ elsif specific_history
414
+ if fields.include?(f)
415
+ [
416
+ "SELECT #{f}, SystemModstamp, #{sf_object}Id FROM #{o}",
417
+ "{\"xmlFieldsMapping\":{\"xmlFields\":[
418
+ {\"xmlFieldMapping\":{\"name\":\"#{f}\",\"label\":\"#{f}\",\"xmlPath\":\"#{f}\",\"metadataField\":\"Value\"}},
419
+ {\"xmlFieldMapping\":{\"name\":\"SystemModstamp\",\"label\":\"SystemModstamp\",\"xmlPath\":\"SystemModstamp\",\"metadataField\":\"Timestamp\"}},
420
+ {\"xmlFieldMapping\":{\"name\":\"#{sf_object}Id\",\"label\":\"#{o}Id\",\"xmlPath\":\"#{o}Id\",\"metadataField\":\"Id\"}}
421
+ ]}}",
422
+ ["#{sf_object}Id", f, "SystemModstamp"]
423
+ ]
424
+ else
425
+ [nil, nil, nil]
426
+ end
427
+ else
428
+ fail "Unrecognized fields configuration for historization in SF."
429
+ end
430
+ if select.nil?
431
+ memo
432
+ else
433
+ memo.concat([{
434
+ :object => o,
435
+ :query => select,
436
437
+ :mapping => mapping,
438
+ :mandatory => mandatory
442
+ }])
443
+ end
444
+ end
445
+
446
+ unless selects.empty?
447
+ taps_with_history.concat([tap])
448
+ selects = selects.concat([{
449
+ :object => sf_object,
450
+ :query => "SELECT Id, #{f}, SystemModstamp FROM #{sf_object}",
451
+ :mapping => "{\"xmlFieldsMapping\":{\"xmlFields\":[
452
+ {\"xmlFieldMapping\":{\"name\":\"#{f}\",\"label\":\"#{f}\",\"xmlPath\":\"#{f}\",\"metadataField\":\"Value\"}},
453
+ {\"xmlFieldMapping\":{\"name\":\"SystemModstamp\",\"label\":\"SystemModstamp\",\"xmlPath\":\"SystemModstamp\",\"metadataField\":\"Timestamp\"}},
454
+ {\"xmlFieldMapping\":{\"name\":\"Id\",\"label\":\"Id\",\"xmlPath\":\"Id\",\"metadataField\":\"Id\"}}
455
+ ]}}",
456
+ :mandatory => ["Id", f, "SystemModstamp"]
457
+ }])
458
+ selects.each_with_index do |obj, i|
459
+
460
+ o = obj[:object]
461
+ mapping = obj[:mapping]
462
+ select = obj[:query]
463
+ mandatory = obj[:mandatory]
464
+
465
+ Core::build_node2(builder, Nodes.sfdc_reader2({:name => "#{o} SF Reader #{i}", :id => "#{f}_#{o}_reader", :soql => select, :sfdcConnection => "SFDC", :fieldsMapping => mapping, :mandatoryFields => mandatory.join(';')}))
466
+ puts "#{f}_#{o}_reader:0 -> #{f}_#{sf_object}_gather:#{i}"
467
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{f}_#{sf_object}_gather:#{i}", :fromNode => "#{f}_#{o}_reader:0", :metadata => "data_metadata"}))
468
+ end
469
+ Core::build_node2(builder, Nodes.gather2({:name => "#{f} Gather", :id => "#{f}_#{sf_object}_gather"}))
470
+ Core::build_node2(builder, Nodes.sort2({:sortKey => "Timestamp(a)",:name => "#{f}_#{sf_object}_sort", :id => "#{f}_#{sf_object}_sort"}))
471
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{f}_#{sf_object}_sort:0", :fromNode => "#{f}_#{sf_object}_gather:0", :metadata => "data_metadata"}))
472
+
473
+ Core::build_node2(builder, Nodes.copy2({:name => "#{f}_#{sf_object} copy", :id => "#{f}_#{sf_object}_copy"}))
474
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{f}_#{sf_object}_copy:0", :fromNode => "#{f}_#{sf_object}_sort:0", :metadata => "data_metadata"}))
475
+
476
+ if s3_backup
477
+ Core::build_node2(builder, Nodes.writer2({:enabled => "enabled", :name => "#{f}_#{sf_object} s3 Writer", :id => "#{f}_#{sf_object}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{sf_object}/#{f}_#{sf_object}_\`date2long(today())\`", :outputFieldNames => true}))
478
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{f}_#{sf_object}_s3:0", :fromNode => "#{f}_#{sf_object}_copy:1", :metadata => "data_metadata"}))
479
+ end
480
+
481
+ transform = "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Timestamp = str2date($in.0.Timestamp,\"joda:yyyy-MM-dd'T'HH:mm:ss.SSSZZ\");\nreturn OK;\n}"
482
+
483
+ Core::build_node2(builder, Nodes.reformat2({:name => "Reformat", :id => "#{f}_#{sf_object}_reformat", :transformation => transform}))
484
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{f}_#{sf_object}_reformat:0", :fromNode => "#{f}_#{sf_object}_copy:0", :metadata => "data_metadata"}))
485
+
486
+ Core::build_node2(builder, Nodes.es_writer2({:name => "#{f}_#{sf_object} es Writer", :id => "#{f}_#{sf_object}_es", :store => store, :entityFieldsMapping => Helpers::create_es_write_json({:type => :tap, :id => tap[:id], :fields => [{:name => "Id"}, {:name => "Timestamp"}, {:name => f, :meta => "Value"}]}).to_json}))
487
+
488
+ # Core::build_node2(builder, Nodes.trash2({:name => "#{f}_#{sf_object}_es", :id => "#{f}_#{sf_object}_es", :debugPrint => true}))
489
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{f}_#{sf_object}_es:0", :fromNode => "#{f}_#{sf_object}_reformat:0", :metadata => "es_metadata"}))
490
+ end
491
+ end
492
+ end
493
+ end
494
+ taps_with_history.each do |tap|
495
+ puts "#{tap[:id]}_LAST_RUN"
496
+ builder.Phase(:number => phase += 1) do
497
+ generate_func = <<HEREDOC
498
+ function integer generate() {
499
+ date time_end = jodaStr2date("${#{tap[:id]}_END}",["yyyy-MM-dd'T'HH:mm:ss.SSSZZ"], 'en_US', 'UTC', 'UTC');
500
+ $out.0.key = "#{tap[:id]}_LAST_RUN";
501
+ $out.0.value = jodaDate2str(time_end,"yyyy-MM-dd'T'HH:mm:ss.SSSZZ", 'en_US', 'UTC');
502
+ return OK;
503
+ }
504
+ HEREDOC
505
+
506
+ Core::build_node2(builder, Nodes.data_generator2({:guiName => tap[:id], :name => "generator_#{phase}", :id => "generator_#{phase}", :generate => generate_func}))
507
+ Core::build_node2(builder, Nodes.lookup_reader_writer2({:guiName => tap[:id], :lookupTable => "gdLookup0", :id => "gd_lookup_reader_#{phase}" }))
508
+ Core::build_node2(builder, Nodes.edge2({:toNode => "gd_lookup_reader_#{phase}:0", :fromNode => "generator_#{phase}:0", :metadata => "lookup_metadata"}))
509
+
510
+ end
511
+ end
512
+ end
513
+ end
514
+ end
515
+
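+ # Writes the whole downloader bundle into `home`: main.grf (the run graph),
+ # graphs/incremental.grf (currently the SF history-init variant; the plain SF
+ # downloading call is left commented out below) and, per incremental file tap,
+ # a <id>_loop.grf / <id>_download.grf pair.
+ #
+ # A minimal, illustrative sketch of a call; the tap and param keys shown are an
+ # assumption, not a complete schema:
+ #
+ #   GoodData::Bam::Generators::Downloaders.generate_downloaders(
+ #     "clover-project/graphs",
+ #     {:taps => [{:id => "opportunity", :object => "Opportunity",
+ #                 :fields => [{:name => "Id"}, {:name => "SystemModstamp"}]}]},
+ #     {:sf_login => "user@example.com", :sf_password => "secret",
+ #      :sf_token => "token", :s3_backup => false})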
516
+ def self.generate_downloaders(home, project, params)
517
+ home = Pathname(home)
518
+
519
+ incremental_taps = Taps.get_incremental(project[:taps])
520
+
521
+ sf_taps = Taps.get_salesforce(incremental_taps)
522
+ file_taps = Taps.get_file(incremental_taps)
523
+
524
+ create_incremental_downloader_run_graph(home + "main.grf", incremental_taps)
525
+ # create_incremental_sf_downloading_graph(home + "incremental.grf", sf_taps, params)
526
+ create_incremental_sf_history_init_graph(home + "incremental.grf", sf_taps, params)
527
+
528
+ file_taps.each do |tap|
529
+ id = tap[:id]
530
+ Helpers::loop_over_file(home + "#{tap[:id]}_loop.grf", {
531
+ :token => id,
532
+ :file_to_loop => "data/#{id}_files_to_read.csv",
533
+ :graph_to_run => "graphs/#{id}_download.grf"
534
+ })
535
+ create_incremental_file_downloading_graph(home + "#{tap[:id]}_download.grf", [tap], params)
536
+
537
+ end
538
+ end
539
+
540
+
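+ # Same entry shape as generate_downloaders, but only emits main.grf and the
+ # history-init incremental.grf for Salesforce taps; no per-file download graphs.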
541
+ def self.generate_history_downloaders(home, project, params)
542
+ home = Pathname(home)
543
+
544
+ incremental_taps = Taps.get_incremental(project[:taps])
545
+
546
+ sf_taps = Taps.get_salesforce(incremental_taps)
547
+ file_taps = Taps.get_file(incremental_taps)
548
+
549
+ create_incremental_downloader_run_graph(home + "main.grf", incremental_taps)
550
+ create_incremental_sf_history_init_graph(home + "incremental.grf", sf_taps, params)
551
+ end
552
+
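+ # Builds graphs/<id>_download.grf, run once per file by the loop graph: it reads ${FILE},
+ # fans it out to a (disabled) CSV writer, an optional S3 backup and the event store
+ # (reformatted and sorted by Timestamp), and deletes ${FILE} in the last phase.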
553
+ def self.create_incremental_file_downloading_graph(file, taps, options={})
554
+ metadata = options[:metadata]
555
+ store = options[:store] || "${GDC_EVENTSTORE}"
556
+ s3_backup = options[:s3_backup]
557
+ File.open(file, "w") do |file|
558
+ builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
559
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
560
+ builder.Graph({:name => "Goodsales incremental Downloader"}) do
561
+ builder.Global do
562
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
563
+ Helpers::create_lookup_meta(builder)
564
+ taps.each do |tap|
565
+ module_name = tap[:object]
566
+ file = tap[:id] || module_name
567
+
568
+ Helpers::property_file(builder, {:id => "params_params", :fileURL => "#{file}_item.prm"})
569
+
570
+ builder.Metadata({:id => "#{file}_s3_metadata"}) do |builder|
571
+ Helpers::csv_metadata(builder, GoodData::Bam::Metadata.add_field(Metadata.get_target_metadata(Tap.prepare_for_s3_backup(tap)), {:name => "Timestamp"}))
572
+ end
573
+
574
+ tap = Tap.prepare_for_sf_downloader(tap)
575
+ builder.Metadata({:id => "#{file}_source_metadata"}) do |builder|
576
+ Helpers::csv_metadata(builder, Metadata.get_source_metadata(tap))
577
+ end
578
+
579
+ builder.Metadata({:id => "#{file}_es_metadata"}) do |builder|
580
+ Helpers::csv_metadata(builder, GoodData::Bam::Metadata.add_timestamp_as_date(Metadata.get_target_metadata(tap)))
581
+ end
582
+ Core::build_node2(builder, Nodes.lookup2({:name => "gdLookup0", :id => "gdLookup0", :type => Nodes::GD_LOOKUP, :metadata => "lookup_metadata"}))
583
+ end
584
+ end
585
+
586
+ phase = 1
587
+
588
+ taps.each do |tap|
589
+ module_name = tap[:object]
590
+ file = tap[:id]
591
+ dataset = file || module_name
592
+ source_file = tap[:source]
593
+
594
+
595
+ has_timestamp = GoodData::Bam::Tap.has_output_field?(tap, "Timestamp")
596
+ timestamp_field = GoodData::Bam::Tap.find_output_field(tap, "Timestamp")
597
+ id_field = GoodData::Bam::Tap.find_output_field(tap, "Id")
598
+
599
+ tap = Tap.prepare_for_sf_downloader(tap)
600
+
601
+ builder.Phase(:number => phase += 1) do
602
+ fields = tap[:fields]
603
+ mapping = "{\"xmlFieldsMapping\":{\"xmlFields\":["
604
+ add = fields.map do |f|
605
+ "{\"xmlFieldMapping\":{\"name\":\"#{f[:name]}\",\"label\":\"#{f[:label]}\",\"xmlPath\":\"#{f[:name]}\",\"metadataField\":\"#{f[:name]}\"}}"
606
+ end
607
+ stuff = mapping + add.join(",") + "]}}"
608
+
609
+ mandatory_fields = fields.reject {|f| f[:is_mandatory] == false }.map {|f| f[:name] + ";"}.join
610
+
611
+ Core::build_node2(builder, Nodes.reader2({:name => "#{file} File Reader", :id => "#{file}_reader", :fileURL => "${FILE}"}))
612
+ es_transformation_source = if has_timestamp
613
+ "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Id = $in.0.#{id_field[:name]};\n$out.0.Timestamp = str2date($in.0.#{timestamp_field[:name]},\"joda:yyyy-MM-dd'T'HH:mm:ss.SSSZZ\");;\nreturn OK;\n}"
614
+ else
615
+ "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Id = $in.0.#{id_field[:name]};\n$out.0.Timestamp = long2date(${GRAPH_STARTED_TIMESTAMP});\nreturn OK;\n}"
616
+ end
617
+
618
+ # build_node2(builder, Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_reformat", :transformation => transformation_source}))
619
+
620
+ Core::build_node2(builder, Nodes.copy2({:name => "#{file} copy", :id => "#{file}_copy"}))
621
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_copy:0", :fromNode => "#{file}_reader:0", :metadata => "#{file}_source_metadata"}))
622
+ Core::build_node2(builder, Nodes.writer2({:enabled => "disabled", :name => "#{file} CSV Writer", :id => "#{file}_csv", :fileURL => "data/#{dataset.downcase}.csv", :outputFieldNames => "true"}))
623
+
624
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_es_reformat", :transformation => es_transformation_source}))
625
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_es_reformat:0", :fromNode => "#{file}_copy:1", :metadata => "#{file}_source_metadata"}))
626
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_csv:0", :fromNode => "#{file}_copy:0", :metadata => "#{file}_source_metadata"}))
627
+
628
+ if s3_backup
629
+ if has_timestamp
630
+ Core::build_node2(builder, Nodes.writer2({:enabled => "enabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}/#{file}_\`date2long(today())\`", :outputFieldNames => true}))
631
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_source_metadata"}))
632
+ else
633
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{file} Reformat", :id => "#{file}_s3_reformat", :transformation => "function integer transform() {\n$out.0.* = $in.0.*;\n$out.0.Timestamp = toString(${GRAPH_STARTED_TIMESTAMP});\nreturn OK;\n}"}))
634
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3_reformat:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_source_metadata"}))
635
+
636
+ Core::build_node2(builder, Nodes.writer2({:enabled => "enabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}/#{file}_\`date2long(today())\`", :outputFieldNames => true}))
637
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_s3_reformat:0", :metadata => "#{file}_s3_metadata"}))
638
+ end
639
+ # Core::build_node2(builder, Nodes.writer2({:enabled => "enabled", :name => "#{file} s3 Writer", :id => "#{file}_s3", :fileURL => "https://${S3_ACCESS_KEY_ID}:\`replace(\"${S3_SECRET_ACCESS_KEY}\",\"/\",\"%2F\")\`@${S3_BUCKETNAME}.s3.amazonaws.com/${GDC_PROJECT_ID}/#{file}/#{file}_\`date2long(today())\`", :outputFieldNames => true}))
640
+ # Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_s3:0", :fromNode => "#{file}_copy:2", :metadata => "#{file}_source_metadata"}))
641
+ end
642
+
643
+
644
+ Core::build_node2(builder, Nodes.sort2({:sortKey => "Timestamp(a)",:name => "#{file} es Sort", :id => "#{file}_es_sort"}))
645
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_es_sort:0", :fromNode => "#{file}_es_reformat:0", :metadata => "#{file}_es_metadata"}))
646
+ Core::build_node2(builder, Nodes.es_writer2({:name => "#{file} es Writer", :id => "#{file}_es", :store => store, :entityFieldsMapping => Helpers::create_es_write_json(Tap.add_timestamp_field(tap)).to_json}))
647
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{file}_es:0", :fromNode => "#{file}_es_sort:0", :metadata => "#{file}_es_metadata"}))
648
+ end
649
+
650
+ builder.Phase(:number => phase += 1) do
651
+ Core::build_node2(builder, Nodes.file_delete2({:guiName => "#{file}_file_delete", :name => "#{file}_file_delete", :id => "#{file}_file_delete", :baseURL => "${FILE}"}))
652
+ end
653
+
654
+ end
655
+
656
+ end
657
+ end
658
+ end
659
+
660
+ end
661
+ end
662
+ end
663
+ end