gd_bam 0.0.15 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. data/README.md +313 -5
  2. data/bin/bam +126 -48
  3. data/lib/bam/version.rb +1 -1
  4. data/lib/bam.rb +51 -0
  5. data/lib/base/errors.rb +15 -0
  6. data/lib/base/flow.rb +37 -0
  7. data/lib/base/graph.rb +23 -0
  8. data/lib/base/metadata.rb +107 -0
  9. data/lib/base/project.rb +95 -0
  10. data/lib/base/repo.rb +35 -0
  11. data/lib/base/sink.rb +44 -0
  12. data/lib/base/step.rb +47 -0
  13. data/lib/base/tap.rb +167 -0
  14. data/lib/base/taps.rb +19 -0
  15. data/lib/cloud_connect/dsl/cc.rb +42 -0
  16. data/lib/cloud_connect/dsl/es_helpers.rb +49 -0
  17. data/lib/cloud_connect/dsl/helpers.rb +199 -0
  18. data/lib/{nodes → cloud_connect/dsl}/nodes.rb +106 -16
  19. data/lib/cloud_connect/dsl/sf_helpers.rb +39 -0
  20. data/lib/cloud_connect/dsl/structure_helpers.rb +94 -0
  21. data/lib/commands/commands.rb +110 -0
  22. data/lib/commands/deployment.rb +217 -0
  23. data/lib/commands/docs_commands.rb +41 -0
  24. data/lib/commands/gd_commands.rb +95 -0
  25. data/lib/commands/scaffold_commands.rb +103 -0
  26. data/lib/commands/sf_commands.rb +37 -0
  27. data/lib/commands/validators.rb +19 -0
  28. data/lib/compatibility.rb +19 -0
  29. data/lib/compiler/compiler.rb +76 -0
  30. data/lib/compiler/etl_visitor.rb +165 -0
  31. data/lib/dsl/dsl.rb +125 -0
  32. data/lib/generators/downloaders.rb +449 -0
  33. data/lib/generators/etl.rb +261 -0
  34. data/lib/generators/validators.rb +445 -0
  35. data/lib/graphs/docentize.grf +1 -1
  36. data/lib/graphs/dummy.grf +1 -1
  37. data/lib/graphs/goodsales_v2/docentize.grf +47 -0
  38. data/lib/graphs/goodsales_v2/dummy.grf +46 -0
  39. data/lib/graphs/goodsales_v2/load_history.grf +579 -0
  40. data/lib/graphs/goodsales_v2/process_account.grf +47 -0
  41. data/lib/graphs/goodsales_v2/process_activity.grf +222 -0
  42. data/lib/graphs/goodsales_v2/process_activity_dim.grf +88 -0
  43. data/lib/graphs/goodsales_v2/process_activity_owner.grf +48 -0
  44. data/lib/graphs/goodsales_v2/process_forecast.grf +20 -0
  45. data/lib/graphs/goodsales_v2/process_opp_records.grf +84 -0
  46. data/lib/graphs/goodsales_v2/process_opportunity.grf +46 -0
  47. data/lib/graphs/goodsales_v2/process_opportunity_line_item.grf +171 -0
  48. data/lib/graphs/goodsales_v2/process_opportunity_snapshot.grf +94 -0
  49. data/lib/graphs/goodsales_v2/process_owner.grf +48 -0
  50. data/lib/graphs/goodsales_v2/process_stage.grf +51 -0
  51. data/lib/graphs/goodsales_v2/process_stage_history.grf +184 -0
  52. data/lib/graphs/goodsales_v2/process_velocity_duration.grf +140 -0
  53. data/lib/graphs/process_account.grf +1 -1
  54. data/lib/graphs/process_activity.grf +1 -1
  55. data/lib/graphs/process_activity_dim.grf +1 -1
  56. data/lib/graphs/process_activity_owner.grf +1 -1
  57. data/lib/graphs/process_forecast.grf +1 -1
  58. data/lib/graphs/process_opp_records.grf +1 -1
  59. data/lib/graphs/process_opportunity.grf +1 -1
  60. data/lib/graphs/process_opportunity_line_item.grf +1 -1
  61. data/lib/graphs/process_opportunity_snapshot.grf +1 -1
  62. data/lib/graphs/process_owner.grf +1 -1
  63. data/lib/graphs/process_stage.grf +1 -1
  64. data/lib/graphs/process_stage_history.grf +1 -1
  65. data/lib/graphs/process_velocity_duration.grf +1 -1
  66. data/lib/nodes/clover_gen.rb +59 -946
  67. data/lib/nodes/dependency.rb +95 -96
  68. data/lib/runtime.rb +7 -648
  69. data/lib/utils/utils.rb +66 -0
  70. data/templates/flow.rb.erb +7 -6
  71. data/templates/join_template.grf.erb +1 -1
  72. data/templates/reformat_template.grf.erb +1 -1
  73. data/templates/sink.json.erb +28 -0
  74. data/templates/tap.json.erb +3 -5
  75. data/templates/workspace.prm.erb +4 -0
  76. metadata +50 -8
  77. data/lib/contract_checkers/contract_checkers.rb +0 -53
  78. data/lib/dsl/project_dsl.rb +0 -259
  79. data/lib/repo/1_config.json +0 -8
  80. data/templates/dataset.json.erb +0 -13
  81. data/templates/source.json.erb +0 -22
@@ -0,0 +1,42 @@
1
+ module GoodData
2
+ module CloudConnect
3
+ module Core
4
+
5
+ include GoodData::CloudConnect
6
+
7
+ def self.build_node2(builder, node)
8
+ if node[:type] == Nodes::EDGE
9
+ builder.tag!("Edge", node)
10
+ elsif node[:type] == Nodes::SF_CONNECTION
11
+ builder.tag!("Connection", node)
12
+ elsif node[:type] == Nodes::FILE_LIST
13
+ builder.tag!("Node", node.remove(:output_mapping)) do |xml|
14
+ xml.attr({:name => "outputMapping"}) do |attr|
15
+ transformation = node[:output_mapping]
16
+ attr.cdata! transformation
17
+ end
18
+ end
19
+ elsif node[:type] == Nodes::REFORMAT || node[:type] == Nodes::EXT_HASH_JOIN
20
+ builder.tag!("Node", node.remove(:transformation)) do |xml|
21
+ xml.attr({:name => "transform"}) do |attr|
22
+ transformation = node[:transformation]
23
+ attr.cdata! transformation
24
+ end
25
+ end
26
+ elsif node[:type] == Nodes::DATA_GENERATOR
27
+ builder.tag!("Node", node.remove(:transformation)) do |xml|
28
+ xml.attr({:name => "generate"}) do |attr|
29
+ transformation = node[:generate]
30
+ attr.cdata! transformation
31
+ end
32
+ end
33
+ elsif node[:type] == Nodes::PERSISTENT_LOOKUP || node[:type] == Nodes::GD_LOOKUP
34
+ builder.tag!("LookupTable", node)
35
+ else
36
+ builder.tag!("Node", node)
37
+ end
38
+ end
39
+
40
+ end
41
+ end
42
+ end
@@ -0,0 +1,49 @@
1
+ module GoodData
2
+ module CloudConnect
3
+ module Helpers
4
+
5
+ def self.create_es_write_json(tap)
6
+ fail "Only tap should be used here. You provided \"#{tap}\"" unless tap[:type] == :tap
7
+ {
8
+ :entityName => tap[:id] || tap[:object],
9
+ :fieldsMapping => tap[:fields].reduce({}) do |memo, f|
10
+
11
+ if f.has_key?(:acts_as)
12
+
13
+ f[:acts_as].each do |a|
14
+ type = case a
15
+ when "Id"
16
+ "recordid"
17
+ when "Timestamp"
18
+ "timestamp"
19
+ else
20
+ f[:type] || "attribute"
21
+ end
22
+ memo[a] = {
23
+ :name => a,
24
+ :type => type
25
+ }
26
+ end
27
+ else
28
+ type = case f[:name]
29
+ when "Id"
30
+ "recordid"
31
+ when "Timestamp"
32
+ "timestamp"
33
+ else
34
+ f[:type] || "attribute"
35
+ end
36
+
37
+ memo[f[:name]] = {
38
+ :name => f[:name],
39
+ :type => type
40
+ }
41
+ end
42
+ memo
43
+ end
44
+
45
+ }
46
+ end
47
+ end
48
+ end
49
+ end
@@ -0,0 +1,199 @@
1
+ module GoodData
2
+ module CloudConnect
3
+ module Helpers
4
+
5
+ def self.property(builder, data)
6
+ builder.Property({
7
+ :id => data[:id],
8
+ :name => data[:name] || data[:id],
9
+ :value => data[:value]
10
+ })
11
+ end
12
+
13
+ def self.property_file(builder, data)
14
+ builder.Property({
15
+ :id => data[:id],
16
+ :fileURL => data[:fileURL]
17
+ })
18
+ end
19
+
20
+ def self.create_trash_meta(builder)
21
+ builder.Metadata({:id => "trash_metadata"}) do |builder|
22
+ csv_metadata(builder, {
23
+ :name => "trash_metadata",
24
+ :fields => [{:type => "string", :name => "all"}]
25
+ })
26
+ end
27
+ end
28
+
29
+ def self.save_metadata(filename, data)
30
+ dirname = File.dirname(filename)
31
+ FileUtils.mkdir_p(dirname) unless File.directory?(dirname)
32
+
33
+ File.open(filename, "w") do |file|
34
+ builder = Builder::XmlMarkup.new(:target => file, :indent=>2)
35
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
36
+ csv_metadata(builder, data)
37
+ end
38
+ end
39
+
40
+ def self.create_moving_graph(file, options={})
41
+ source = options[:source]
42
+ target = options[:target]
43
+ operation = options[:operation]
44
+ force = options[:force] || false
45
+
46
+ File.open(file, "w") do |file|
47
+ builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
48
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
49
+ builder.Graph({
50
+ :name => "File Copy"
51
+ }) do
52
+ builder.Global do
53
+ builder.Metadata({:id => "list_metadata"}) do |builder|
54
+ Helpers::csv_metadata(builder, {
55
+ :name => "list_metadata",
56
+ :fields => [{:name=>"filePath", :type=>"string"}]
57
+ })
58
+ end
59
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
60
+ end
61
+ builder.Phase(:number => 0) do
62
+
63
+ transformation_source = "function integer transform() {\n" + ([["filePath", "filePath"]].map {|t| "$out.0.#{t.last} = $in.0.#{t.first};"}.join("\n")) + "\nreturn OK;\n}"
64
+ Core::build_node2(builder, Nodes.file_list2(:baseURL => target, :id => "file_list", :output_mapping => transformation_source))
65
+
66
+ Core::build_node2(builder, Nodes.file_delete2(:baseURL => "${filePath}", :id => "file_delete"))
67
+ Core::build_node2(builder, Nodes.edge2({:toNode => "file_delete:0", :fromNode => "file_list:0", :metadata => "list_metadata"}))
68
+ end
69
+ builder.Phase(:number => 1) do
70
+ Core::build_node2(builder, Nodes.file_copy2({:sourcePath => source, :targetPath => target, :operation => operation, :id => "file_copy"}))
71
+ end
72
+ end
73
+ end
74
+ end
75
+
76
+ def self.csv_metadata(builder, description)
77
+ sf_description = description.merge({
78
+ :fieldDelimiter => ",",
79
+ :recordDelimiter => "\\n",
80
+ :type => "delimited",
81
+ })
82
+ metadata(builder, sf_description)
83
+ end
84
+
85
+ def self.metadata(builder, description)
86
+ builder.Record({
87
+ :fieldDelimiter => description[:fieldDelimiter],
88
+ :name => description[:name],
89
+ :recordDelimiter => description[:recordDelimiter],
90
+ :type => description[:type]
91
+ }) do |record|
92
+ description[:fields].each do |field|
93
+ builder.Field :name => field[:name], :type => field[:type] || "string", :nullable => "true"
94
+ end
95
+ end
96
+ end
97
+
98
+ def self.create_lookup_meta(builder)
99
+ builder.Metadata({:id => "lookup_metadata"}) do |builder|
100
+ csv_metadata(builder, {
101
+ :name => "lookup_metadata",
102
+ :fields => [{:type => "string", :name => "key"}, {:type => "string", :name => "value"}]
103
+ })
104
+ end
105
+ end
106
+
107
+ def self.create_file_list_meta(builder)
108
+ builder.Metadata({:id => "file_list"}) do |builder|
109
+ csv_metadata(builder, {
110
+ :name => "file_list",
111
+ :fields => [
112
+ {:name => "filePath", :type => "string"},
113
+ {:name => "fileName", :type => "string"}
114
+ ]
115
+ })
116
+ end
117
+ end
118
+
119
+ def self.create_read_error_meta(builder)
120
+ builder.Metadata({:id => "reader_error_metadata"}) do |builder|
121
+ csv_metadata(builder, {
122
+ :name => "reader_error_metadata",
123
+ :fields => [
124
+ {:name => "line_number", :type => "integer"},
125
+ {:name => "field_number", :type => "integer"},
126
+ {:name => "record", :type => "string"},
127
+ {:name => "message", :type => "string"},
128
+ {:name => "file", :type => "string"}
129
+ ]
130
+ })
131
+ end
132
+ end
133
+
134
+ def self.create_run_graph_failure_metadata(builder)
135
+ builder.Metadata({:id => "run_graph_failure_metadata"}) do |builder|
136
+ csv_metadata(builder, {
137
+ :name => "run_graph_failure_metadata",
138
+ :fields => [
139
+ {:type => "string", :name => "graph"},
140
+ {:type => "string", :name => "result"},
141
+ {:type => "string", :name => "description"},
142
+ {:type => "string", :name => "message"},
143
+ {:type => "decimal", :name => "duration"}
144
+ ]
145
+ })
146
+ end
147
+ end
148
+
149
+ def self.create_run_graph(file, options={})
150
+ subgraphs = options[:subgraphs]
151
+ flow = options[:flow]
152
+ File.open(file, "w") do |file|
153
+ builder = Builder::XmlMarkup.new(:target=>file, :indent=>2)
154
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
155
+ builder.Graph({
156
+ :name => "Run graph"
157
+ }) do
158
+ builder.Global do
159
+ property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
160
+ property_file(builder, {:id => "params_params", :fileURL => "params.prm"})
161
+ create_trash_meta(builder)
162
+ create_lookup_meta(builder)
163
+
164
+ end
165
+ phase = 0
166
+ subgraphs.each do |subgraph|
167
+ name = GoodData::Bam::Step.step_name(GoodData::Bam::Graph.create(:path => subgraph))
168
+ # Pathname(subgraph).basename.to_s.chomp(Pathname(subgraph).extname)
169
+ builder.Phase(:number => phase+1) do
170
+ id1 = GoodData::CloudConnect::Nodes.get_id
171
+ id2 = GoodData::CloudConnect::Nodes.get_id
172
+ ctl = "function integer generate() {$out.0.all = \"FLOW=#{flow}\";return OK;}"
173
+ Core::build_node2(builder, Nodes.data_generator2({:name => id1, :id => id1, :generate => ctl}))
174
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata"}))
175
+ Core::build_node2(builder, Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id2}", :fileURL => "params.prm", :outputFieldNames => "false", :quotedStrings => "false"}))
176
+ end
177
+ builder.Phase(:number => phase+2) do
178
+
179
+ id1 = GoodData::CloudConnect::Nodes.get_id
180
+ id2 = GoodData::CloudConnect::Nodes.get_id
181
+ ctl = "function integer generate() {$out.0.all = \"NAME=#{name}\";return OK;}"
182
+ Core::build_node2(builder, Nodes.data_generator2({:name => id1, :id => id1, :generate => ctl}))
183
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{id2}:0", :fromNode => "#{id1}:0", :metadata => "trash_metadata"}))
184
+ Core::build_node2(builder, Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{id2}", :fileURL => "params.prm", :outputFieldNames => "false", :append => "true", :quotedStrings => "false"}))
185
+ end
186
+
187
+ builder.Phase(:number => phase+3) do
188
+ Core::build_node2(builder, Nodes.run_graph2({:guiName => name, :name => name, :id => flow, :graphName => subgraph}))
189
+ end
190
+ phase += 4
191
+ end
192
+ end
193
+ end
194
+ end
195
+
196
+
197
+ end
198
+ end
199
+ end
@@ -1,16 +1,21 @@
1
1
  module GoodData
2
- module CloverGenerator
2
+ module CloudConnect
3
3
  module Nodes
4
4
 
5
5
  SF_CONNECTION = "SFDC"
6
6
  EDGE = "EDGE"
7
7
  WRITER = "DATA_WRITER"
8
+ DEDUP = "DEDUP"
8
9
  READER = "DATA_READER"
9
10
  SF_READER = "SF_READER"
10
11
  REFORMAT = "REFORMAT"
12
+ FILE_BACKUP = "FILE_BACKUP"
11
13
  PERSISTENT_LOOKUP = "persistentLookup"
12
14
  GD_LOOKUP = "gdLookup"
13
15
  GD_ESTORE_WRITER = "GD_ESTORE_WRITER"
16
+ GD_FILE_BACKUP = "FILE_BACKUP"
17
+ GD_FILE_COPY = "FILE_COPY_MOVE"
18
+ FILE_DELETE = "FILE_DELETE"
14
19
  GD_ESTORE_READER = "GD_ESTORE_READER"
15
20
  ENABLED = "ENABLED"
16
21
  DEFAULT_HEIGHT = "77"
@@ -25,12 +30,17 @@ module GoodData
25
30
  TRASH = "TRASH"
26
31
  LOOKUP_TABLE_READER_WRITER = "LOOKUP_TABLE_READER_WRITER"
27
32
  FILE_COPY_MOVE = "FILE_COPY_MOVE"
28
- FILE_DELETE = "FILE_DELETE"
29
33
  FILE_LIST = "FILE_LIST"
30
34
  EXT_SORT = "EXT_SORT"
31
35
  EXT_HASH_JOIN = "EXT_HASH_JOIN"
32
36
  NORMALIZER = "NORMALIZER"
33
-
37
+
38
+ MAP_ALL = <<HEREDOC
39
+ function integer transform() {
40
+ $out.0.* = $in.0.*;
41
+ return ALL;
42
+ }
43
+ HEREDOC
34
44
 
35
45
  DEFAULT_NODE_PARAMS = {
36
46
  :enabled => ENABLED,
@@ -38,6 +48,11 @@ module GoodData
38
48
  :guiWidth => DEFAULT_WIDTH
39
49
  }
40
50
 
51
+ $__cloud_connect_id = 0
52
+ def self.get_id()
53
+ $__cloud_connect_id += 1
54
+ end
55
+
41
56
  def self.node(data, options={})
42
57
  defaults = options[:defaults] || {}
43
58
  required = options[:required] || []
@@ -45,7 +60,7 @@ module GoodData
45
60
  output_data = defaults.merge(data)
46
61
  required.each do |key|
47
62
  unless output_data.has_key?(key)
48
- fail "#{key} is required but not provided"
63
+ fail "#{key} is required but not provided. You defined \"#{data}\""
49
64
  end
50
65
  end
51
66
  output_data
@@ -55,7 +70,9 @@ module GoodData
55
70
  defaults = DEFAULT_NODE_PARAMS.merge(options[:defaults] || {})
56
71
  required = (options[:required] || []).concat([:type, :id])
57
72
  allowed = options[:allowed] || []
58
- output_data = defaults.merge(data)
73
+ output_data = defaults.merge({
74
+ :id => get_id()
75
+ }).merge(data)
59
76
 
60
77
  node(output_data, {
61
78
  :allowed => [:enabled, :guiName, :id, :type, :guiHeight, :guiWidth, :name].concat(allowed),
@@ -113,6 +130,67 @@ module GoodData
113
130
  })
114
131
  end
115
132
 
133
+
134
+
135
+ def self.file_delete2(data, options={})
136
+ local_defaults = {
137
+ :type => FILE_DELETE
138
+ }
139
+ defaults = local_defaults.merge(options[:defaults] || {})
140
+
141
+ base_node(defaults.merge(data), {
142
+ :allowed => [:baseURL],
143
+ :defaults => defaults,
144
+ :required => [:baseURL]
145
+ })
146
+ end
147
+
148
+ def self.backup2(data, options={})
149
+ local_defaults = {
150
+ :type => GD_FILE_BACKUP,
151
+ :mode => "ALL_TO_ONE",
152
+ :appendTimestamp => true,
153
+ :makeDirs => true
154
+ }
155
+ defaults = local_defaults.merge(options[:defaults] || {})
156
+
157
+ base_node(defaults.merge(data), {
158
+ :allowed => [:sourcePath, :makeDirs, :appendTimestamp, :mode, :baseURL],
159
+ :defaults => defaults,
160
+ :required => []
161
+ })
162
+ end
163
+
164
+ def self.file_backup2(data, options={})
165
+ local_defaults = {
166
+ :type => FILE_BACKUP,
167
+ :makeDirs => true
168
+ }
169
+ defaults = local_defaults.merge(options[:defaults] || {})
170
+
171
+ base_node(defaults.merge(data), {
172
+ :allowed => [:sourcePath, :baseURL, :mode, :makeDirs],
173
+ :defaults => defaults,
174
+ :required => []
175
+ })
176
+ end
177
+
178
+ def self.file_copy2(data, options={})
179
+ local_defaults = {
180
+ :type => GD_FILE_COPY,
181
+ :operation => "COPY",
182
+ :makeDirs => true
183
+ }
184
+ defaults = local_defaults.merge(options[:defaults] || {})
185
+
186
+ base_node(defaults.merge(data), {
187
+ :allowed => [:sourcePath, :targetPath, :makeDirs, :operation, :baseURL],
188
+ :defaults => defaults,
189
+ :required => []
190
+ })
191
+ end
192
+
193
+
116
194
  def self.sfdc_reader2(data, options={})
117
195
  local_defaults = {
118
196
  :type => SF_READER,
@@ -127,6 +205,20 @@ module GoodData
127
205
 
128
206
  end
129
207
 
208
+ def self.dedup2(data, options={})
209
+ local_defaults = {
210
+ :type => DEDUP,
211
+ }
212
+ defaults = local_defaults.merge(options[:defaults] || {})
213
+
214
+ base_node(defaults.merge(data), {
215
+ :allowed => [],
216
+ :defaults => defaults,
217
+ :required => [:dedupKey]
218
+ })
219
+
220
+ end
221
+
130
222
  def self.run_graph2(data, options={})
131
223
  local_defaults = {
132
224
  :type => RUN_GRAPH,
@@ -136,8 +228,7 @@ module GoodData
136
228
 
137
229
  base_node(defaults.merge(data), {
138
230
  :allowed => [:paramsToPass, :type, :name, :clientId, :loginHostname, :name, :username, :password, :passwordEncrypted, :token, :id],
139
- :defaults => defaults,
140
- :required => [:graphName]
231
+ :defaults => defaults
141
232
  })
142
233
  end
143
234
 
@@ -247,7 +338,7 @@ module GoodData
247
338
  base_node(defaults.merge(data), {
248
339
  :allowed => [],
249
340
  :defaults => defaults,
250
- :required => []
341
+ :required => [:output_mapping]
251
342
  })
252
343
  end
253
344
 
@@ -312,10 +403,11 @@ module GoodData
312
403
  })
313
404
  end
314
405
 
315
-
316
- def self.file_delete2(data, options={})
406
+ def self.file_copy2(data, options={})
317
407
  local_defaults = {
318
- :type => FILE_DELETE
408
+ :type => FILE_COPY_MOVE,
409
+ :operation => "COPY",
410
+ :baseURL => "${PROJECT}"
319
411
  }
320
412
  defaults = local_defaults.merge(options[:defaults] || {})
321
413
  base_node(defaults.merge(data), {
@@ -325,11 +417,9 @@ module GoodData
325
417
  })
326
418
  end
327
419
 
328
- def self.file_copy2(data, options={})
420
+ def self.gather2(data, options={})
329
421
  local_defaults = {
330
- :type => FILE_COPY_MOVE,
331
- :operation => "COPY",
332
- :baseURL => "${PROJECT}"
422
+ :type => SIMPLE_GATHER,
333
423
  }
334
424
  defaults = local_defaults.merge(options[:defaults] || {})
335
425
  base_node(defaults.merge(data), {
@@ -338,7 +428,7 @@ module GoodData
338
428
  :required => []
339
429
  })
340
430
  end
341
-
431
+
342
432
 
343
433
  def self.copy2(data, options={})
344
434
  local_defaults = {
@@ -0,0 +1,39 @@
1
+ module GoodData
2
+ module CloudConnect
3
+ module Helpers
4
+
5
+ def self.generate_select(tap)
6
+ fields = tap[:fields].map do |f|
7
+ f.has_key?(:multi_currency) ? "convertCurrency(#{f[:name]})" : f[:name]
8
+ end
9
+ condition = tap[:condition].nil?() ? "" : "WHERE #{tap[:condition]}"
10
+ limit = tap[:limit].nil?() ? "" : "LIMIT #{tap[:limit]}"
11
+ "SELECT #{fields.join(', ')} FROM #{tap[:object]} #{condition} #{limit}"
12
+ end
13
+
14
+ def self.generate_incremental_select(tap)
15
+ if tap[:condition].blank?
16
+ tap[:condition] = "SystemModstamp > ${#{tap[:id]}_START} AND SystemModstamp <= ${#{tap[:id]}_END}"
17
+ else
18
+ tap[:condition] += " AND SystemModstamp > ${#{tap[:id]}_START} AND SystemModstamp <= ${#{tap[:id]}_END}"
19
+ end
20
+ generate_select(tap)
21
+ end
22
+
23
+ def self.sf_connection(builder, data)
24
+ builder.Connection({
25
+ :clientId => "${SFDC_CLIENT_ID}",
26
+ :id => "SFDC",
27
+ :loginHostname => "${SFDC_LOGIN_HOSTNAME}",
28
+ :name => "${SFDC_NAME}",
29
+ :password => "${SFDC_PASSWORD}",
30
+ :passwordEncrypted => "true",
31
+ :token => "${SFDC_TOKEN}",
32
+ :type => "SFDC",
33
+ :username => "${SFDC_USERNAME}",
34
+ :passwordEncrypted => "false"
35
+ })
36
+ end
37
+ end
38
+ end
39
+ end
@@ -0,0 +1,94 @@
1
+ module GoodData
2
+ module CloudConnect
3
+ module Helpers
4
+
5
+ include GoodData::CloudConnect
6
+
7
+ def self.loop_over_file(file, options={})
8
+ file_to_loop = options[:file_to_loop]
9
+ token = options[:token]
10
+ graph_to_run = options[:graph_to_run]
11
+
12
+ fail "token not defined" if token.blank?
13
+ fail "file to loop if not defined" if file_to_loop.blank?
14
+ fail "graph_to_run not defined" if graph_to_run.blank?
15
+
16
+
17
+ File.open(file, "w") do |f|
18
+
19
+ builder = Builder::XmlMarkup.new(:target=>f, :indent=>2)
20
+ builder.instruct! :xml, :version=>"1.0", :encoding=>"UTF-8"
21
+ builder.Graph({
22
+ :name => "Contract Checker: File list reader"
23
+ }) do
24
+ builder.Global do
25
+ Helpers::property_file(builder, {:id => "workspace_params", :fileURL => "workspace.prm"})
26
+ Helpers::property_file(builder, {:id => "skiplines_params", :fileURL => "#{token}_counter.prm"})
27
+
28
+ Helpers::create_trash_meta(builder)
29
+ Helpers::create_run_graph_failure_metadata(builder)
30
+ Helpers::create_file_list_meta(builder)
31
+
32
+ end
33
+
34
+ reformat_func = <<HEREDOC
35
+ function integer transform() {
36
+ $out.0.all = "FILE=" + $in.0.filePath;
37
+ return ALL;
38
+ }
39
+ HEREDOC
40
+
41
+ builder.Phase(:number => 0) do
42
+ Core::build_node2(builder, Nodes.reader2({
43
+ :id => "#{token}_reader",
44
+ :guiName => "#{token}_reader",
45
+ :name => "#{token}_reader",
46
+ :dataPolicy => "Strict",
47
+ :skipRows => "${#{token}_SKIP_LINES}",
48
+ :numRecords => 1,
49
+ :fileURL => file_to_loop
50
+ }))
51
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{token} Reformat", :id => "#{token}_reformat", :transformation => reformat_func }))
52
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_reformat:0", :fromNode => "#{token}_reader:0", :metadata => "file_list" }))
53
+ Core::build_node2(builder, Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{token}_writer", :fileURL => "#{token}_item.prm" , :outputFieldNames => "false", :quotedStrings => "false"}))
54
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_writer:0", :fromNode => "#{token}_reformat:0", :metadata => "trash_metadata" }))
55
+ end
56
+ builder.Phase(:number => 1) do
57
+
58
+ reformat_func = <<HEREDOC
59
+ function integer transform() {
60
+ $out.0.all = "#{graph_to_run}";
61
+ return ALL;
62
+ }
63
+ HEREDOC
64
+
65
+ fail_func = <<HEREDOC
66
+ function integer transform() {
67
+ raiseError("Graph #{graph_to_run} failed");
68
+ }
69
+ HEREDOC
70
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{token} Reformat_graph", :id => "#{token}_reformat_graph", :transformation => reformat_func }))
71
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_reformat_graph:0", :fromNode => "#{token}_reformat:1", :metadata => "trash_metadata" }))
72
+ Core::build_node2(builder, Nodes.run_graph2({:guiName => "#{token}_validate", :name => "#{token}_validate", :id => "#{token}_validate" }))
73
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_validate:0", :fromNode => "#{token}_reformat_graph:0", :metadata => "trash_metadata" }))
74
+ Core::build_node2(builder, Nodes.reformat2({:name => "#{token} Reformat_fail", :id => "#{token}_reformat_fail", :transformation => fail_func }))
75
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_reformat_fail:0", :fromNode => "#{token}_validate:1", :metadata => "run_graph_failure_metadata" }))
76
+ Core::build_node2(builder, Nodes.trash2({:name => "#{token}_trash", :id => "#{token}_trash", :debugPrint => true}))
77
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_trash:0", :fromNode => "#{token}_reformat_fail:0", :metadata => "run_graph_failure_metadata"}))
78
+
79
+ end
80
+
81
+ builder.Phase(:number => 2) do
82
+
83
+
84
+ ctl = "function integer generate() {$out.0.all = \"#{token}_SKIP_LINES=\" + toString(${#{token}_SKIP_LINES} + 1);return OK;}"
85
+ Core::build_node2(builder, Nodes.data_generator2({:name => "#{token}_generator", :id => "#{token}_generator", :generate => ctl}))
86
+ Core::build_node2(builder, Nodes.writer2({:name => "PARAMS CSV Writer", :id => "#{token}_csv_writer", :fileURL => "#{token}_counter.prm", :outputFieldNames => "false", :quotedStrings => "false"}))
87
+ Core::build_node2(builder, Nodes.edge2({:toNode => "#{token}_csv_writer:0", :fromNode => "#{token}_generator:0", :metadata => "trash_metadata" }))
88
+ end
89
+ end
90
+ end
91
+ end
92
+ end
93
+ end
94
+ end