gd_bam 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/runtime.rb ADDED
@@ -0,0 +1,517 @@
1
+ require 'nodes/clover_gen'
2
+ require 'nodes/nodes'
3
+ require 'nodes/dependency'
4
+ require 'dsl/project_dsl'
5
+ require 'pathname'
6
+ require 'json'
7
+ require 'builder'
8
+ require 'erubis'
9
+ require 'salesforce'
10
+ require 'active_support/time'
11
+ require 'es'
12
+ require 'gooddata'
13
+ require 'logger'
14
+ require 'highline'
15
+
16
+ module GoodData
17
+
18
+ module CloverGenerator
19
+
20
# Root paths of the BAM generator itself (relative to this file).
GENERATOR_ROOT = Pathname(__FILE__).expand_path.dirname
BAM_ROOT = GENERATOR_ROOT.parent

BAM_DEFINED_GRAPHS_ROOT = GENERATOR_ROOT + "graphs"
REPO_ROOT = GENERATOR_ROOT + "repo"
TEMPLATE_ROOT = BAM_ROOT + "templates"

# Roots of the user's project definition — resolved against the CWD the
# tool is invoked from.
DEFINITION_ROOT = Pathname.new(Dir.pwd)
USER_DEFINED_GRAPHS_ROOT = DEFINITION_ROOT + "local_graphs"

CLOVER_PROJECT_ROOT = DEFINITION_ROOT + "clover-project"
CLOVER_DOWNLOADERS_ROOT = DEFINITION_ROOT + "downloader-project"

# Directory layout inside a generated CloudConnect project.
PROJECT_GRAPHS_ROOT = "graphs"
PROJECT_METADATA_ROOT = "metadata"
PROJECT_DATA_ROOT = "data"

PARAM_FILE = DEFINITION_ROOT + 'params.json'
FLOWS_ROOT = DEFINITION_ROOT + "flows"

# User parameters loaded from params.json (when present) merged over the
# defaults. The original assigned PARAMS twice, which triggers Ruby's
# "already initialized constant PARAMS" warning — build the final hash in
# a single assignment instead.
PARAMS = {:additional_params => nil}.merge(
  File.exist?(PARAM_FILE) ? JSON.parse(File.read(PARAM_FILE), :symbolize_names => true) : {}
)

include GoodData::CloverGenerator::Dependency
44
+
45
+
46
# Render an Erubis template with the given params. Returns the rendered
# string, or writes it to options[:to_file] when provided. Templates are
# resolved against options[:root] (default TEMPLATE_ROOT).
def self.render_template(template, params, options={})
  target = options[:to_file]
  template_root = options[:root] || TEMPLATE_ROOT
  source = File.read(template_root + Pathname(template))
  rendered = Erubis::Eruby.new(source).result(params)
  return rendered if target.nil?
  File.open(target, 'w') { |f| f.write(rendered) }
end
57
+
58
# Create the skeleton of a CloudConnect project under +base+:
# graphs/metadata/data directories, an empty params.txt, and the
# .project / workspace.prm files rendered from templates.
def self.setup_clover_project(base)
  FileUtils::mkdir_p(base + PROJECT_GRAPHS_ROOT)
  FileUtils::mkdir_p(base + PROJECT_METADATA_ROOT)
  FileUtils::mkdir_p(base + PROJECT_DATA_ROOT)
  FileUtils::touch(base + 'params.txt')
  render_template("project.erb", PARAMS, :to_file => base + '.project')
  render_template("workspace.prm.erb", PARAMS, :to_file => base + 'workspace.prm')
end
66
+
67
# Scaffold a fresh BAM project directory called +name+: params.json, the
# flows/sinks/taps directories, and example tap/sink/flow definitions.
# Fails if the directory already exists.
def self.setup_bash_structure(name)
  fail "Directory \"#{name}\" already exists. Please remove it if you want to move forward." if File.exist?(name)
  FileUtils::mkdir_p name
  FileUtils::cd(name) do
    render_template("params.json.erb", PARAMS, :to_file => 'params.json')
    %w(flows sinks taps).each { |dir| FileUtils::mkdir_p(dir) }

    setup_flow('user')
    render_template("source.json.erb", PARAMS, :to_file => 'taps/source_example.json')
    render_template("dataset.json.erb", PARAMS, :to_file => 'sinks/dataset_example.json')
  end
end
83
+
84
# Generate flows/<name>.rb from the flow template.
def self.setup_flow(name)
  target = "flows/#{name}.rb"
  render_template("flow.rb.erb", {:flow_name => name}, :to_file => target)
end
87
+
88
# Generate taps/<name>.json from the tap template.
def self.setup_tap(name)
  target = "taps/#{name}.json"
  render_template("tap.json.erb", {:tap_name => name}, :to_file => target)
end
91
+
92
# Remove the generated clover-project directory entirely.
def self.clobber_clover_project
  FileUtils.rm_rf(CLOVER_PROJECT_ROOT)
end
95
+
96
# Remove the generated downloader-project directory entirely.
def self.clobber_downloader_project
  FileUtils.rm_rf(CLOVER_DOWNLOADERS_ROOT)
end
99
+
100
# Build the in-memory project model from the DSL definition and run it
# against the graph repositories bundled with BAM. Returns the project.
def self.build_project
  project = GoodData::CloverGenerator::DSL.project do |proj|
    project_name("Goodsales for ABCD")

    use_dims ["GoodSales/opportunity", "GoodSales/user"]
    use_usecase("GoodSales/quota_attainment")
  end

  repos = GoodData::CloverGenerator::Repo.load(REPO_ROOT).map { |config| N.new(config) }
  project.run(repos)
  project
end
112
+
113
# Compare the locally defined datasets with the datasets that exist in
# the GoodData project and return the combined validation report.
def self.validate_datasets
  project = build_project
  local_datasets = project.get_datasets
  GoodData.connect(PARAMS[:gd_login], PARAMS[:gd_pass])
  GoodData.project = PARAMS[:project_pid]
  entries = GoodData.get("/gdc/md/#{PARAMS[:project_pid]}/query/datasets")['query']['entries']
  gd_datasets = entries.map { |entry| GoodData.get(entry['link']) }
  metadata_report = GoodData::CloverGenerator::validate_gd_datasets_metadata(local_datasets, gd_datasets)
  data_report = GoodData::CloverGenerator::validate_gd_datasets(local_datasets, gd_datasets)
  metadata_report.merge(data_report)
end
123
+
124
# List the dataload processes deployed in the GoodData project as
# [name, self-link] pairs. The +options+ hash is accepted for interface
# compatibility but is currently unused.
def self.procs_list(options={})
  # The original also built the whole project model (build_project /
  # get_datasets) here but never used the result; listing processes only
  # needs an authenticated connection.
  GoodData.connect(PARAMS[:gd_login], PARAMS[:gd_pass])
  procs = GoodData.get("/gdc/projects/#{PARAMS[:project_pid]}/dataload/processes")
  procs["processes"]["items"].map { |p| [p["process"]["name"], p["process"]["links"]["self"]] }
end
131
+
132
# Validate all Salesforce taps against live SF metadata and pretty-print
# the resulting report (pp also returns it).
def self.validate_taps
  project = build_project
  sf_sources = project.get_sources.select { |tap| tap[:source] == "salesforce" }
  client = get_sf_client(PARAMS)
  report = GoodData::CloverGenerator::validate_sf_metadata(client, sf_sources)
  pp report
end
140
+
141
# Generate README.md documenting every tap: a markdown section per source
# object listing its field mappings (field -> acts_as alias).
def self.taps_generate_docs
  project = build_project
  sources = project.get_sources

  # Use String#<< (in-place append) instead of the original += which
  # allocated a new string on every iteration.
  taps = ""
  sources.each do |source|
    mappings = GoodData::CloverGenerator.transformation_acts_as(source)
    taps << "#####{source[:object]}"
    taps << "\n"
    mappings.each do |field|
      taps << "* #{field[0]} -> #{field[1]}"
      taps << "\n"
    end
  end
  render_template("README.md.erb", PARAMS.merge(:taps => taps), :to_file => 'README.md', :root => DEFINITION_ROOT)
end
158
+
159
# Synchronize the local dataset model into GoodData via the CL tool:
# for each dataset, write a temporary XML config file, render an update
# script from a template (dry-run variant when options[:dry] is set),
# shell out to gdi.sh to execute it, then delete the config file.
def self.model_sync(options)
  dry_run = options[:dry]
  project = build_project
  datasets = project.get_datasets
  datasets.each do |ds|
    # Temporary CL config file for this dataset, created in the CWD.
    dataset_path = "cl_file_#{ds[:id]}"
    File.open(dataset_path, "w") do |temp|
      builder = Builder::XmlMarkup.new(:target=>temp, :indent=>2)
      # NOTE(review): the inner block params below shadow the outer
      # `builder` variable — harmless here since Builder yields the same
      # markup object, but it triggers Ruby shadowing warnings.
      builder.schema do |builder|
        builder.name(ds[:gd_name])
        builder.title(ds[:gd_name])
        builder.columns do |b|
          ds[:fields].each do |f|
            builder.column do |builder|
              builder.title(f[:name])
              builder.ldmType(f[:type].upcase)
              # A field may reference another dataset via :for or :ref,
              # optionally qualified by :schema.
              builder.reference(f[:for]) if f.has_key?(:for)
              builder.reference(f[:ref]) if f.has_key?(:ref)
              builder.schemaReference(f[:schema]) if f.has_key?(:schema)
              if f[:type] == "date"
                # Date fields reference their date dimension (:dd).
                builder.schemaReference("#{f[:dd]}")
                builder.name("#{f[:name]}")
              else
                builder.name(f[:name] || f[:ref])
              end
            end
          end
        end
      end
    end
    template_name = dry_run ? "update_dataset_dry.script.erb" : "update_dataset.script.erb"
    render_template(template_name, PARAMS.merge({"config_file" => dataset_path}), :to_file => 'update_dataset.script')
    puts "Generate #{ds[:id]}"
    # FIXME(review): hard-coded, user-specific path to the GoodData CLI —
    # this only works on the original author's machine; should come from
    # configuration or PATH.
    system("~/Downloads/gooddata-cli-1.2.65/bin/gdi.sh update_dataset.script --username #{PARAMS[:gd_login]} --password #{PARAMS[:gd_pass]}")
    File.delete(dataset_path)
  end
end
196
+
197
# Generate the standalone downloader project: a main run graph plus an
# incremental downloading graph covering every incremental Salesforce tap.
def self.generate_downloaders(options={})
  setup_clover_project(CLOVER_DOWNLOADERS_ROOT)
  project = build_project
  incremental_sf_taps = project.get_sources.select do |tap|
    tap[:source] == "salesforce" && tap[:incremental] == true
  end
  create_incremental_downloader_run_graph(CLOVER_DOWNLOADERS_ROOT + PROJECT_GRAPHS_ROOT + "main.grf", incremental_sf_taps)
  sf_credentials = {
    :password => PARAMS[:sf_password],
    :token => PARAMS[:sf_token],
    :login => PARAMS[:sf_login],
    :sf_server => PARAMS[:sf_server],
    :s3_backup => false
  }
  GoodData::CloverGenerator::create_incremental_downloading_graph(CLOVER_DOWNLOADERS_ROOT + PROJECT_GRAPHS_ROOT + "incremental.grf", incremental_sf_taps, sf_credentials)
end
211
+
212
+
213
# Kick off execution of the deployed graph via the given dataload link
# and poll until the clover execution task finishes.
def self.execute_process(link, dir)
  payload = {
    :graphExecution => {
      :graph => "./#{dir}/graphs/main.grf",
      :params => {}
    }
  }
  result = GoodData.post(link, payload)
  GoodData.poll(result, "cloverExecutionTask")
end
222
+
223
# Establish a GoodData session and select the working project, both read
# from PARAMS.
def self.connect_to_gd
  GoodData.connect(PARAMS[:gd_login], PARAMS[:gd_pass])
  GoodData.project = PARAMS[:project_pid]
end
228
+
229
# Create a (temporary) email notification channel on the current user's
# profile.
#
# options[:to] — recipient address; defaults to the previously hard-coded
#   address for backward compatibility, but can now be overridden.
# With a block: yields the created channel and deletes it afterwards,
# even if the block raises. Without a block: returns the channel response
# and leaves cleanup to the caller.
def self.create_email_channel(options={}, &block)
  recipient = options[:to] || "svarovsky@gooddata.com"
  data = {
    :channelConfiguration => {
      :configuration => {
        :emailConfiguration => {
          :to => recipient
        }
      },
      :meta => {
        :title => "temporary email channel"
      }
    }
  }
  profile_id = GoodData.connection.user["profile"].split("/").last
  res = GoodData.post("/gdc/account/profile/#{profile_id}/channelConfigurations", data)
  self_link = res["channelConfiguration"]["meta"]["uri"]
  if block
    begin
      block.call(res)
    ensure
      GoodData.delete(self_link)
    end
  else
    res
  end
end
255
+
256
# Zip the contents of +dir+, upload the archive and deploy it as a clover
# transformation. Creates a new process, or updates options[:process]
# when given. Returns the API response (the created/updated
# "cloverTransformation" document).
#
# BUG FIX: the original ended with `puts ... if verbose`, so the method
# always returned nil and callers (e.g. `deploy`) crashed when they read
# the response. Capture the Tempfile block's value and return it.
def self.deploy_graph(dir, options={})
  deploy_name = options[:name]
  verbose = options[:verbose] || false
  puts HighLine::color("Deploying #{dir}", HighLine::BOLD) if verbose

  res = Tempfile.open("deploy-graph-archive") do |temp|
    # Pack every regular file under dir into the zip archive.
    Zip::ZipOutputStream.open(temp.path) do |zio|
      Dir.glob("./#{dir}/**/*") do |item|
        puts "including #{item}" if verbose
        unless File.directory?(item)
          zio.put_next_entry(item)
          zio.print IO.read(item)
        end
      end
    end

    GoodData.connection.upload(temp.path)
    process_id = options[:process]

    data = {
      :cloverTransformation => {
        :name => deploy_name || "#{PARAMS[:project_name]}",
        :path => "/uploads/#{File.basename(temp.path)}"
      }
    }
    # POST creates a new transformation; PUT updates an existing one.
    if process_id.nil?
      GoodData.post("/gdc/projects/#{PARAMS[:project_pid]}/etl/clover/transformations", data)
    else
      GoodData.put("/gdc/projects/#{PARAMS[:project_pid]}/etl/clover/transformations/#{process_id}", data)
    end
  end
  puts HighLine::color("Deploy DONE #{dir}", HighLine::BOLD) if verbose
  res
end
290
+
291
# Deploy +dir+ as a clover transformation. With a block, the deployment
# is temporary: the block is yielded the deployment response and the
# transformation is deleted afterwards, even if the block raises.
# Without a block, behaves exactly like deploy_graph.
def self.deploy(dir, options={}, &block)
  if block
    res = nil
    begin
      res = deploy_graph(dir, options)
      block.call(res)
    ensure
      # BUG FIX: if deploy_graph itself raised, res is nil and the
      # original ensure blew up with NoMethodError; only clean up when a
      # deployment actually happened.
      if res
        self_link = res["cloverTransformation"]["links"]["self"]
        GoodData.delete(self_link)
      end
    end
  else
    deploy_graph(dir, options)
  end
end
306
+
307
+
308
# Generate the full clover project from the flow definitions: for every
# flow, emit the download graphs for its taps, copy/wire user-provided
# graphs (with per-step metadata transformations and CSV move graphs),
# emit upload graphs for its sinks, wrap each flow in a <flow>_main.grf
# run graph, and finally wrap all flows in graphs/main.grf.
# options[:only] restricts generation to the flow with that name.
def self.run(options)

  only_flow = options[:only]
  setup_clover_project(CLOVER_PROJECT_ROOT)
  p = build_project
  sources = p.get_sources
  datasets = p.get_datasets

  # Each flows/*.rb file evaluates to a flow object (via the DSL).
  flows = []
  FileUtils::cd FLOWS_ROOT do
    flows_sources = Dir.glob("*.rb")
    flows = flows_sources.map do |f|
      instance_eval(File.read(f))
    end
  end

  flows = flows.find_all {|flow| flow && flow.name == only_flow} unless only_flow.nil? || only_flow.empty?
  super_flow = []
  FileUtils::cd CLOVER_PROJECT_ROOT do

    flows.each do |f|

      # current_metadata tracks, per logical source name, the metadata as
      # it is transformed step by step through the flow.
      current_metadata = {}
      steps_to_be_wrapped = []
      flow_sources = []
      if f.nil?
        puts "Flow skipped"
        next
      end
      name = f.name
      step_no = 0

      begin
        f.steps.each_with_index do |s, i|

          if s[:type] == :tap
            # --- tap step: generate a download graph for the source ---
            source_name = s[:source_name] || f.name
            source = sources.find do |source|
              source[:id] == source_name
            end

            fail "Source \"#{source_name}\" was not found" if source.nil?

            flow_sources << source

            dataset_name = source[:dataset] || source[:id]

            current_metadata[source_name] = GoodData::CloverGenerator::create_metadata(source)
            graph_name = "graphs/#{dataset_name}_#{source[:source]}_#{source[:type]}.grf"
            steps_to_be_wrapped << {
              :name => "#{source_name}_download",
              :file => graph_name,
              :flow => source_name
            }

            if source[:incremental] == true
              # Incremental taps download from the event store; the
              # synthetic "timestamp" field is dropped from the metadata.
              current_metadata[source_name] = GoodData::CloverGenerator::DSL::Metadata.new(current_metadata[source_name]).change do |m|
                m.remove("timestamp")
              end.to_hash

              GoodData::CloverGenerator::create_es_downloading_graph(graph_name, [source], {
                :metadata => current_metadata[source_name],
                :s3_backup => false
              })
            else
              # Full taps download straight from Salesforce.
              GoodData::CloverGenerator::create_sf_downloading_graph(graph_name, [source], {
                :password => PARAMS[:sf_password],
                :token => PARAMS[:sf_token],
                :login => PARAMS[:sf_login],
                :sf_server => PARAMS[:sf_server],
                :metadata => current_metadata[source_name],
                :s3_backup => false
              })
            end

            step_no += 1
          elsif s[:type] == :upload
            # --- sink step: generate an upload graph for the dataset ---
            source_name = s[:id] || f.name
            dataset = datasets.find {|d| d[:id] == source_name}

            fail "Dataset \"#{source_name}\" was not found" if dataset.nil?
            fail "Sink needs to have id defined" if dataset[:id].nil?

            unless current_metadata.has_key?(source_name)
              fail("Source #{source_name} was not found in current metadata")
            end
            fail "Dataset \"#{f.name}\" was not found" if dataset.nil?
            metadata = current_metadata[source_name]

            graph_name = "graphs/#{dataset[:id]}_#{dataset[:type]}.grf"
            steps_to_be_wrapped << {
              :name => "#{name}_upload",
              :file => graph_name,
              :flow => source_name
            }

            GoodData::CloverGenerator::create_uploading_graph(graph_name, {
              :datasets => dataset,
              :metadata => current_metadata[source_name]
            })
            step_no += 1
          elsif s[:type] == :user_provided
            # --- user graph step: copy the graph in and wire its
            # metadata / CSV movement around it ---
            s[:metadata_block] && s[:metadata_block].each do |val|
              name = val[:name]
              next if(name.nil?)
              sources_names = flow_sources.map {|flow_source| flow_source[:id]}
              included_in_flow = sources_names.include?(name)
              unless included_in_flow
                # binding.pry
                # fail "Metadata \"#{name}\" is not in the defined by any source"
              end
            end
            graph_name = s[:graph]
            graph_filename = "#{s[:graph]}.grf"

            # Local (project) graphs take precedence over BAM-bundled ones.
            if File.exist?(USER_DEFINED_GRAPHS_ROOT + graph_filename)
              FileUtils::cp(USER_DEFINED_GRAPHS_ROOT + graph_filename, CLOVER_PROJECT_ROOT + PROJECT_GRAPHS_ROOT)
            elsif File.exist?(BAM_DEFINED_GRAPHS_ROOT + graph_filename)
              FileUtils::cp(BAM_DEFINED_GRAPHS_ROOT + graph_filename, CLOVER_PROJECT_ROOT + PROJECT_GRAPHS_ROOT)
            else
              fail("The graph \"#{graph_filename}\" was not found in any location (local, global)")
            end
            graph_filename = "graphs/#{graph_filename}"

            step_no += 1
            # For each declared metadata input: save the in/out metadata
            # XML, apply the optional transformation block, and generate a
            # graph that moves the source CSV into the j-th input slot.
            s[:metadata_block] && s[:metadata_block].each_with_index do |metadata, i|
              j = i+1
              bl = metadata[:block]
              name = metadata[:name] || f.name
              as = metadata[:out_as]
              FileUtils::mkdir_p("metadata/#{f.name}/#{graph_name}")

              input_meta = current_metadata[name].clone

              input_meta[:name] = "in_#{j}"
              GoodData::CloverGenerator::save_metadata("metadata/#{f.name}/#{graph_name}/#{j}_in.xml", input_meta)
              m = GoodData::CloverGenerator::DSL::Metadata.new(current_metadata[name])
              new_m = (bl && bl.call(m)) || current_metadata[name]

              current_metadata[name] = new_m
              unless as.nil?
                # out_as publishes the transformed metadata under an alias.
                as_metadata = new_m.clone
                as_metadata[:name] = as
                current_metadata[as] = as_metadata
              end

              output_meta = current_metadata[name].clone
              output_meta[:name] = "out_#{j}"
              GoodData::CloverGenerator::save_metadata("metadata/#{f.name}/#{graph_name}/#{j}_out.xml", output_meta)

              GoodData::CloverGenerator::create_moving_graph("graphs/#{f.name}_#{graph_name}_move_in_#{j}.grf", {
                :source => "${DATA}/#{name}.csv",
                :target => "${DATA}/#{j}_in.csv",
                :operation => "MOVE"
              })

              steps_to_be_wrapped << {
                :name => "graphs/#{f.name}_#{graph_name}_move_in_#{j}.grf",
                :file => "graphs/#{f.name}_#{graph_name}_move_in_#{j}.grf",
                :flow => "kopirujeme"
              }
            end

            steps_to_be_wrapped << {
              :name => graph_name,
              :file => graph_filename,
              :flow => f.name
            }
            # The graph's output CSV is renamed to the out_as alias when
            # one exists, else to the sole metadata name.
            output_id = if s[:metadata_block] && s[:metadata_block].any? {|metadata| metadata[:out_as]}
              s[:metadata_block].find {|metadata| metadata[:out_as]}[:out_as]
            end

            output_id = s[:metadata_block].first[:name] if output_id.nil? && s[:metadata_block].length == 1

            GoodData::CloverGenerator::create_moving_graph("graphs/#{f.name}_#{graph_name}_move_out.grf", {
              :source => "${DATA}/out.csv",
              :target => "${DATA}/#{output_id}.csv",
              :operation => "MOVE"
            })

            steps_to_be_wrapped << {
              :name => "graphs/#{f.name}_#{graph_name}_move_out.grf",
              :file => "graphs/#{f.name}_#{graph_name}_move_out.grf",
              :flow => "kopirujeme"
            }

          end
        end
      rescue GoodData::CloverGenerator::DSL::RemoveMetadataFieldError => e
        # Typo fix: the original message read "there was a roblem".
        puts "Removing field \"#{e.field}\" from metadata \"#{e.metadata.name}\" in Flow \"#{f.name}\" there was a problem with step X."
      end

      # Wrap this flow's steps in a single run graph.
      GoodData::CloverGenerator::create_run_graph("graphs/#{f.name}_main.grf", {
        :subgraphs => steps_to_be_wrapped
      })
      super_flow << {
        :name => name,
        :file => "graphs/#{f.name}_main.grf",
        :flow => f.name
      }
    end

    # The top-level run graph chains every flow's main graph.
    GoodData::CloverGenerator::create_run_graph("graphs/main.grf", {
      :subgraphs => super_flow
    })
  end
end
515
+ end
516
+ end
517
+ end
@@ -0,0 +1,13 @@
1
+ {
2
+ "type" : "dataset"
3
+ ,"id" : "user"
4
+ ,"gd_name" : "user"
5
+ ,"fields" : [
6
+ {
7
+ "name" : "Id"
8
+ },
9
+ {
10
+ "name" : "Name"
11
+ }
12
+ ]
13
+ }
@@ -0,0 +1,12 @@
1
+ GoodData::CloverGenerator::DSL::flow("<%= flow_name %>") do |f|
2
+ tap(:id => "user")
3
+
4
+ graph("process_owner")
5
+ metadata("user") do |m|
6
+ m.remove("FirstName")
7
+ m.remove("LastName")
8
+ m.add(:name => "Name", :type => "string")
9
+ end
10
+
11
+ sink(:id => "user")
12
+ end
@@ -0,0 +1,7 @@
1
+ {
2
+ "project_name" : "",
3
+ "project_pid" : "",
4
+ "sf_password" : "",
5
+ "sf_token" : "",
6
+ "sf_login" : ""
7
+ }
@@ -0,0 +1,18 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <projectDescription>
3
+ <name><%= project_name%></name>
4
+ <comment></comment>
5
+ <projects>
6
+ </projects>
7
+ <buildSpec>
8
+ <buildCommand>
9
+ <name>org.eclipse.jdt.core.javabuilder</name>
10
+ <arguments>
11
+ </arguments>
12
+ </buildCommand>
13
+ </buildSpec>
14
+ <natures>
15
+ <nature>com.cloveretl.gui.runtime.CloudConnect</nature>
16
+ <nature>org.eclipse.jdt.core.javanature</nature>
17
+ </natures>
18
+ </projectDescription>
@@ -0,0 +1,22 @@
1
+ {
2
+ "source" : "salesforce"
3
+ ,"object" : "User"
4
+ ,"id" : "user"
5
+ ,"fields" : [
6
+ {
7
+ "name" : "Id"
8
+ },
9
+ {
10
+ "name" : "FirstName"
11
+ },
12
+ {
13
+ "name" : "LastName"
14
+ },
15
+ {
16
+ "name" : "Region"
17
+ },
18
+ {
19
+ "name" : "Department"
20
+ }
21
+ ]
22
+ }
@@ -0,0 +1,16 @@
1
+ {
2
+ "type" : "tap"
3
+ ,"source" : "salesforce"
4
+ ,"object" : "<%= tap_name %>"
5
+ ,"id" : "<%= tap_name %>"
6
+ ,"incremental" : false
7
+ ,"fields" : [
8
+ {
9
+ "name" : "Id"
10
+ },
11
+ {
12
+ "name" : "Name", "acts_as": ["Name_alias"]
13
+ }
14
+ ]
15
+ // ,"limit": "10"
16
+ }
@@ -0,0 +1,4 @@
1
+ OpenProject(id="<%= project_pid %>");
2
+ UseCsv(csvDataFile="xy", configFile="<%= config_file %>", hasHeader="true", separator = ",");
3
+ GenerateUpdateMaql(maqlFile="<%= config_file %>.maql");
4
+ ExecuteMaql(maqlFile="<%= config_file %>.maql");
@@ -0,0 +1,3 @@
1
+ OpenProject(id="<%= project_pid %>");
2
+ UseCsv(csvDataFile="xy", configFile="<%= config_file %>", hasHeader="true", separator = ",");
3
+ GenerateUpdateMaql(maqlFile="<%= config_file %>.maql");
@@ -0,0 +1,25 @@
1
+ #(Please use slash '/' character as a path delimiter in all path specifications, e.g., C:/Users/username/workspace/project)
2
+
3
+ PROJECT= .
4
+ DATA=${PROJECT}/data
5
+ DATA_SOURCE=${PROJECT}/data/source
6
+ DATA_TRANSFORM=${PROJECT}/data/transform
7
+ DATA_GOODDATA=${PROJECT}/data/gooddata
8
+ DATA_TMP=${PROJECT}/data/tmp
9
+ GRAPH=${PROJECT}/graph
10
+ META=${PROJECT}/meta
11
+ TRANS=${PROJECT}/trans
12
+ MODEL=${PROJECT}/model
13
+
14
+
15
+ # GoodData working project
16
+ # GDC_PROJECT_ID identifies the GoodData working project. Transformed data will be uploaded into the datasets of the GoodData working project.
17
+ # The data model information necessary for transformation definition is also obtained from this project.
18
+ # You can find the ID of a project in the project context menu in Server Explorer.
19
+ GDC_PROJECT_ID=<%= project_pid %>
20
+
21
+ <% if additional_params %>
22
+ <% additional_params.each_pair do |key, val| %>
23
+ <%= "#{key}=#{val}" %>
24
+ <% end %>
25
+ <% end %>