gooddata 0.6.0.pre6 → 0.6.0.pre7

@@ -1,18 +1,45 @@
  module GoodData::Helpers
- def home_directory
+ def self.home_directory
  running_on_windows? ? ENV['USERPROFILE'] : ENV['HOME']
  end

- def running_on_windows?
+ def self.running_on_windows?
  RUBY_PLATFORM =~ /mswin32|mingw32/
  end

- def running_on_a_mac?
+ def self.running_on_a_mac?
  RUBY_PLATFORM =~ /-darwin\d/
  end

- def error(msg)
+ def self.error(msg)
  STDERR.puts(msg)
  exit 1
  end
+
+ def self.find_goodfile(pwd, options={})
+ root = Pathname(options[:root] || '/' )
+ pwd = Pathname(pwd).expand_path
+ begin
+ gf = pwd + "Goodfile"
+ if gf.exist?
+ return gf
+ end
+ pwd = pwd.parent
+ end until root == pwd
+ fail "Goodfile not found in #{pwd.to_s} or any parent up to #{root.to_s}"
+ end
+
+ def self.hash_dfs(thing, &block)
+ if !thing.is_a?(Hash) && !thing.is_a?(Array)
+ elsif thing.is_a?(Array)
+ thing.each do |child|
+ hash_dfs(child, &block)
+ end
+ else
+ thing.each do |key, val|
+ yield(thing, key)
+ hash_dfs(val, &block)
+ end
+ end
+ end
  end
@@ -1,5 +1,5 @@
  require 'open-uri'
-
+ require 'active_support/all'
  ##
  # Module containing classes that counter-part GoodData server-side meta-data
  # elements, including the server-side data model.
@@ -49,6 +49,134 @@ module GoodData
  GoodData.post ldm_uri, { 'manage' => { 'maql' => schema.to_maql_create } }
  end

+ # Load given file into a data set described by the given schema
+ def upload_data(path, manifest, options={})
+ project = options[:project] || GoodData.project
+ # mode = options[:mode] || "FULL"
+ path = path.path if path.respond_to? :path
+ inline_data = path.is_a?(String) ? false : true
+
+ # create a temporary zip file
+ dir = Dir.mktmpdir
+ begin
+ Zip::File.open("#{dir}/upload.zip", Zip::File::CREATE) do |zip|
+ # TODO make sure schema columns match CSV column names
+ zip.get_output_stream('upload_info.json') { |f| f.puts JSON.pretty_generate(manifest) }
+ if inline_data
+ zip.get_output_stream('data.csv') do |f|
+ path.each do |row|
+ f.puts row.to_csv
+ end
+ end
+ else
+ zip.add('data.csv', path)
+ end
+ end
+
+ # upload it
+ GoodData.upload_to_user_webdav("#{dir}/upload.zip", :directory => File.basename(dir))
+ ensure
+ FileUtils.rm_rf dir
+ end
+
+ # kick the load
+ pull = { 'pullIntegration' => File.basename(dir) }
+ link = project.md.links('etl')['pull']
+ task = GoodData.post link, pull
+ while (GoodData.get(task["pullTask"]["uri"])["taskStatus"] === "RUNNING" || GoodData.get(task["pullTask"]["uri"])["taskStatus"] === "PREPARED") do
+ sleep 30
+ end
+ if (GoodData.get(task["pullTask"]["uri"])["taskStatus"] == "ERROR")
+ s = StringIO.new
+ GoodData.download_form_user_webdav(File.basename(dir) + '/upload_status.json', s)
+ js = JSON.parse(s.string)
+ fail "Load Failed with error #{JSON.pretty_generate(js)}"
+ end
+ puts "Done loading"
+ end
+
+ end
+
+ class ProjectBlueprint
+
+ attr_accessor :data
+
+ def change(&block)
+ builder = ProjectBuilder.create_from_data(self)
+ block.call(builder)
+ builder
+ @data = builder.to_hash
+ self
+ end
+
+ def datasets
+ data[:datasets].map {|d| SchemaBlueprint.new(d)}
+ end
+
+ def get_dataset(name)
+ ds = data[:datasets].find {|d| d[:name] == name}
+ SchemaBlueprint.new(ds) unless ds.nil?
+ end
+
+ def initialize(init_data)
+ @data = init_data
+ end
+
+ end
+
+ class SchemaBlueprint
+
+ attr_accessor :data
+
+ def change(&block)
+ builder = SchemaBuilder.create_from_data(self)
+ block.call(builder)
+ builder
+ @data = builder.to_hash
+ self
+ end
+
+ def initialize(init_data)
+ @data = init_data
+ end
+
+ def upload(source, options={})
+ project = options[:project] || GoodData.project
+ mode = options[:load] || "FULL"
+ project.upload(source, to_schema, mode)
+ end
+
+ def name
+ data[:name]
+ end
+
+ def title
+ data[:title]
+ end
+
+ def to_hash
+ data
+ end
+
+ def columns
+ data[:columns]
+ end
+
+ def to_schema
+ Schema.new(to_hash)
+ end
+
+ def to_manifest
+ to_schema.to_manifest
+ end
+
+ def pretty_print(printer)
+ printer.text "Schema <#{object_id}>:\n"
+ printer.text " Name: #{name}\n"
+ printer.text " Columns: \n"
+ printer.text columns.map {|c| " #{c[:name]}: #{c[:type]}"}.join("\n")
+ end
+
  end

  class ProjectBuilder
@@ -56,13 +184,19 @@ module GoodData
  attr_reader :title, :datasets, :reports, :metrics, :uploads, :users, :assert_report, :date_dimensions

  class << self
-
+
+ def create_from_data(blueprint)
+ pb = ProjectBuilder.new
+ pb.data = blueprint.to_hash
+ pb
+ end
+
  def create(title, options={}, &block)
  pb = ProjectBuilder.new(title)
  block.call(pb)
  pb
  end
-
+
  end

  def initialize(title)
@@ -129,8 +263,14 @@ module GoodData
  @users << users
  end

- def to_json
- JSON.pretty_generate(to_hash)
+ def to_json(options={})
+ eliminate_empty = options[:eliminate_empty] || false
+
+ if eliminate_empty
+ JSON.pretty_generate(to_hash.reject {|k, v| v.is_a?(Enumerable) && v.empty?})
+ else
+ JSON.pretty_generate(to_hash)
+ end
  end

  def to_hash
@@ -147,10 +287,14 @@ module GoodData
  }
  end

+ def get_dataset(name)
+ datasets.find {|d| d.name == name}
+ end
+
  end

  class DashboardBuilder
-
+
  def initialize(title)
  @title = title
  @tabs = []
@@ -172,7 +316,7 @@ module GoodData
  end

  class TabBuilder
-
+
  def initialize(title)
  @title = title
  @stuff = []
@@ -193,15 +337,35 @@ module GoodData

  class SchemaBuilder

- attr_accessor :title, :name
+ attr_accessor :data
+
+ class << self
+
+ def create_from_data(blueprint)
+ sc = SchemaBuilder.new
+ sc.data = blueprint.to_hash
+ sc
+ end
+
+ end

  def initialize(name=nil)
- @name = name
- @columns = []
+ @data = {
+ :name => name,
+ :columns => []
+ }
+ end
+
+ def name
+ data[:name]
+ end
+
+ def columns
+ data[:columns]
  end

  def add_column(column_def)
- @columns.push(column_def)
+ columns.push(column_def)
  self
  end

@@ -233,21 +397,18 @@ module GoodData
  add_column({ :type => :reference, :name => name}.merge(options))
  end

- def to_schema
- Schema.new(to_hash)
- end
-
  def to_json
  JSON.pretty_generate(to_hash)
  end

  def to_hash
- h = {
- :name => @name,
- :columns => @columns
- }
- h.has_key?(:title) ? h.merge({:title => h[:title]}) : h
+ data
  end
+
+ def to_schema
+ Schema.new(to_hash)
+ end
+
  end

  class ProjectCreator
@@ -257,20 +418,20 @@ module GoodData

  spec = options[:spec] || fail("You need to provide spec for migration")
  spec = spec.to_hash
- project = options[:project]
+
  token = options[:token] || fail("You need to specify token for project creation")
- new_project = GoodData::Project.create(:title => spec[:title], :auth_token => token)
+ project = options[:project] || GoodData::Project.create(:title => spec[:title], :auth_token => token)

  begin
- GoodData.with_project(new_project) do |p|
- migrate_date_dimensions(p, spec[:date_dimensions])
- migrate_datasets(p, spec[:datasets])
+ GoodData.with_project(project) do |p|
+ migrate_date_dimensions(p, spec[:date_dimensions] || [])
+ migrate_datasets(p, spec[:datasets] || [])
  load(p, spec)
- migrate_metrics(p, spec[:metrics])
- migrate_reports(p, spec[:reports])
- migrate_dashboards(p, spec[:dashboards])
- migrate_users(p, spec[:users])
- execute_tests(p, spec[:assert_tests])
+ migrate_metrics(p, spec[:metrics] || [])
+ migrate_reports(p, spec[:reports] || [])
+ migrate_dashboards(p, spec[:dashboards] || [])
+ migrate_users(p, spec[:users] || [])
+ execute_tests(p, spec[:assert_tests] || [])
  p
  end
  end
@@ -284,7 +445,9 @@ module GoodData

  def migrate_datasets(project, spec)
  spec.each do |ds|
- project.add_dataset(GoodData::Model::Schema.new(ds))
+ schema = GoodData::Model::Schema.new(ds)
+ project.add_dataset(schema)
+ GoodData::ProjectMetadata["manifest_#{schema.name}"] = schema.to_manifest.to_json
  end
  end

@@ -459,55 +622,20 @@ module GoodData
  Tempfile.open('remote_file') do |temp|
  temp << open(path).read
  temp.flush
- upload_data(temp, project, mode)
+ upload_data(temp, mode)
  end
  else
- upload_data(path, project, mode)
+ upload_data(path, mode)
  end
  end

- # Load given file into a data set described by the given schema
- def upload_data(path, project = nil, mode = "FULL")
- path = path.path if path.respond_to? :path
-
- inline_data = path.is_a?(String) ? false : true
-
- project = GoodData.project unless project
-
- # create a temporary zip file
- dir = Dir.mktmpdir
- Zip::File.open("#{dir}/upload.zip", Zip::File::CREATE) do |zip|
- # TODO make sure schema columns match CSV column names
- zip.get_output_stream('upload_info.json') { |f| f.puts JSON.pretty_generate(to_manifest(mode)) }
- if inline_data
- zip.get_output_stream('data.csv') do |f|
- path.each do |row|
- f.puts row.to_csv
- end
- end
- else
- zip.add('data.csv', path)
- end
- end
-
- # upload it
- GoodData.upload_to_user_webdav("#{dir}/upload.zip", :directory => File.basename(dir))
- FileUtils.rm_rf dir
-
- # kick the load
- pull = { 'pullIntegration' => File.basename(dir) }
- link = project.md.links('etl')['pull']
- task = GoodData.post link, pull
- while (GoodData.get(task["pullTask"]["uri"])["taskStatus"] === "RUNNING" || GoodData.get(task["pullTask"]["uri"])["taskStatus"] === "PREPARED") do
- sleep 30
- end
- fail "Load Failed" if (GoodData.get(task["pullTask"]["uri"])["taskStatus"] == "ERROR")
- puts "Done loading"
+ def upload_data(path, mode)
+ GoodData::Model.upload_data(path, to_manifest(mode))
  end

  # Generates the SLI manifest describing the data loading
  #
- def to_manifest(mode)
+ def to_manifest(mode="FULL")
  {
  'dataSetSLIManifest' => {
  'parts' => fields.reduce([]) { |memo, f| val = f.to_manifest_part(mode); memo << val unless val.nil?; memo },
@@ -1024,7 +1152,6 @@ module GoodData
  @title = spec[:title] || @name
  @urn = spec[:urn] || "URN:GOODDATA:DATE"
  end
-

  def to_maql_create
  # urn = "urn:chefs_warehouse_fiscal:date"
@@ -1,36 +1,7 @@
- module GoodData
-
- class Row < CSV::Row
- def ==(other)
- len = length()
- return false if len != other.length
- result = true
-
- len.times do |i|
- result = false unless convert_field(field(i)) == convert_field(other.field(i))
- end
- result
- end
-
- private
- def convert_field(val)
- if val.is_a?(String) && val.match(/^[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?$/)
- # Is it a Number?
- val = val.scan(/[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?/).first
- val = val.include?('.') ? val.to_f.round : val.to_i
- return val
- elsif val.nil? || (val.respond_to?(:strip) && val.strip.empty?)
- #is ia a String
- return ''
- elsif val.respond_to? :round
- # No idea what that one does
- return val.round
- else
- return val
- end
- end
- end
+ class BigDecimal; def pretty_print(p) p.text to_s; end; end

+ module GoodData
+
  class DataResult

  attr_reader :data
@@ -43,14 +14,17 @@ module GoodData
  puts to_s
  end

- def to_s
+ def to_s(options={})
+ with_indices = options[:index] || false
  a = to_table.to_a
- a.transpose.unshift((1..a.length).to_a).each_with_index.map{|col, i|
- col.unshift(i.zero?? nil : i) # inserts row labels #
+ data = a.transpose
+ data.unshift((1..a.length).to_a) if with_indices
+ data.each_with_index.map{|col, i|
+ col.unshift(i.zero? ? nil : i) if with_indices # inserts row labels #
  w = col.map{|cell| cell.to_s.length}.max # w = "column width" #
  col.each_with_index.map{|cell, i|
- i.zero?? cell.to_s.center(w) : cell.to_s.ljust(w)} # alligns the column #
- }.transpose.map{|row| "[#{row.join(' | ')}]"}.join("\n")
+ i.zero? ? cell.to_s.center(w) : cell.to_s.ljust(w)} # alligns the column #
+ }.transpose.map{|row| "[#{row.join(' | ')}]"}.unshift("").join("\n")
  end

  def to_table
@@ -72,7 +46,8 @@ module GoodData
  end

  def assemble_table
- @table = CSV::Table.new([GoodData::Row.new([],[],false)])
+ @table = [[]]
+ # CSV::Table.new([GoodData::Row.new([],[],false)])
  end

  def to_table
@@ -92,69 +67,71 @@ module GoodData
  end
  end

- class SFDataResult < DataResult
-
- def initialize(data, options = {})
- super(data)
- @options = options
- assemble_table
- end
-
- def assemble_table
- sf_data = data[:queryResponse][:result][:records]
- sf_data = sf_data.is_a?(Hash) ? [sf_data] : sf_data
- if @options[:soql]
- # puts @options[:soql]
- fields = @options[:soql].strip.match(/SELECT (.*) FROM/i)[1]
- @headers = fields.strip.split(",").map do |item|
- item.strip.split(/\s/)
- end.map do |item|
- item.last.to_sym
- end
- # pp @headers
- elsif @options[:headers]
- @headers = @options[:headers]
- else
- @headers = sf_data.first.keys - [:type, :Id]
- end
- @table = CSV::Table.new(sf_data.collect do |line|
- GoodData::Row.new([], @headers.map {|h| line[h] || ' '}, false)
- end)
- rescue
- fail "Unable to assemble the table. Either the data provided are empty or the SOQL is malformed."
- end
-
- def to_table
- @table
- end
-
- def == (otherDataResult)
- result = true
- len = @table.length
- other_table = otherDataResult.to_table
- if len != other_table.length
- # puts "TABLES ARE OF DIFFERENT SIZES"
- return false
- end
-
- diff(otherDataResult).empty?() ? true : false
-
- end
-
- def diff(otherDataResult)
- other_table = otherDataResult.to_table
- differences = []
-
- @table.each do |row|
- differences << row unless other_table.detect {|r| r == row}
- end
- differences
- end
-
- end
+ # class SFDataResult < DataResult
+ #
+ # def initialize(data, options = {})
+ # super(data)
+ # @options = options
+ # assemble_table
+ # end
+ #
+ # def assemble_table
+ # sf_data = data[:queryResponse][:result][:records]
+ # sf_data = sf_data.is_a?(Hash) ? [sf_data] : sf_data
+ # if @options[:soql]
+ # # puts @options[:soql]
+ # fields = @options[:soql].strip.match(/SELECT (.*) FROM/i)[1]
+ # @headers = fields.strip.split(",").map do |item|
+ # item.strip.split(/\s/)
+ # end.map do |item|
+ # item.last.to_sym
+ # end
+ # # pp @headers
+ # elsif @options[:headers]
+ # @headers = @options[:headers]
+ # else
+ # @headers = sf_data.first.keys - [:type, :Id]
+ # end
+ # @table = CSV::Table.new(sf_data.collect do |line|
+ # GoodData::Row.new([], @headers.map {|h| line[h] || ' '}, false)
+ # end)
+ # rescue
+ # fail "Unable to assemble the table. Either the data provided are empty or the SOQL is malformed."
+ # end
+ #
+ # def to_table
+ # @table
+ # end
+ #
+ # def == (otherDataResult)
+ # result = true
+ # len = @table.length
+ # other_table = otherDataResult.to_table
+ # if len != other_table.length
+ # # puts "TABLES ARE OF DIFFERENT SIZES"
+ # return false
+ # end
+ #
+ # diff(otherDataResult).empty?() ? true : false
+ #
+ # end
+ #
+ # def diff(otherDataResult)
+ # other_table = otherDataResult.to_table
+ # differences = []
+ #
+ # @table.each do |row|
+ # differences << row unless other_table.detect {|r| r == row}
+ # end
+ # differences
+ # end
+ #
+ # end

  class ReportDataResult < DataResult

+ ROW_LIMIT = 10000000
+
  attr_reader :row_headers, :column_headers, :table, :headers_height, :headers_width

  def initialize(data)
@@ -170,16 +147,32 @@ module GoodData
  end

  def without_column_headers
- @table = table.transpose[headers_height, 1000000].transpose
+ @table = table.transpose[headers_height, ROW_LIMIT].transpose
  self
  end

+ def to_data_table
+ table.transpose[headers_height, ROW_LIMIT].transpose[headers_width, ROW_LIMIT]
+ end
+
  def each_line
  table.transpose.each {|line| yield line}
  end

+ def to_a
+ table.to_a
+ end
+
  def to_table
- CSV::Table.new(table.transpose.map {|line| GoodData::Row.new([], line.map {|item| item || ' '}, false)})
+ table.transpose
+ end
+
+ def [](index)
+ table[index]
+ end
+
+ def include_row?(row)
+ to_table.include?(row)
  end

  def == (otherDataResult)
@@ -275,7 +268,7 @@ module GoodData
  (row_headers.size).times do |j|
  table[headers_width + i] ||= []
  # puts "[#{headers_width + i}, #{headers_height + j}] [#{i}][#{j}]=#{xtab_data[j][i]}"
- table[headers_width + i][headers_height + j] = xtab_data[j][i]
+ table[headers_width + i][headers_height + j] = BigDecimal(xtab_data[j][i])
  end
  end
  end