gooddata 0.2.0 → 0.4.0

data/VERSION CHANGED
@@ -1 +1 @@
- 0.2.0
+ 0.4.0
@@ -1,3 +1,4 @@
  module GoodData; end

  require 'gooddata/client'
+ require 'gooddata/model'
@@ -72,8 +72,8 @@ module GoodData
  # * +user+ - A GoodData username
  # * +password+ - A GoodData password
  #
- def connect(user, password, url = nil)
- threaded[:connection] = Connection.new user, password, url
+ def connect(user, password, url = nil, options={})
+ threaded[:connection] = Connection.new user, password, url, options
  end

  # Returns the active GoodData connection earlier initialized via
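The new options hash is passed straight through to Connection.new; the only key consumed in this release is :timeout, which is forwarded to the underlying RestClient resource (see the connection change below). A minimal usage sketch, assuming placeholder credentials:

    require 'gooddata'

    # connect with a 2-minute HTTP timeout; user, password and URL are placeholders
    GoodData.connect 'user@example.com', 'secret', nil, :timeout => 120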
@@ -1,4 +1,4 @@
- require 'json/pure'
+ require 'json'
  require 'net/ftptls'

  # silence the parenthesis warning in rest-client 1.6.1
@@ -42,11 +42,12 @@ module GoodData
  #
  # * +username+ - The GoodData account username
  # * +password+ - The GoodData account password
- def initialize(username, password, url = nil)
+ def initialize(username, password, url = nil, options = {})
  @status = :not_connected
  @username = username
  @password = password
  @url = url || DEFAULT_URL
+ @options = options
  end

  # Returns the user JSON object of the currently logged in GoodData user account.
@@ -165,11 +166,13 @@ module GoodData
  }
  }

- @server = RestClient::Resource.new @url, :headers => {
- :content_type => :json,
- :accept => [ :json, :zip ],
- :user_agent => GoodData.gem_version_string
- }
+ @server = RestClient::Resource.new @url,
+ :timeout => @options[:timeout],
+ :headers => {
+ :content_type => :json,
+ :accept => [ :json, :zip ],
+ :user_agent => GoodData.gem_version_string,
+ }

  GoodData.logger.debug "Logging in..."
  @user = post(LOGIN_PATH, credentials, :dont_reauth => true)['userLogin']
@@ -1,6 +1,6 @@
  require 'csv'

- module GoodData::Extract
+ module GoodData::Extract
  class CsvFile
  def initialize(file)
  @file = file
@@ -6,7 +6,9 @@ require 'fastercsv'
  # elements, including the server-side data model.
  #
  module GoodData
+
  module Model
+
  # GoodData REST API categories
  LDM_CTG = 'ldm'
  LDM_MANAGE_CTG = 'ldm-manage'
@@ -20,12 +22,19 @@ module GoodData
  LABEL_COLUMN_PREFIX = 'nm_'
  ATTRIBUTE_FOLDER_PREFIX = 'dim'
  ATTRIBUTE_PREFIX = 'attr'
+ LABEL_PREFIX = 'label'
  FACT_PREFIX = 'fact'
  DATE_FACT_PREFIX = 'dt'
+ DATE_ATTRIBUTE = "date"
+ DATE_ATTRIBUTE_DEFAULT_DISPLAY_FORM = 'mdyy'
  TIME_FACT_PREFIX = 'tm.dt'
  TIME_ATTRIBUTE_PREFIX = 'attr.time'
  FACT_FOLDER_PREFIX = 'ffld'

+ SKIP_FIELD = false
+
+ BEGINNING_OF_TIMES = Date.parse('1/1/1900')
+
  class << self
  def add_dataset(title, columns, project = nil)
  add_schema Schema.new('columns' => columns, 'title' => title), project
@@ -74,7 +83,7 @@ module GoodData
  # model abstractions.
  #
  class Schema < MdObject
- attr_reader :fields
+ attr_reader :fields, :attributes, :facts, :folders, :references, :labels

  def self.load(file)
  Schema.new JSON.load(open(file))
@@ -82,20 +91,57 @@ module GoodData

  def initialize(config, title = nil)
  @fields = []
+ @attributes = {}
+ @facts = {}
+ @folders = {
+ :facts => {},
+ :attributes => {}
+ }
+ @references = {}
+ @labels = []
+
  config['title'] = title unless config['title']
  raise 'Schema name not specified' unless config['title']
  self.title = config['title']
  self.config = config
  end

+ def transform_header(headers)
+ result = fields.reduce([]) do |memo, f|
+ val = f.to_csv_header(headers)
+ memo << val unless val === SKIP_FIELD
+ memo
+ end
+ result.flatten
+ end
+
+ def transform_row(headers, row)
+ result = fields.reduce([]) do |memo, f|
+ val = f.to_csv_data(headers, row)
+ memo << val unless val === SKIP_FIELD
+ memo
+ end
+ result.flatten
+ end
+
  def config=(config)
- labels = []
  config['columns'].each do |c|
- add_attribute c if c['type'] == 'ATTRIBUTE'
- add_fact c if c['type'] == 'FACT'
- add_date c if c['type'] == 'DATE'
- set_connection_point c if c['type'] == 'CONNECTION_POINT'
- labels.push c if c['type'] == 'LABEL'
+ case c['type']
+ when 'ATTRIBUTE'
+ add_attribute c
+ when 'FACT'
+ add_fact c
+ when 'DATE'
+ add_date c
+ when 'CONNECTION_POINT'
+ set_connection_point c
+ when 'LABEL'
+ add_label c
+ when 'REFERENCE'
+ add_reference c
+ else
+ fail "Unexpected type #{c['type']} in #{c.inspect}"
+ end
  end
  @connection_point = RecordsOf.new(nil, self) unless @connection_point
  end
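Schema#config= now dispatches on each column's 'type' and fails fast on unknown types instead of silently ignoring them. A hypothetical column list covering the recognized types might look like the following sketch; only the keys 'type', 'name', 'reference', 'schema_reference' and 'format' appear in this diff, the rest of the values are illustrative:

    columns = [
      { 'type' => 'CONNECTION_POINT', 'name' => 'id' },
      { 'type' => 'ATTRIBUTE',        'name' => 'city' },
      { 'type' => 'LABEL',            'name' => 'city_name', 'reference' => 'city' },
      { 'type' => 'FACT',             'name' => 'amount' },
      { 'type' => 'DATE',             'name' => 'closed', 'format' => 'dd/MM/yyyy' },
      { 'type' => 'REFERENCE',        'name' => 'account', 'reference' => 'id',
        'schema_reference' => 'accounts' }
    ]
    GoodData::Model.add_dataset 'Opportunities', columns   # builds and executes the MAQL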
@@ -107,10 +153,6 @@ module GoodData

  def type_prefix ; 'dataset' ; end

- def attributes; @attributes ||= {} ; end
- def facts; @facts ||= {} ; end
- def folders; @folders ||= {}; end
-
  ##
  # Underlying fact table name
  #
@@ -142,24 +184,43 @@ module GoodData
  maql += "ALTER DATASET {#{self.identifier}} ADD {#{obj.identifier}};\n\n"
  end
  end
+
+ labels.each do |label|
+ maql += "# Creating Labels\n"
+ maql += label.to_maql_create
+ end
+
+ references.values.each do |ref|
+ maql += "# Creating references\n"
+ maql += ref.to_maql_create
+ end
+
  folders_maql = "# Create folders\n"
- folders.keys.each { |folder| folders_maql += folder.to_maql_create }
+ (folders[:attributes].values + folders[:facts].values).each { |folder| folders_maql += folder.to_maql_create }
  folders_maql + "\n" + maql + "SYNCHRONIZE {#{identifier}};\n"
  end

  # Load given file into a data set described by the given schema
  #
- def upload(path, project = nil)
+ def upload(path, project = nil, mode = "FULL")
  path = path.path if path.respond_to? :path
+ header = nil
  project = GoodData.project unless project

  # create a temporary zip file
  dir = Dir.mktmpdir
  Zip::ZipFile.open("#{dir}/upload.zip", Zip::ZipFile::CREATE) do |zip|
  # TODO make sure schema columns match CSV column names
- zip.get_output_stream('upload_info.json') { |f| f.puts JSON.pretty_generate(to_manifest) }
+ zip.get_output_stream('upload_info.json') { |f| f.puts JSON.pretty_generate(to_manifest(mode)) }
  zip.get_output_stream('data.csv') do |f|
- FasterCSV.foreach(path) { |row| f.puts row.to_csv }
+ FasterCSV.foreach(path, :headers => true, :return_headers => true) do |row|
+ output = if row.header_row?
+ transform_header(row)
+ else
+ transform_row(header, row)
+ end
+ f.puts output.to_csv
+ end
  end
  end

@@ -170,18 +231,22 @@ module GoodData
  # kick the load
  pull = { 'pullIntegration' => File.basename(dir) }
  link = project.md.links('etl')['pull']
- GoodData.post link, pull
+ task = GoodData.post link, pull
+ while (GoodData.get(task["pullTask"]["uri"])["taskStatus"] === "RUNNING" || GoodData.get(task["pullTask"]["uri"])["taskStatus"] === "PREPARED") do
+ sleep 30
+ end
+ puts "Done loading"
  end

  # Generates the SLI manifest describing the data loading
- #
- def to_manifest
+ #
+ def to_manifest(mode)
  {
  'dataSetSLIManifest' => {
- 'parts' => fields.map { |f| f.to_manifest_part },
+ 'parts' => fields.reduce([]) { |memo, f| val = f.to_manifest_part(mode); memo << val unless val.nil?; memo },
  'dataSet' => self.identifier,
  'file' => 'data.csv', # should be configurable
- 'csvParams' => {
+ 'csvParams' => {
  'quoteChar' => '"',
  'escapeChar' => '"',
  'separatorChar' => ',',
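For orientation, to_manifest(mode) now returns roughly the following hash when mode is "INCREMENTAL"; the dataset and column identifiers below are purely illustrative, and any part whose to_manifest_part returns nil (such as the synthetic DateColumn further down) is dropped from 'parts':

    {
      'dataSetSLIManifest' => {
        'parts' => [
          { 'populates' => ['fact.opportunities.amount'],   # illustrative identifier
            'mode' => 'INCREMENTAL',
            'columnName' => 'amount' }
        ],
        'dataSet' => 'dataset.opportunities',               # illustrative identifier
        'file' => 'data.csv',
        'csvParams' => { 'quoteChar' => '"', 'escapeChar' => '"', 'separatorChar' => ',' }
      }
    }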
@@ -195,24 +260,51 @@ module GoodData

  def add_attribute(column)
  attribute = Attribute.new column, self
- @fields << attribute
- add_to_hash(self.attributes, attribute)
- folders[AttributeFolder.new(attribute.folder)] = 1 if attribute.folder
+ fields << attribute
+ add_to_hash(attributes, attribute)
+ add_attribute_folder(attribute.folder)
+ # folders[AttributeFolder.new(attribute.folder)] = 1 if attribute.folder
+ end
+
+ def add_attribute_folder(name)
+ return if name.nil?
+ return if folders[:attributes].has_key?(name)
+ folders[:attributes][name] = AttributeFolder.new(name)
  end

  def add_fact(column)
  fact = Fact.new column, self
- @fields << fact
- add_to_hash(self.facts, fact)
- folders[FactFolder.new(fact.folder)] = 1 if fact.folder
+ fields << fact
+ add_to_hash(facts, fact)
+ add_fact_folder(fact.folder)
+ # folders[FactFolder.new(fact.folder)] = 1 if fact.folder
+ end
+
+ def add_fact_folder(name)
+ return if name.nil?
+ return if folders[:facts].has_key?(name)
+ folders[:facts][name] = FactFolder.new(name)
+ end
+
+ def add_label(column)
+ label = Label.new(column, nil, self)
+ labels << label
+ fields << label
+ end
+
+ def add_reference(column)
+ reference = Reference.new(column, self)
+ fields << reference
+ add_to_hash(references, reference)
  end

  def add_date(column)
  date = DateColumn.new column, self
+ @fields << date
  date.parts.values.each { |p| @fields << p }
  date.facts.each { |f| add_to_hash(self.facts, f) }
  date.attributes.each { |a| add_to_hash(self.attributes, a) }
- @fields << date
+ date.references.each {|r| add_to_hash(self.references, r)}
  end

  def set_connection_point(column)
@@ -256,6 +348,15 @@ module GoodData
  visual
  end

+ def to_csv_header(row)
+ name
+ end
+
+ def to_csv_data(headers, row)
+ row[name]
+ end
+
+
  # Overriden to prevent long strings caused by the @schema attribute
  #
  def inspect
@@ -286,16 +387,17 @@ module GoodData
  def key ; "#{Model::to_id(@name)}#{FK_SUFFIX}" ; end

  def to_maql_create
- "CREATE ATTRIBUTE {#{identifier}} VISUAL (#{visual})" \
- + " AS KEYS {#{table}.#{Model::FIELD_PK}} FULLSET;\n" \
- + @primary_label.to_maql_create
+ maql = "CREATE ATTRIBUTE {#{identifier}} VISUAL (#{visual})" \
+ + " AS KEYS {#{table}.#{Model::FIELD_PK}} FULLSET;\n"
+ maql += @primary_label.to_maql_create if @primary_label
+ maql
  end

- def to_manifest_part
+ def to_manifest_part(mode)
  {
  'referenceKey' => 1,
  'populates' => [ @primary_label.identifier ],
- 'mode' => 'FULL',
+ 'mode' => mode,
  'columnName' => name
  }
  end
@@ -309,21 +411,23 @@ module GoodData
  class Label < Column
  def type_prefix ; 'label' ; end

+ # def initialize(hash, schema)
  def initialize(hash, attribute, schema)
  super hash, schema
- @attribute = attribute
+ @attribute = attribute || schema.fields.find {|field| field.name === hash["reference"]}
  end

  def to_maql_create
+ "# LABEL FROM LABEL"
  "ALTER ATTRIBUTE {#{@attribute.identifier}} ADD LABELS {#{identifier}}" \
  + " VISUAL (TITLE #{title.inspect}) AS {#{column}};\n"
  end

- def to_manifest_part
+ def to_manifest_part(mode)
  {
- 'populates' => [ identifier ],
- 'mode' => 'FULL',
- 'columnName' => name
+ 'populates' => [ identifier ],
+ 'mode' => mode,
+ 'columnName' => name
  }
  end

@@ -389,11 +493,11 @@ module GoodData
  + " AS {#{column}};\n"
  end

- def to_manifest_part
+ def to_manifest_part(mode)
  {
  'populates' => [ identifier ],
- 'mode' => 'FULL',
- 'columnName' => column
+ 'mode' => mode,
+ 'columnName' => name
  }
  end
  end
@@ -401,42 +505,46 @@ module GoodData
  ##
  # Reference to another data set
  #
- class Reference
+ class Reference < Column
  def initialize(column, schema)
+ super column, schema
+ # pp column
+
  @name = column['name']
  @reference = column['reference']
- @schema_ref = column['schema_ref']
+ @schema_ref = column['schema_reference']
  @schema = schema
  end

  ##
- # Generates an identifier of the referencing attribute using the
+ # Generates an identifier of the referencing attribute using the
  # schema name derived from schemaReference and column name derived
  # from the reference key.
  #
  def identifier
- @identifier ||= "#{ATTRIBUTE_PREFIX}.#{Model::to_id @schema_ref.title}.#{Model::to_id @reference}"
+ @identifier ||= "#{ATTRIBUTE_PREFIX}.#{Model::to_id @schema_ref}.#{Model::to_id @reference}"
  end

  def key ; "#{Model::to_id @name}_id" ; end

  def label_column
- @column ||= "#{@schema.table}.#{LABEL_COLUMN_PREFIX Model::to_id(reference)}"
+ "#{LABEL_PREFIX}.#{Model::to_id @schema_ref}.#{Model::to_id @reference}"
  end

  def to_maql_create
- "ALTER ATTRIBUTE {#{self.identifier} ADD KEYS {#{@schema.table}.#{key}}"
+ "ALTER ATTRIBUTE {#{self.identifier}} ADD KEYS {#{@schema.table}.#{key}};\n"
  end

  def to_maql_drop
- "ALTER ATTRIBUTE {#{self.identifier} DROP KEYS {#{@schema.table}.#{key}}"
+ "ALTER ATTRIBUTE {#{self.identifier} DROP KEYS {#{@schema.table}.#{key}};\n"
  end

- def to_manifest_part
+ def to_manifest_part(mode)
  {
- 'populates' => [ identifier ],
- 'mode' => 'FULL',
- 'columnName' => label_column
+ 'populates' => [ label_column ],
+ 'mode' => mode,
+ 'columnName' => name,
+ 'referenceKey' => 1
  }
  end
  end
@@ -445,8 +553,35 @@ module GoodData
  # Fact representation of a date.
  #
  class DateFact < Fact
+
+ attr_accessor :format, :output_format
+
+ def initialize(column, schema)
+ super column, schema
+ @output_format = column["format"] || '("dd/MM/yyyy")'
+ @format = @output_format.gsub('yyyy', '%Y').gsub('MM', '%m').gsub('dd', '%d')
+ end
+
  def column_prefix ; DATE_COLUMN_PREFIX ; end
  def type_prefix ; DATE_FACT_PREFIX ; end
+
+ def to_csv_header(row)
+ "#{name}_fact"
+ end
+
+ def to_csv_data(headers, row)
+ val = row[name]
+ val.nil?() ? nil : (Date.strptime(val, format) - BEGINNING_OF_TIMES).to_i
+ end
+
+ def to_manifest_part(mode)
+ {
+ 'populates' => [ identifier ],
+ 'mode' => mode,
+ 'columnName' => "#{name}_fact"
+ }
+ end
+
  end

  ##
@@ -454,6 +589,38 @@ module GoodData
  #
  class DateReference < Reference

+ attr_accessor :format, :output_format, :urn
+
+ def initialize(column, schema)
+ super column, schema
+ @output_format = column["format"] || '("dd/MM/yyyy")'
+ @format = @output_format.gsub('yyyy', '%Y').gsub('MM', '%m').gsub('dd', '%d')
+ @urn = column["urn"] || "URN:GOODDATA:DATE"
+ end
+
+ def identifier
+ @identifier ||= "#{Model::to_id @schema_ref}.#{DATE_ATTRIBUTE}"
+ end
+
+ def to_manifest_part(mode)
+ {
+ 'populates' => [ "#{identifier}.#{DATE_ATTRIBUTE_DEFAULT_DISPLAY_FORM}" ],
+ 'mode' => mode,
+ 'constraints' => {"date" => output_format},
+ 'columnName' => name,
+ 'referenceKey' => 1
+ }
+ end
+
+ def to_maql_create
+ # urn:chefs_warehouse_fiscal:date
+ super_maql = super
+ maql = ""
+ # maql = "# Include date dimensions\n"
+ # maql += "INCLUDE TEMPLATE \"#{urn}\" MODIFY (IDENTIFIER \"#{name}\", TITLE \"#{title || name}\");\n"
+ maql += super_maql
+ end
+
  end

  ##
@@ -461,6 +628,15 @@ module GoodData
  #
  class DateAttribute < Attribute
  def key ; "#{DATE_COLUMN_PREFIX}#{super}" ; end
+
+ def to_manifest_part(mode)
+ {
+ 'populates' => ['label.stuff.mmddyy'],
+ "format" => "unknown",
+ "mode" => mode,
+ "referenceKey" => 1
+ }
+ end
  end

  ##
@@ -492,22 +668,24 @@ module GoodData
  # parts: date fact, a date reference or attribute and an optional time component
  # that contains a time fact and a time reference or attribute.
  #
- class DateColumn
- attr_reader :parts, :facts, :attributes
+ class DateColumn < Column
+ attr_reader :parts, :facts, :attributes, :references

  def initialize(column, schema)
- @parts = {} ; @facts = [] ; @attributes = []
+ super column, schema
+ @parts = {} ; @facts = [] ; @attributes = []; @references = []

  @facts << @parts[:date_fact] = DateFact.new(column, schema)
- if column['schemaReference'] then
+ if column['schema_reference'] then
  @parts[:date_ref] = DateReference.new column, schema
+ @references << @parts[:date_ref]
  else
  @attributes << @parts[:date_attr] = DateAttribute.new(column, schema)
  end
  if column['datetime'] then
  puts "*** datetime"
  @facts << @parts[:time_fact] = TimeFact.new(column, schema)
- if column['schemaReference'] then
+ if column['schema_reference'] then
  @parts[:time_ref] = TimeReference.new column, schema
  else
  @attributes << @parts[:time_attr] = TimeAttribute.new(column, schema)
@@ -522,6 +700,19 @@ module GoodData
  def to_maql_drop
  @parts.values.map { |v| v.to_maql_drop }.join "\n"
  end
+
+ def to_csv_header(row)
+ SKIP_FIELD
+ end
+
+ def to_csv_data(headers, row)
+ SKIP_FIELD
+ end
+
+ def to_manifest_part(mode)
+ nil
+ end
+
  end

  ##
@@ -554,5 +745,16 @@ module GoodData
  def type; "FACT"; end
  def type_prefix; "ffld"; end
  end
+
+ class DateDimension < MdObject
+
+ def to_maql_create
+ # urn:chefs_warehouse_fiscal:date
+ maql = ""
+ maql += "INCLUDE TEMPLATE \"#{urn}\" MODIFY (IDENTIFIER \"#{name}\", TITLE \"#{title || name}\");"
+ maql
+ end
+ end
+
  end
  end
@@ -0,0 +1,241 @@
+ require 'fastercsv'
+
+ module GoodData
+
+ class Row < FasterCSV::Row
+ def ==(other)
+ len = length()
+ return false if len != other.length
+ result = true
+
+ len.times do |i|
+ result = false unless convert_field(field(i)) == convert_field(other.field(i))
+ end
+ result
+ end
+
+ private
+ def convert_field(val)
+ if val.is_a?(String) && val.match(/^[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?$/)
+ val = val.scan(/[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?/).first
+ val = val.include?('.') ? val.to_f.round : val.to_i
+ return val
+ elsif val.nil? || val == ' '
+ return 'N/A'
+ elsif val.respond_to? :round
+ return val.round
+ else
+ return val
+ end
+ end
+ end
+
+ class DataResult
+
+ attr_reader :data
+
+ def initialize(data)
+ @data = data
+ end
+
+ def print
+ a = to_table.to_a
+ a.transpose.unshift((1..a.length).to_a).each_with_index.map{|col, i|
+ col.unshift(i.zero?? nil : i) # inserts row labels #
+ w = col.map{|cell| cell.to_s.length}.max # w = "column width" #
+ col.each_with_index.map{|cell, i|
+ i.zero?? cell.to_s.center(w) : cell.to_s.ljust(w)} # alligns the column #
+ }.transpose.each{|row| puts "[#{row.join(' | ')}]"}
+ end
+
+ def to_table
+ raise "Should be implemented in subclass"
+ end
+
+ end
+
+
+ class SFDataResult < DataResult
+
+ def initialize(data, options = {})
+ super(data)
+ @options = options
+ assemble_table
+ end
+
+ def assemble_table
+ sf_data = data[:queryResponse][:result][:records]
+ sf_data = sf_data.is_a?(Hash) ? [sf_data] : sf_data
+ if @options[:soql]
+ @headers = @options[:soql].strip.match(/^SELECT (.*) FROM/)[1].strip.split(",").map{|item| item.strip.split(/\s/)}.map{|item| item.last.to_sym}
+ elsif @options[:headers]
+ @headers = @options[:headers]
+ else
+ @headers = sf_data.first.keys - [:type, :Id]
+ end
+ @table = FasterCSV::Table.new(sf_data.collect do |line|
+ GoodData::Row.new([], @headers.map {|h| line[h] || ' '}, false)
+ end)
+ rescue
+ fail "Unable to assemble the table. Either the data provided are empty or the SOQL is malformed."
+ end
+
+ def to_table
+ @table
+ end
+
+ def == (otherDataResult)
+ result = true
+ len = @table.length
+ other_table = otherDataResult.to_table
+ if len != other_table.length
+ # puts "TABLES ARE OF DIFFERENT SIZES"
+ return false
+ end
+
+ diff(otherDataResult).empty?() ? true : false
+
+ end
+
+ def diff(otherDataResult)
+ other_table = otherDataResult.to_table
+ differences = []
+
+ @table.each do |row|
+ differences << row unless other_table.detect {|r| r == row}
+ end
+ differences
+ end
+
+ end
+
+ class ReportDataResult < DataResult
+
+ attr_reader :row_headers, :column_headers, :table, :headers_height, :headers_width
+
+ def initialize(data)
+ super
+ @row_headers = []
+ @column_headers = []
+ @table = []
+
+ @row_headers, @headers_width = tabularize_rows
+ @column_headers, @headers_height = tabularize_columns
+
+ assemble_table
+ end
+
+ def without_column_headers
+ @table = table.transpose[headers_height, 1000000].transpose
+ self
+ end
+
+ def each_line
+ table.transpose.each {|line| yield line}
+ end
+
+ def to_table
+ FasterCSV::Table.new(table.transpose.map {|line| GoodData::Row.new([], line.map {|item| item || ' '}, false)})
+ end
+
+ def == (otherDataResult)
+ result = true
+ csv_table = to_table
+ len = csv_table.length
+ return false if len != otherDataResult.to_table.length
+
+
+ result
+ end
+
+ def diff(otherDataResult)
+ csv_table = to_table
+ other_table = otherDataResult.to_table
+ differences = []
+
+ csv_table.each do |row|
+ differences << row unless other_table.detect {|r| r == row}
+ end
+ differences
+ end
+
+ private
+ def each_level(table, level, children, lookup)
+ max_level = level + 1
+ children.each do |kid|
+ first = kid["first"]
+ last = kid["last"]
+ repetition = last - first + 1
+ repetition.times do |i|
+ table[first + i] ||= []
+ if kid["type"] == 'total'
+ table[first + i][level] = kid["id"]
+ else
+ table[first + i][level] = lookup[level][kid["id"].to_s]
+ end
+ end
+ if (!kid["children"].empty?)
+ new_level = each_level(table, level+1, kid["children"], lookup)
+ max_level = [max_level, new_level].max
+ end
+ end
+ max_level
+ end
+
+ def tabularize_rows
+ rows = data["xtab_data"]["rows"]
+ kids = rows["tree"]["children"]
+
+ if kids.empty? || (kids.size == 1 && kids.first['type'] == 'metric')
+ headers, size = [[nil]], 0
+ else
+ headers = []
+ size = each_level(headers, 0, rows["tree"]["children"], rows["lookups"])
+ end
+ return headers, size
+ end
+
+ def tabularize_columns
+ columns = data["xtab_data"]["columns"]
+ kids = columns["tree"]["children"]
+
+ if kids.empty? || (kids.size == 1 && kids.first['type'] == 'metric')
+ headers, size = [[nil]], 0
+ else
+ headers = []
+ size = each_level(headers, 0, columns["tree"]["children"], columns["lookups"])
+ end
+ return headers, size
+ end
+
+ def assemble_table()
+ # puts "=== COLUMNS === #{column_headers.size}x#{headers_height}"
+ (column_headers.size).times do |i|
+ (headers_height).times do |j|
+ table[headers_width + i] ||= []
+ # puts "[#{headers_width + i}][#{j}] #{column_headers[i][j]}"
+ table[headers_width + i][j] = column_headers[i][j]
+ end
+ end
+
+ # puts "=== ROWS ==="
+ (row_headers.size).times do |i|
+ (headers_width).times do |j|
+ table[j] ||= []
+ # puts "[#{j}][#{headers_height + i}] #{row_headers[i][j]}"
+ table[j][headers_height + i] = row_headers[i][j]
+ end
+ end
+
+ xtab_data = data["xtab_data"]["data"]
+ # puts "=== DATA === #{column_headers.size}x#{row_headers.size}"
+ (column_headers.size).times do |i|
+ (row_headers.size).times do |j|
+ table[headers_width + i] ||= []
+ # puts "[#{headers_width + i}, #{headers_height + j}] [#{i}][#{j}]=#{xtab_data[j][i]}"
+ table[headers_width + i][headers_height + j] = xtab_data[j][i]
+ end
+ end
+ end
+ end
+ end
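The DataResult family normalizes both report executions and Salesforce extracts into a FasterCSV::Table of GoodData::Row objects, so results from the two sources can be printed and diffed against each other. A hedged usage sketch, assuming report is an already-fetched GoodData::Report and sf_response is a parsed Salesforce query response:

    report_result = report.execute                    # => GoodData::ReportDataResult
    sf_result = GoodData::SFDataResult.new(sf_response,
                  :soql => 'SELECT Name, Amount FROM Opportunity')
    report_result.print                               # renders the result as a text table
    puts report_result.diff(sf_result) unless report_result == sf_result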
@@ -39,6 +39,14 @@ module GoodData
  GoodData.delete @json['links']['self']
  end

+ def obj_id
+ uri.split('/').last
+ end
+
+ def links
+ @json['links']
+ end
+
  def uri
  meta['uri']
  end
@@ -51,6 +59,10 @@ module GoodData
  meta['title']
  end

+ def summary
+ meta['summary']
+ end
+
  def meta
  @json['meta']
  end
@@ -87,6 +87,10 @@ module GoodData
  @json['links']['self'] if @json['links'] && @json['links']['self']
  end

+ def obj_id
+ uri.split('/').last
+ end
+
  def title
  @json['meta']['title'] if @json['meta']
  end
@@ -112,8 +116,8 @@ module GoodData
  Model.add_schema schema, self
  end

- def upload(file, schema)
- schema.upload file, self
+ def upload(file, schema, mode = "FULL")
+ schema.upload file, self, mode
  end

  def slis
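Project#upload simply forwards the new mode argument to Schema#upload. A minimal sketch, assuming schema is a GoodData::Model::Schema instance and that the SLI accepts "INCREMENTAL" as a mode besides the default "FULL":

    project = GoodData.project
    project.upload 'data.csv', schema                  # full load (default mode)
    project.upload 'data.csv', schema, "INCREMENTAL"   # assumed incremental load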
@@ -0,0 +1,29 @@
+ module GoodData
+ class Report < GoodData::MdObject
+
+ class << self
+ def [](id)
+ if id == :all
+ GoodData.get(GoodData.project.md['query'] + '/reports/')['query']['entries']
+ else
+ super
+ end
+ end
+ end
+
+ def execute
+ # puts "Executing report #{uri}"
+ result = GoodData.post '/gdc/xtab2/executor3', {"report_req" => {"report" => uri}}
+ dataResultUri = result["reportResult2"]["content"]["dataResult"]
+
+ result = GoodData.get dataResultUri
+ while result["taskState"] && result["taskState"]["state"] == "WAIT" do
+ sleep 10
+ result = GoodData.get dataResultUri
+ end
+ data_result = ReportDataResult.new(GoodData.get dataResultUri)
+ data_result
+ end
+
+ end
+ end
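Report#execute posts the report to the XTAB executor, polls the returned dataResult URI until it leaves the WAIT state, and wraps the final payload in a ReportDataResult. A hedged sketch, assuming the query entries expose a 'link' key and that the MdObject lookup accepts a URI:

    GoodData.use $DEMO_PROJECT                         # any project with a report
    entries = GoodData::Report[:all]                   # raw query entries (hashes)
    report  = GoodData::Report[entries.first['link']]  # assumption: lookup by URI
    report.execute.print                               # run and dump the result table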
@@ -1,3 +1,3 @@
  module GoodData
- VERSION = "0.1.0"
+ VERSION = "0.4.0"
  end
@@ -6,5 +6,8 @@ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
  $LOAD_PATH.unshift(File.dirname(__FILE__))
  require 'gooddata'

+ # used in test that expect an existing accessible project
+ $DEMO_PROJECT = 'ca6a1r1lbfwpt2v05k36nbc0cjpu7lh9'
+
  class Test::Unit::TestCase
  end
@@ -32,7 +32,7 @@ class TestRestApiBasic < Test::Unit::TestCase
  }

  should "list datasets" do
- GoodData::Command.run "datasets", [ "--project", "FoodMartDemo" ]
+ GoodData::Command.run "datasets", [ "--project", $DEMO_PROJECT ]
  end

  should "apply a dataset model" do
@@ -60,8 +60,8 @@ class TestRestApiBasic < Test::Unit::TestCase
  GoodData::Command.run "api:test", []
  end

- should "get FoodMartDemo metadata" do
- GoodData::Command.run "api:get", [ '/gdc/md/FoodMartDemo' ]
+ should "get demo project's metadata" do
+ GoodData::Command.run "api:get", [ "/gdc/md/#{$DEMO_PROJECT}" ]
  end
  end

@@ -45,6 +45,23 @@ class TestModel < Test::Unit::TestCase
  ds = GoodData::DataSet['dataset.mrkev']
  assert_not_nil ds

+ project.delete
+ end
+
+ should "create a simple model with no CP in a sandbox project using Model.add_dataset" do
+ project = GoodData::Project.create :title => "gooddata-ruby test #{Time.new.to_i}"
+ GoodData.use project
+
+ # create a similar data set but without the connection point column
+ cols_no_cp = COLUMNS.select { |c| c['type'] != 'CONNECTION_POINT' }
+ objects = GoodData::Model.add_dataset 'No CP', cols_no_cp
+ uris = objects['uris']
+
+ # Repeat check of metadata objects expected to be created on the server side
+ GoodData.get uris[uris.length - 1]
+ ds = GoodData::DataSet['dataset.nocp']
+ assert_not_nil ds
+
  # clean-up
  project.delete
  end
@@ -13,29 +13,29 @@ class TestRestApiBasic < Test::Unit::TestCase
  GoodData::Command::connect
  end

- should "get the FoodMartDemo" do
- p_by_hash = GoodData::Project['FoodMartDemo']
- p_by_uri = GoodData::Project['/gdc/projects/FoodMartDemo']
- p_by_md_uri = GoodData::Project['/gdc/md/FoodMartDemo']
+ should "get the demo project" do
+ p_by_hash = GoodData::Project[$DEMO_PROJECT]
+ p_by_uri = GoodData::Project["/gdc/projects/#{$DEMO_PROJECT}"]
+ p_by_md_uri = GoodData::Project["/gdc/md/#{$DEMO_PROJECT}"]
  assert_not_nil p_by_hash
  assert_equal p_by_hash.uri, p_by_uri.uri
  assert_equal p_by_hash.title, p_by_uri.title
  assert_equal p_by_hash.title, p_by_md_uri.title
  end

- should "connect to the FoodMartDemo" do
- GoodData.use 'FoodMartDemo'
+ should "connect to the demo project" do
+ GoodData.use $DEMO_PROJECT
  GoodData.project.datasets # should not fail on unknown project or access denied
  # TODO: should be equal to Dataset.all once implemented
  end

  # Not supported yet
  # should "fetch dataset by numerical or string identifier" do
- # GoodData.use 'FoodMartDemo'
+ # GoodData.use $DEMO_PROJECT
  # ds_by_hash = Dataset['amJoIYHjgESv']
  # ds_by_id = Dataset[34]
  # assert_not_nil ds_by_hash
  # assert_equal ds_by_hash.uri, ds_by_id.uri
  # end
  end
- end
+ end
@@ -46,7 +46,9 @@ class TestModel < Test::Unit::TestCase

  should "upload CSV in a full mode" do
  @project.add_dataset SCHEMA
- @project.upload @file.path, SCHEMA
+ assert_equal 1, @project.datasets.size
+ assert_equal "test", @project.datasets.first.title
+ @project.upload @file.path, SCHEMA, "FULL"
  end
  end
  end
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: gooddata
  version: !ruby/object:Gem::Version
- hash: 23
- prerelease: false
+ hash: 15
+ prerelease:
  segments:
  - 0
- - 2
+ - 4
  - 0
- version: 0.2.0
+ version: 0.4.0
  platform: ruby
  authors:
  - Pavel Kolesnikov
@@ -16,12 +16,9 @@ autorequire:
  bindir: bin
  cert_chain: []

- date: 2011-05-10 00:00:00 -07:00
- default_executable:
+ date: 2011-09-20 00:00:00 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: thoughtbot-shoulda
- prerelease: false
  requirement: &id001 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -31,11 +28,11 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :development
  version_requirements: *id001
- - !ruby/object:Gem::Dependency
- name: parseconfig
+ name: thoughtbot-shoulda
  prerelease: false
+ type: :development
+ - !ruby/object:Gem::Dependency
  requirement: &id002 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -45,11 +42,11 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :runtime
  version_requirements: *id002
- - !ruby/object:Gem::Dependency
- name: json_pure
+ name: parseconfig
  prerelease: false
+ type: :runtime
+ - !ruby/object:Gem::Dependency
  requirement: &id003 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -59,11 +56,11 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :runtime
  version_requirements: *id003
- - !ruby/object:Gem::Dependency
- name: rest-client
+ name: json_pure
  prerelease: false
+ type: :runtime
+ - !ruby/object:Gem::Dependency
  requirement: &id004 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -73,11 +70,11 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :runtime
  version_requirements: *id004
- - !ruby/object:Gem::Dependency
- name: fastercsv
+ name: rest-client
  prerelease: false
+ type: :runtime
+ - !ruby/object:Gem::Dependency
  requirement: &id005 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -87,11 +84,11 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :runtime
  version_requirements: *id005
- - !ruby/object:Gem::Dependency
- name: json
+ name: fastercsv
  prerelease: false
+ type: :runtime
+ - !ruby/object:Gem::Dependency
  requirement: &id006 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -101,11 +98,11 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :runtime
  version_requirements: *id006
- - !ruby/object:Gem::Dependency
- name: rubyzip
+ name: json
  prerelease: false
+ type: :runtime
+ - !ruby/object:Gem::Dependency
  requirement: &id007 !ruby/object:Gem::Requirement
  none: false
  requirements:
@@ -115,13 +112,15 @@ dependencies:
  segments:
  - 0
  version: "0"
- type: :runtime
  version_requirements: *id007
+ name: rubyzip
+ prerelease: false
+ type: :runtime
  description: Use the Gooddata::Client class to integrate GoodData into your own application or use the CLI to work with GoodData directly from the command line.
  email: pavel@gooddata.com
  executables:
- - gooddata
  - igd.rb
+ - gooddata
  extensions: []

  extra_rdoc_files:
@@ -148,10 +147,12 @@ files:
  - lib/gooddata/extract.rb
  - lib/gooddata/helpers.rb
  - lib/gooddata/model.rb
+ - lib/gooddata/models/data_result.rb
  - lib/gooddata/models/links.rb
  - lib/gooddata/models/metadata.rb
  - lib/gooddata/models/profile.rb
  - lib/gooddata/models/project.rb
+ - lib/gooddata/models/report.rb
  - lib/gooddata/version.rb
  - test/helper.rb
  - test/test_commands.rb
@@ -159,13 +160,12 @@ files:
  - test/test_model.rb
  - test/test_rest_api_basic.rb
  - test/test_upload.rb
- has_rdoc: true
  homepage: http://github.com/gooddata/gooddata-ruby
  licenses: []

  post_install_message:
- rdoc_options:
- - --charset=UTF-8
+ rdoc_options: []
+
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
@@ -189,14 +189,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  requirements: []

  rubyforge_project:
- rubygems_version: 1.3.7
+ rubygems_version: 1.8.10
  signing_key:
  specification_version: 3
  summary: A convenient Ruby wrapper around the GoodData RESTful API
- test_files:
- - test/helper.rb
- - test/test_commands.rb
- - test/test_guessing.rb
- - test/test_model.rb
- - test/test_rest_api_basic.rb
- - test/test_upload.rb
+ test_files: []
+