gooddata 0.6.18 → 0.6.19

Files changed (133)
  1. checksums.yaml +4 -4
  2. data/.gitignore +2 -1
  3. data/.travis.yml +8 -19
  4. data/Guardfile +5 -0
  5. data/README.md +1 -3
  6. data/bin/gooddata +1 -1
  7. data/gooddata.gemspec +6 -4
  8. data/lib/gooddata.rb +1 -1
  9. data/lib/gooddata/bricks/middleware/aws_middleware.rb +24 -0
  10. data/lib/gooddata/cli/commands/console_cmd.rb +1 -1
  11. data/lib/gooddata/cli/commands/project_cmd.rb +29 -9
  12. data/lib/gooddata/cli/hooks.rb +9 -3
  13. data/lib/gooddata/commands/datawarehouse.rb +1 -7
  14. data/lib/gooddata/commands/project.rb +4 -3
  15. data/lib/gooddata/core/logging.rb +14 -2
  16. data/lib/gooddata/exceptions/execution_limit_exceeded.rb +9 -0
  17. data/lib/gooddata/exceptions/uncomputable_report.rb +8 -0
  18. data/lib/gooddata/exceptions/validation_error.rb +1 -1
  19. data/lib/gooddata/goodzilla/goodzilla.rb +5 -1
  20. data/lib/gooddata/helpers/data_helper.rb +40 -9
  21. data/lib/gooddata/mixins/md_finders.rb +35 -0
  22. data/lib/gooddata/models/blueprint/anchor_field.rb +46 -0
  23. data/lib/gooddata/models/blueprint/attribute_field.rb +25 -0
  24. data/lib/gooddata/models/blueprint/blueprint.rb +7 -0
  25. data/lib/gooddata/models/blueprint/blueprint_field.rb +66 -0
  26. data/lib/gooddata/models/{dashboard_builder.rb → blueprint/dashboard_builder.rb} +0 -0
  27. data/lib/gooddata/models/{schema_blueprint.rb → blueprint/dataset_blueprint.rb} +176 -117
  28. data/lib/gooddata/models/blueprint/date_dimension.rb +10 -0
  29. data/lib/gooddata/models/blueprint/fact_field.rb +16 -0
  30. data/lib/gooddata/models/blueprint/label_field.rb +39 -0
  31. data/lib/gooddata/models/{project_blueprint.rb → blueprint/project_blueprint.rb} +366 -168
  32. data/lib/gooddata/models/blueprint/project_builder.rb +79 -0
  33. data/lib/gooddata/models/blueprint/reference_field.rb +39 -0
  34. data/lib/gooddata/models/blueprint/schema_blueprint.rb +156 -0
  35. data/lib/gooddata/models/blueprint/schema_builder.rb +85 -0
  36. data/lib/gooddata/models/{to_manifest.rb → blueprint/to_manifest.rb} +25 -20
  37. data/lib/gooddata/models/{to_wire.rb → blueprint/to_wire.rb} +33 -52
  38. data/lib/gooddata/models/datawarehouse.rb +2 -2
  39. data/lib/gooddata/models/domain.rb +3 -2
  40. data/lib/gooddata/models/execution.rb +2 -2
  41. data/lib/gooddata/models/execution_detail.rb +7 -2
  42. data/lib/gooddata/models/from_wire.rb +60 -71
  43. data/lib/gooddata/models/from_wire_parse.rb +125 -125
  44. data/lib/gooddata/models/metadata.rb +14 -0
  45. data/lib/gooddata/models/metadata/dashboard.rb +2 -2
  46. data/lib/gooddata/models/metadata/label.rb +1 -1
  47. data/lib/gooddata/models/metadata/report.rb +6 -5
  48. data/lib/gooddata/models/metadata/report_definition.rb +44 -59
  49. data/lib/gooddata/models/model.rb +131 -43
  50. data/lib/gooddata/models/process.rb +13 -11
  51. data/lib/gooddata/models/profile.rb +12 -1
  52. data/lib/gooddata/models/project.rb +223 -19
  53. data/lib/gooddata/models/project_creator.rb +4 -15
  54. data/lib/gooddata/models/schedule.rb +1 -0
  55. data/lib/gooddata/models/user_filters/user_filter_builder.rb +2 -2
  56. data/lib/gooddata/rest/client.rb +18 -18
  57. data/lib/gooddata/rest/connection.rb +113 -94
  58. data/lib/gooddata/version.rb +1 -1
  59. data/lib/templates/project/model/model.rb.erb +15 -16
  60. data/spec/data/blueprints/additional_dataset_module.json +32 -0
  61. data/spec/data/blueprints/big_blueprint_not_pruned.json +2079 -0
  62. data/spec/data/blueprints/invalid_blueprint.json +103 -0
  63. data/spec/data/blueprints/m_n_model.json +104 -0
  64. data/spec/data/blueprints/model_module.json +25 -0
  65. data/spec/data/blueprints/test_blueprint.json +38 -0
  66. data/spec/data/blueprints/test_project_model_spec.json +106 -0
  67. data/spec/data/gd_gse_data_manifest.json +34 -34
  68. data/spec/data/manifests/test_blueprint.json +32 -0
  69. data/spec/data/{manifest_test_project.json → manifests/test_project.json} +9 -18
  70. data/spec/data/wire_models/test_blueprint.json +63 -0
  71. data/spec/data/wire_test_project.json +5 -5
  72. data/spec/environment/default.rb +33 -0
  73. data/spec/environment/develop.rb +26 -0
  74. data/spec/environment/environment.rb +14 -0
  75. data/spec/environment/hotfix.rb +17 -0
  76. data/spec/environment/production.rb +31 -0
  77. data/spec/environment/release.rb +17 -0
  78. data/spec/helpers/blueprint_helper.rb +10 -7
  79. data/spec/helpers/cli_helper.rb +24 -22
  80. data/spec/helpers/connection_helper.rb +27 -25
  81. data/spec/helpers/crypto_helper.rb +7 -5
  82. data/spec/helpers/csv_helper.rb +5 -3
  83. data/spec/helpers/process_helper.rb +15 -10
  84. data/spec/helpers/project_helper.rb +40 -33
  85. data/spec/helpers/schedule_helper.rb +15 -9
  86. data/spec/helpers/spec_helper.rb +11 -0
  87. data/spec/integration/blueprint_updates_spec.rb +93 -0
  88. data/spec/integration/command_datawarehouse_spec.rb +2 -1
  89. data/spec/integration/command_projects_spec.rb +9 -8
  90. data/spec/integration/create_from_template_spec.rb +1 -1
  91. data/spec/integration/create_project_spec.rb +1 -1
  92. data/spec/integration/full_process_schedule_spec.rb +1 -1
  93. data/spec/integration/full_project_spec.rb +91 -30
  94. data/spec/integration/over_to_user_filters_spec.rb +24 -28
  95. data/spec/integration/partial_md_export_import_spec.rb +4 -4
  96. data/spec/integration/project_spec.rb +1 -1
  97. data/spec/integration/rest_spec.rb +1 -1
  98. data/spec/integration/user_filters_spec.rb +19 -24
  99. data/spec/integration/variables_spec.rb +7 -9
  100. data/spec/logging_in_logging_out_spec.rb +1 -1
  101. data/spec/spec_helper.rb +10 -1
  102. data/spec/unit/bricks/middleware/aws_middelware_spec.rb +47 -0
  103. data/spec/unit/core/connection_spec.rb +2 -2
  104. data/spec/unit/core/logging_spec.rb +12 -4
  105. data/spec/unit/helpers/data_helper_spec.rb +60 -0
  106. data/spec/unit/models/blueprint/attributes_spec.rb +24 -0
  107. data/spec/unit/models/blueprint/dataset_spec.rb +116 -0
  108. data/spec/unit/models/blueprint/labels_spec.rb +39 -0
  109. data/spec/unit/models/blueprint/project_blueprint_spec.rb +643 -0
  110. data/spec/unit/models/blueprint/reference_spec.rb +24 -0
  111. data/spec/unit/models/{schema_builder_spec.rb → blueprint/schema_builder_spec.rb} +12 -4
  112. data/spec/unit/models/blueprint/to_wire_spec.rb +169 -0
  113. data/spec/unit/models/domain_spec.rb +13 -2
  114. data/spec/unit/models/from_wire_spec.rb +277 -98
  115. data/spec/unit/models/metadata_spec.rb +22 -4
  116. data/spec/unit/models/model_spec.rb +49 -39
  117. data/spec/unit/models/profile_spec.rb +1 -0
  118. data/spec/unit/models/project_spec.rb +7 -7
  119. data/spec/unit/models/schedule_spec.rb +20 -0
  120. data/spec/unit/models/to_manifest_spec.rb +31 -11
  121. data/spec/unit/rest/polling_spec.rb +86 -0
  122. metadata +102 -30
  123. data/lib/gooddata/models/project_builder.rb +0 -136
  124. data/lib/gooddata/models/schema_builder.rb +0 -77
  125. data/out.txt +0 -0
  126. data/spec/data/additional_dataset_module.json +0 -18
  127. data/spec/data/blueprint_invalid.json +0 -38
  128. data/spec/data/m_n_model/blueprint.json +0 -76
  129. data/spec/data/model_module.json +0 -18
  130. data/spec/data/test_project_model_spec.json +0 -76
  131. data/spec/unit/models/attribute_column_spec.rb +0 -7
  132. data/spec/unit/models/project_blueprint_spec.rb +0 -239
  133. data/spec/unit/models/to_wire_spec.rb +0 -71
data/spec/helpers/crypto_helper.rb
@@ -1,9 +1,11 @@
 require 'securerandom'
 
-module CryptoHelper
-  class << self
-    def generate_password
-      SecureRandom.hex(16)
+module GoodData::Helpers
+  module CryptoHelper
+    class << self
+      def generate_password
+        SecureRandom.hex(16)
+      end
     end
   end
-end
+end
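The spec helpers are now namespaced under GoodData::Helpers instead of living at the top level. A minimal usage sketch, assuming the helper file above is on the load path (the require path below is illustrative, not part of the gem's public API):

    require_relative 'spec/helpers/crypto_helper'

    # SecureRandom.hex(16) yields a 32-character hex string,
    # suitable for throwaway test accounts.
    password = GoodData::Helpers::CryptoHelper.generate_password
    puts password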
data/spec/helpers/csv_helper.rb
@@ -6,7 +6,9 @@ require 'multi_json'
 # Local requires
 require 'gooddata/models/models'
 
-module CsvHelper
-  CSV_PATH_EXPORT = 'out.txt'
-  CSV_PATH_IMPORT = File.join(File.dirname(__FILE__), '..', 'data', 'users.csv')
+module GoodData::Helpers
+  module CsvHelper
+    CSV_PATH_EXPORT = 'out.txt'
+    CSV_PATH_IMPORT = File.join(File.dirname(__FILE__), '..', 'data', 'users.csv')
+  end
 end
data/spec/helpers/process_helper.rb
@@ -7,17 +7,22 @@ require 'pmap'
 # Local requires
 require 'gooddata/models/models'
 
-module ProcessHelper
-  PROCESS_ID = '81fa71a4-69fd-4c58-aa09-66e7f53f4647'
-  DEPLOY_NAME = 'graph.grf'
+require_relative '../environment/environment'
 
-  class << self
-    def remove_old_processes(project)
-      processes = project.processes
-      processes.pmap do |process|
-        next if process.obj_id == PROCESS_ID
-        puts "Deleting #{process.inspect}"
-        process.delete
+GoodData::Environment.load
+
+module GoodData::Helpers
+  module ProcessHelper
+    include GoodData::Environment::ProcessHelper
+
+    class << self
+      def remove_old_processes(project)
+        processes = project.processes
+        processes.pmap do |process|
+          next if process.obj_id == PROCESS_ID
+          puts "Deleting #{process.inspect}"
+          process.delete
+        end
       end
     end
   end
data/spec/helpers/project_helper.rb
@@ -6,42 +6,49 @@ require 'multi_json'
 # Local requires
 require 'gooddata/models/models'
 
-module ProjectHelper
-  PROJECT_ID = 'we1vvh4il93r0927r809i3agif50d7iz'
-  PROJECT_URL = "/gdc/projects/#{PROJECT_ID}"
-  PROJECT_TITLE = 'GoodTravis'
-  PROJECT_SUMMARY = 'No summary'
-
-  def self.get_default_project(opts = { :client => GoodData.connection })
-    GoodData::Project[PROJECT_ID, opts]
-  end
+require_relative '../environment/environment'
+
+GoodData::Environment.load
+
+module GoodData::Helpers
+  module ProjectHelper
+    include GoodData::Environment::ProjectHelper
+
+    ENVIRONMENT = 'TESTING'
+
+    def self.get_default_project(opts = {:client => GoodData.connection})
+      GoodData::Project[PROJECT_ID, opts]
+    end
 
-  def self.delete_old_projects(opts = { :client => GoodData.connection })
-    projects = opts[:client].projects
-    projects.each do |project|
-      next if project.json['project']['meta']['author'] != client.user.uri
-      next if project.pid == 'we1vvh4il93r0927r809i3agif50d7iz'
-      begin
-        puts "Deleting project #{project.title}"
-        project.delete
-      rescue e
-        puts 'ERROR: ' + e.to_s
+    def self.delete_old_projects(opts = {:client => GoodData.connection})
+      projects = opts[:client].projects
+      projects.each do |project|
+        next if project.json['project']['meta']['author'] != client.user.uri
+        next if project.pid == 'we1vvh4il93r0927r809i3agif50d7iz'
+        begin
+          puts "Deleting project #{project.title}"
+          project.delete
+        rescue e
+          puts 'ERROR: ' + e.to_s
+        end
       end
     end
-  end
 
-  def self.create_random_user(client)
-    num = rand(1e7)
-    login = "gemtest#{num}@gooddata.com"
-
-    GoodData::Membership.create({
-      email: login,
-      login: login,
-      first_name: 'the',
-      last_name: num.to_s,
-      role: 'editor',
-      password: CryptoHelper.generate_password,
-      domain: ConnectionHelper::DEFAULT_DOMAIN
-    }, client: client)
+    def self.create_random_user(client)
+      num = rand(1e7)
+      login = "gemtest#{num}@gooddata.com"
+
+      opts = {
+        email: login,
+        login: login,
+        first_name: 'the',
+        last_name: num.to_s,
+        role: 'editor',
+        password: CryptoHelper.generate_password,
+        domain: ConnectionHelper::DEFAULT_DOMAIN
+      }
+      GoodData::Membership.create(opts, client: client)
+    end
   end
+
 end
data/spec/helpers/schedule_helper.rb
@@ -4,17 +4,23 @@ require 'pmap'
 
 require_relative 'process_helper'
 
-module ScheduleHelper
-  SCHEDULE_ID = '54b90771e4b067429a27a549'
+require_relative '../environment/environment'
 
-  class << self
-    def remove_old_schedules(project)
-      schedules = project.schedules
-      schedules.pmap do |schedule|
-        next if schedule.obj_id == SCHEDULE_ID
+GoodData::Environment.load
 
-        puts "Deleting #{schedule.inspect}"
-        schedule.delete
+module GoodData::Helpers
+  module ScheduleHelper
+    include GoodData::Environment::ScheduleHelper
+
+    class << self
+      def remove_old_schedules(project)
+        schedules = project.schedules
+        schedules.pmap do |schedule|
+          next if schedule.obj_id == SCHEDULE_ID
+
+          puts "Deleting #{schedule.inspect}"
+          schedule.delete
+        end
       end
     end
   end
data/spec/helpers/spec_helper.rb
@@ -0,0 +1,11 @@
+# encoding: UTF-8
+
+module GoodData::Helpers
+  module SpecHelper
+    class << self
+      def random_choice(possibilities, current_value)
+        (possibilities - Array(current_value)).sample
+      end
+    end
+  end
+end
data/spec/integration/blueprint_updates_spec.rb
@@ -0,0 +1,93 @@
+require 'gooddata'
+
+describe 'Create project using GoodData client', :constraint => 'slow' do
+  before(:all) do
+    @client = ConnectionHelper.create_default_connection
+    @blueprint = GoodData::Model::ProjectBlueprint.from_json('./spec/data/blueprints/test_project_model_spec.json')
+    @project = @client.create_project_from_blueprint(@blueprint, auth_token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT)
+  end
+
+  after(:all) do
+    @project.delete
+    @client.disconnect
+  end
+
+  it 'Should create project using GoodData::Rest::Client#create_project' do
+    data = [
+      ['dev_id', 'email'],
+      ['1', 'tomas'],
+      ['2', 'petr'],
+      ['3', 'jirka']]
+    @project.upload(data, @blueprint, 'dataset.repos')
+
+    data = [
+      ['dev_id', 'email'],
+      ['1', 'tomas@gmail.com'],
+      ['2', 'petr@gmail.com'],
+      ['3', 'jirka@gmail.com']]
+    @project.upload(data, @blueprint, 'dataset.devs')
+
+    data = [
+      ['lines_changed', 'committed_on', 'dev_id', 'repo_id'],
+      [1, '01/01/2011', '1', '1'],
+      [2, '01/01/2011', '2', '2'],
+      [3, '01/01/2011', '3', '3']]
+    @project.upload(data, @blueprint, 'dataset.commits')
+  end
+
+  it "should be able to add anchor's labels" do
+    skip('failing on server need to clear out with MSF')
+    bp = @project.blueprint
+    bp.datasets('dataset.commits').change do |d|
+      d.add_label('label.commits.factsof.id',
+                  reference: 'attr.commits.factsof',
+                  name: 'anchor_label')
+    end
+    @project.update_from_blueprint(bp)
+    data = [
+      ['anchor_label', 'some_id_name', 'lines_changed', 'committed_on', 'dev_id', 'repo_id'],
+      ['111', 1, 3, '01/01/2011', '1', '1'],
+      ['222', 2, 9, '01/01/2011', '2', '2'],
+      ['333', 3, 4, '01/01/2011', '3', '3']]
+    @project.upload(data, @blueprint, 'dataset.commits')
+    m = @project.facts.first.create_metric
+    @project.compute_report(top: [m], left: ['label.commits.factsof.id'])
+  end
+
+  it "be able to remove anchor's labels" do
+    bp = @project.blueprint
+    bp.datasets('dataset.commits').anchor.strip!
+    @project.update_from_blueprint(bp)
+    bp = @project.blueprint
+    expect(bp.datasets('dataset.commits').anchor.labels.count).to eq 0
+    expect(@project.labels('label.commits.factsof.id')).to eq nil
+  end
+
+  it "is possible to move attribute. Let's make a fast attribute." do
+    # define stuff
+    m = @project.facts.first.create_metric.save
+    report = @project.create_report(title: 'Test report', top: [m], left: ['label.devs.dev_id.email'])
+    #both compute
+    expect(m.execute).to eq 6
+    expect(report.execute.to_a).to eq [['jirka@gmail.com', 'petr@gmail.com', 'tomas@gmail.com'],
+                                       [3.0, 2.0, 1.0]]
+
+    # We move attribute
+    @blueprint.move!('some_attr_id', 'dataset.repos', 'dataset.commits')
+    @project.update_from_blueprint(@blueprint)
+
+    # load new data
+    data = [
+      ['lines_changed', 'committed_on', 'dev_id', 'repo_id', 'email'],
+      [1, '01/01/2011', '1', '1', 'tomas'],
+      [2, '01/01/2011', '2', '2', 'petr'],
+      [3, '01/01/2011', '3', '3', 'jirka']]
+    @project.upload(data, @blueprint, 'dataset.commits')
+
+    # both still compute
+    # since we did not change the grain the results are the same
+    expect(m.execute).to eq 6
+    expect(report.execute.to_a).to eq [['jirka@gmail.com', 'petr@gmail.com', 'tomas@gmail.com'],
+                                       [3.0, 2.0, 1.0]]
+  end
+end
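The new integration spec above exercises the Project#upload call that these specs switch to in this release. A minimal sketch of the same flow outside RSpec, assuming valid credentials and a provisioning token (every value below is a placeholder):

    require 'gooddata'

    client = GoodData.connect('user@example.com', 'secret')
    blueprint = GoodData::Model::ProjectBlueprint.from_json('./spec/data/blueprints/test_project_model_spec.json')
    project = client.create_project_from_blueprint(blueprint, auth_token: 'token')

    # Rows are plain Ruby arrays; the first row names the columns declared in the blueprint.
    data = [
      ['dev_id', 'email'],
      ['1', 'tomas@gmail.com'],
      ['2', 'petr@gmail.com']]
    project.upload(data, blueprint, 'dataset.devs')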
data/spec/integration/command_datawarehouse_spec.rb
@@ -20,7 +20,8 @@ describe GoodData::Command::DataWarehouse do
     dwh = nil
 
     begin
-      dwh = GoodData::Command::DataWarehouse.create(title: title, summary: summary, token: ConnectionHelper::GD_PROJECT_TOKEN, client: @client)
+      dwh = GoodData::Command::DataWarehouse.create(title: title, summary: summary, token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT, client: @client)
+
       expect(dwh.title).to eq(title)
       expect(dwh.summary).to eq(summary)
       expect(dwh.id).not_to be_nil
data/spec/integration/command_projects_spec.rb
@@ -5,25 +5,26 @@ describe GoodData::Command::Project, :constraint => 'slow' do
   before(:all) do
     @client = ConnectionHelper.create_default_connection
 
-    @blueprint = GoodData::Model::ProjectBlueprint.from_json("./spec/data/test_project_model_spec.json")
-    @module_blueprint = GoodData::Model::ProjectBlueprint.from_json("./spec/data/additional_dataset_module.json")
+    @blueprint = GoodData::Model::ProjectBlueprint.from_json("./spec/data/blueprints/test_project_model_spec.json")
+    @module_blueprint = GoodData::Model::ProjectBlueprint.from_json("./spec/data/blueprints/additional_dataset_module.json")
 
-    @project = GoodData::Command::Project.build({:spec => @blueprint, :token => ConnectionHelper::GD_PROJECT_TOKEN, :client => @client})
+    GoodData.logging_on
+    GoodData.logger.level = Logger::DEBUG
+
+    @project = GoodData::Command::Project.build({:spec => @blueprint, :token => ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT, :client => @client})
   end
 
   after(:all) do
     @project.delete unless @project.nil?
-
     @client.disconnect
   end
 
   it "should update the project" do
     @blueprint.merge!(@module_blueprint)
     @project.blueprint.datasets.count.should == 3
-    @project.blueprint.datasets(:include_date_dimensions => true).count.should == 4
-    GoodData::Command::Project.update({:spec => @blueprint, :client => @client, :project => @project})
+    @project.blueprint.datasets(:all, :include_date_dimensions => true).count.should == 4
+    @project.update_from_blueprint(@blueprint)
     @project.blueprint.datasets.count.should == 4
-    @project.blueprint.datasets(:include_date_dimensions => true).count.should == 5
-
+    @project.blueprint.datasets(:all, :include_date_dimensions => true).count.should == 5
   end
 end
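As the spec change above shows, the blueprint dataset listing now takes an explicit :all argument when date dimensions should be included. A small sketch against the bundled fixture blueprint (the path is the one the specs use):

    require 'gooddata'

    bp = GoodData::Model::ProjectBlueprint.from_json('./spec/data/blueprints/test_project_model_spec.json')
    bp.datasets.count                                          # plain datasets only
    bp.datasets(:all, :include_date_dimensions => true).count  # datasets plus date dimensions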
data/spec/integration/create_from_template_spec.rb
@@ -10,7 +10,7 @@ describe "Spin a project from template", :constraint => 'slow' do
   end
 
   it "should spin a project from a template that does not exist. It should throw an error" do
-    expect{GoodData::Project.create(:title => "Test project", :template => "/some/nonexisting/template/uri", :auth_token => ConnectionHelper::GD_PROJECT_TOKEN, :client => @client)}.to raise_error
+    expect{GoodData::Project.create(:title => "Test project", :template => "/some/nonexisting/template/uri", :auth_token => ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT, :client => @client)}.to raise_error
   end
 
 end
data/spec/integration/create_project_spec.rb
@@ -11,7 +11,7 @@ describe 'Create project using GoodData client', :constraint => 'slow' do
 
   it 'Should create project using GoodData::Rest::Client#create_project' do
     project_title = 'Test #create_project'
-    project = @client.create_project(:title => project_title, :auth_token => ConnectionHelper::GD_PROJECT_TOKEN)
+    project = @client.create_project(:title => project_title, :auth_token => ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT)
     expect(project.title).to eq(project_title)
     project.delete
   end
data/spec/integration/full_process_schedule_spec.rb
@@ -22,7 +22,7 @@ describe "Full process and schedule exercise", :constraint => 'slow' do
 
   before(:all) do
     @client = ConnectionHelper::create_default_connection
-    @project = @client.create_project(title: 'Project for schedule testing', auth_token: ConnectionHelper::GD_PROJECT_TOKEN)
+    @project = @client.create_project(title: 'Project for schedule testing', auth_token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT)
     @process = @project.deploy_process('./spec/data/ruby_process',
                                        type: 'RUBY',
                                        name: 'Test ETL Process (Ruby)')
data/spec/integration/full_project_spec.rb
@@ -2,10 +2,13 @@ require 'gooddata'
 
 describe "Full project implementation", :constraint => 'slow' do
   before(:all) do
-    @spec = JSON.parse(File.read("./spec/data/test_project_model_spec.json"), :symbolize_names => true)
-    @invalid_spec = JSON.parse(File.read("./spec/data/blueprint_invalid.json"), :symbolize_names => true)
+    @spec = JSON.parse(File.read("./spec/data/blueprints/test_project_model_spec.json"), :symbolize_names => true)
+    @invalid_spec = JSON.parse(File.read("./spec/data/blueprints/invalid_blueprint.json"), :symbolize_names => true)
     @client = ConnectionHelper::create_default_connection
-    @project = @client.create_project_from_blueprint(@spec, auth_token: ConnectionHelper::GD_PROJECT_TOKEN)
+    @blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
+    @invalid_blueprint = GoodData::Model::ProjectBlueprint.new(@invalid_spec)
+
+    @project = @client.create_project_from_blueprint(@blueprint, token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT)
   end
 
   after(:all) do
@@ -16,7 +19,7 @@ describe "Full project implementation", :constraint => 'slow' do
 
   it "should not build an invalid model" do
     expect {
-      @client.create_project_from_blueprint(@invalid_spec, auth_token: ConnectionHelper::GD_PROJECT_TOKEN)
+      @client.create_project_from_blueprint(@invalid_spec, auth_token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT)
     }.to raise_error(GoodData::ValidationError)
   end
 
@@ -31,8 +34,9 @@
     dataset.save
 
     # Now the update of project using the original blueprint should offer update of the title. Nothing else.
+    results = GoodData::Model::ProjectCreator.migrate_datasets(@blueprint, project: @project, client: @client, dry_run: true)
     results = GoodData::Model::ProjectCreator.migrate_datasets(@spec, project: @project, client: @client, dry_run: true)
-    expect(results['updateScript']['maqlDdl']).to eq "ALTER DATASET {dataset.repos} VISUAL(TITLE \"Repos\", DESCRIPTION \"\");\n"
+    expect(results['updateScript']['maqlDdl']).to eq "ALTER DATASET {dataset.repos} VISUAL(TITLE \"Repositories\", DESCRIPTION \"\");\n"
 
     # Update using a freshly gained blueprint should offer no changes.
     new_blueprint = @project.blueprint
@@ -47,10 +51,9 @@
   end
 
   it "should contain datasets" do
-    @project.blueprint.tap do |bp|
-      expect(bp.datasets.count).to eq 3
-      expect(bp.datasets(:include_date_dimensions => true).count).to eq 4
-    end
+    bp = @project.blueprint
+    expect(bp.datasets.count).to eq 3
+    expect(bp.datasets(:all, :include_date_dimensions => true).count).to eq 4
   end
 
   it "should contain metadata datasets" do
@@ -88,22 +91,73 @@ describe "Full project implementation", :constraint => 'slow' do
 
   it "should load the data" do
     GoodData.with_project(@project) do |p|
-      blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
+      # blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
      commits_data = [
       ["lines_changed","committed_on","dev_id","repo_id"],
       [1,"01/01/2014",1,1],
       [3,"01/02/2014",2,2],
       [5,"05/02/2014",3,1]]
-      GoodData::Model.upload_data(commits_data, blueprint, 'commits', :client => @client, :project => @project)
-      # blueprint.find_dataset('commits').upload(commits_data)
+      @project.upload(commits_data, @blueprint, 'dataset.commits')
 
      devs_data = [
       ["dev_id", "email"],
       [1, "tomas@gooddata.com"],
       [2, "petr@gooddata.com"],
       [3, "jirka@gooddata.com"]]
-      GoodData::Model.upload_data(devs_data, blueprint, 'devs', :client => @client, :project => @project)
-      # blueprint.find_dataset('devs').upload(devs_data)
+      @project.upload(devs_data, @blueprint, 'dataset.devs')
+    end
+  end
+
+  it "it silently ignores extra columns" do
+    GoodData.with_project(@project) do |p|
+      blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
+      commits_data = [
+        ["lines_changed","committed_on","dev_id","repo_id", "extra_column"],
+        [1,"01/01/2014",1,1,"something"],
+        [3,"01/02/2014",2,2,"something"],
+        [5,"05/02/2014",3,1,"something else"]
+      ]
+      @project.upload(commits_data, blueprint, 'dataset.commits')
+    end
+  end
+
+  context "it should give you a reasonable error message" do
+    it "if you omit a column" do
+      GoodData.with_project(@project) do |p|
+        blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
+        commits_data = [
+          ["lines_changed","committed_on","dev_id"],
+          [1,"01/01/2014",1],
+          [3,"01/02/2014",2],
+          [5,"05/02/2014",3]
+        ]
+        expect {@project.upload(commits_data, blueprint, 'dataset.commits')}.to raise_error(/repo_id/)
+      end
+    end
+    it "if you give it a malformed CSV" do
+      GoodData.with_project(@project) do |p|
+        blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
+        # 4 cols in header but not in the data
+        commits_data = [
+          ["lines_changed","committed_on","dev_id","repo_id"],
+          [1,"01/01/2014",1],
+          [3,"01/02/2014",2],
+          [5,"05/02/2014",3]
+        ]
+        expect {@project.upload(commits_data, blueprint, 'dataset.commits')}.to raise_error(/Number of columns/)
+      end
    end
+    it "if you give it wrong date format" do
+      GoodData.with_project(@project) do |p|
+        blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
+        commits_data = [
+          ["lines_changed","committed_on","dev_id","repo_id"],
+          [1,"01/01/2014",1,1],
+          [3,"45/50/2014",2,2],
+          [5,"05/02/2014",3,1]
+        ]
+        expect {@project.upload(commits_data, blueprint, 'dataset.commits')}.to raise_error(%r{45/50/2014})
+      end
    end
  end
 
@@ -113,6 +167,14 @@ describe "Full project implementation", :constraint => 'slow' do
     expect(metric.execute).to eq 9
   end
 
+  it "should compute a count metric from dataset" do
+    # works on anchor without label
+    expect(@blueprint.datasets('dataset.commits').count(@project)).to eq 3
+
+    # works on anchor with label
+    expect(@blueprint.datasets('dataset.devs').count(@project)).to eq 3
+  end
+
   it "should execute an anonymous metric twice and not fail" do
     f = @project.fact_by_title('Lines Changed')
     metric = @project.create_metric("SELECT SUM(#\"#{f.title}\")")
@@ -278,14 +340,6 @@ describe "Full project implementation", :constraint => 'slow' do
     expect(tagged_facts.count).to eq 1
   end
 
-  it "should contain metadata for each dataset in project metadata" do
-    k = @project.metadata.keys
-    expect(k).to include("manifest_devs")
-    expect(@project.metadata("manifest_devs")).not_to be_nil
-    @project.set_metadata('a', 'b')
-    expect(@project.metadata('a')).to eq 'b'
-  end
-
   it "should be able to interpolate metric based on" do
     res = @project.compute_metric "SELECT SUM(![fact.commits.lines_changed])"
     expect(res).to eq 9
@@ -306,12 +360,10 @@ describe "Full project implementation", :constraint => 'slow' do
   end
 
   it "should load the data" do
-    blueprint = GoodData::Model::ProjectBlueprint.new(@spec)
    devs_data = [
     ["dev_id", "email"],
     [4, "josh@gooddata.com"]]
-    GoodData::Model.upload_data(devs_data, blueprint, 'devs', mode: 'INCREMENTAL', :client => @client, :project => @project)
-    # blueprint.find_dataset('devs').upload(devs_data, :load => 'INCREMENTAL')
+    @project.upload(devs_data, @blueprint, 'dataset.devs', mode: 'INCREMENTAL')
   end
 
   it "should have more users" do
@@ -396,10 +448,10 @@
 
   it "should be able to give you label by name" do
     attribute = @project.attributes('attr.devs.dev_id')
-    label = attribute.label_by_name('email')
+    label = attribute.label_by_name('Id')
     expect(label.label?).to eq true
-    expect(label.title).to eq 'Email'
-    expect(label.identifier).to eq 'label.devs.dev_id.email'
+    expect(label.title).to eq 'Id'
+    expect(label.identifier).to eq 'label.devs.dev_id.id'
     expect(label.attribute_uri).to eq attribute.uri
     expect(label.attribute).to eq attribute
   end
@@ -422,7 +474,7 @@
 
   it "should be able to clone a project" do
     title = 'My new clone proejct'
-    cloned_project = @project.clone(title: title, auth_token: ConnectionHelper::GD_PROJECT_TOKEN)
+    cloned_project = @project.clone(title: title, auth_token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT)
     expect(cloned_project.title).to eq title
     expect(cloned_project.facts.first.create_metric.execute).to eq 9
     cloned_project.delete
@@ -430,12 +482,21 @@
 
   it "should be able to clone a project without data" do
     title = 'My new clone project'
-    cloned_project = @project.clone(title: title, auth_token: ConnectionHelper::GD_PROJECT_TOKEN, data: false)
+    cloned_project = @project.clone(title: title, auth_token: ConnectionHelper::GD_PROJECT_TOKEN, environment: ProjectHelper::ENVIRONMENT, data: false)
     expect(cloned_project.title).to eq title
     expect(cloned_project.facts.first.create_metric.execute).to eq nil
     cloned_project.delete
   end
 
+  it "should be able to export report" do
+    m = @project.metrics.first
+    r = @project.create_report(top: [m], title: 'Report to export')
+    r.save
+    r.export(:csv)
+    r.export(:pdf)
+    r.delete
+  end
+
   it "should be able to delete report along with its definitions" do
     m = @project.metrics.first
     r = @project.create_report(top: [m], title: 'Report to delete')
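The new "should be able to export report" spec covers CSV and PDF export. A minimal sketch of the same calls, assuming a connected client and an existing project that already has at least one metric (credentials and project id are placeholders):

    require 'gooddata'

    client = GoodData.connect('user@example.com', 'secret')
    project = GoodData::Project['project_pid', client: client]

    metric = project.metrics.first
    report = project.create_report(top: [metric], title: 'Report to export')
    report.save
    report.export(:csv)   # :pdf is also exercised by the spec above
    report.delete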