power-bi 2.5.0 → 2.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 98d26348ce002789d1f065e91735ae2d441a64bb32c8c935993961036f2d2379
4
- data.tar.gz: e3527606c13a8f62a4918b98e0dfc5ed441f691b836b4f670251e80442fcf104
3
+ metadata.gz: ad399c89996fb5bc9d8503233c2827c7f57366cd849e16c3e780048a8f1973ae
4
+ data.tar.gz: a2ffbbac846e08cc9e9fefd52b86e9a996802ec51e5a4f748f0ab8c8799f4b87
5
5
  SHA512:
6
- metadata.gz: 5e1674d67c5cf55473e0325601b65b532dc374cd9e5a08a2345a282c4b5dd0a984392d1c0196857a649a00ceaf45362d21980e85c9050868b5538b4377f20dd6
7
- data.tar.gz: 1c7517a30c1d32ddcc7e6d6883ef6a5b91fcac12500c76b89bdbebb473c405dde6b58f45a2b60dc3c60b7dfac440973ea81daa6a49f308c9d22b23670484293a
6
+ metadata.gz: 88c0f92c90ae4392b20ea6a85f7be737302857735135b73cba780c2afd31349b672772ac2d95e38fa40a4cd01d8ae226edf49acab7caf4219c118c99021559c8
7
+ data.tar.gz: 78f8483bfc046ef783b852ec66b6ce7c5645c123e793dd475416de440ac83fb2ae65f3968553e02e8474e983ba06f9b27f32bf0e6ff57f3c406586b8e693427c
data/README.md CHANGED
@@ -112,6 +112,10 @@ Note 2: to limit the number of API calls, it is best to directly use the _getter
112
112
  * Delete the dataset: `dataset.delete`
113
113
  * Bind dataset to a gateway datasource: `dataset.bind_to_gateway(gateway, gateway_datasource)`
114
114
 
115
+ ## Datasources
116
+
117
+ * Update credentials of a datasource: `datasource.update_credentials(username, password)`
118
+
115
119
  ## Gateways
116
120
 
117
121
  * List gateways: `pbi.gateways`
@@ -30,28 +30,11 @@ module PowerBI
30
30
  end
31
31
 
32
32
  def get_workspaces(filter: nil, expand: nil)
33
- params = {}
34
- params[:$filter] = filter if filter
35
- params[:$expand] = expand if expand
33
+ base_params = {}
34
+ base_params[:$filter] = filter if filter
35
+ base_params[:$expand] = expand if expand
36
36
 
37
- url = '/admin/groups'
38
-
39
- nr_records = 5000
40
- count = 0
41
-
42
- data = []
43
-
44
- loop do
45
- params[:$top] = nr_records
46
- params[:$skip] = count
47
- resp = @tenant.get(url, params)
48
- data += resp[:value]
49
- batch_count = resp[:value].size
50
- count += batch_count
51
- break if batch_count < nr_records
52
- end
53
-
54
- data
37
+ @tenant.get_paginated('/admin/groups', base_params: base_params)
55
38
  end
56
39
 
57
40
  def force_delete_workspace_by_workspace_name(user_email, workspace_name)
@@ -87,7 +87,7 @@ module PowerBI
87
87
  end
88
88
 
89
89
  def get_data
90
- @tenant.get("/groups/#{@workspace.id}/datasets")[:value]
90
+ @tenant.get_paginated("/groups/#{@workspace.id}/datasets")
91
91
  end
92
92
  end
93
93
  end
@@ -16,6 +16,24 @@ module PowerBI
16
16
  }
17
17
  end
18
18
 
19
+ # Note: currently only the MySQL datasource type is supported
20
+ def update_credentials(username, password)
21
+ @tenant.patch("/gateways/#{gateway_id}/datasources/#{datasource_id}") do |req|
22
+ req.body = {
23
+ credentialDetails: {
24
+ credentialType: "Basic",
25
+ credentials: "{\"credentialData\":[{\"name\":\"username\", \"value\":\"#{username}\"},{\"name\":\"password\", \"value\":\"#{password}\"}]}",
26
+ encryptedConnection: "Encrypted",
27
+ encryptionAlgorithm: "None",
28
+ privacyLevel: "None",
29
+ useCallerAADIdentity: false,
30
+ useEndUserOAuth2Credentials: false,
31
+ },
32
+ }.to_json
33
+ end
34
+ true
35
+ end
36
+
19
37
  end
20
38
 
21
39
  class DatasourceArray < Array
@@ -18,6 +18,14 @@ module PowerBI
18
18
  }
19
19
  end
20
20
 
21
+ # Currently implemented only for users and service principals (not yet for service principal profiles)
22
+ def delete
23
+ email_address_or_object_id = email_address || identifier
24
+ @tenant.delete("/gateways/#{@gateway_datasource.gateway.id}/datasources/#{@gateway_datasource.id}/users/#{email_address_or_object_id}", use_profile: false)
25
+ @gateway_datasource.gateway_datasource_users.reload
26
+ true
27
+ end
28
+
21
29
  end
22
30
 
23
31
  class GatewayDatasourceUserArray < Array
@@ -38,7 +38,7 @@ module PowerBI
38
38
  end
39
39
 
40
40
  def get_data
41
- @tenant.get("/profiles", use_profile: false)[:value]
41
+ @tenant.get_paginated("/profiles", use_profile: false)
42
42
  end
43
43
  end
44
44
  end
@@ -106,7 +106,7 @@ module PowerBI
106
106
  end
107
107
 
108
108
  def get_data
109
- @tenant.get("/groups/#{@workspace.id}/reports")[:value]
109
+ @tenant.get_paginated("/groups/#{@workspace.id}/reports")
110
110
  end
111
111
  end
112
112
  end
@@ -74,7 +74,7 @@ module PowerBI
74
74
  unless [200, 202].include? response.status
75
75
  raise APIError.new("Error calling Power BI API (status #{response.status}): #{response.body}")
76
76
  end
77
- log "Calling (GET) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms"
77
+ log "Calling (GET) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms - status: #{response.status}"
78
78
  unless response.body.empty?
79
79
  JSON.parse(response.body, symbolize_names: true)
80
80
  end
@@ -93,7 +93,7 @@ module PowerBI
93
93
  end
94
94
  yield req if block_given?
95
95
  end
96
- log "Calling (GET - raw) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms"
96
+ log "Calling (GET - raw) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms - status: #{response.status}"
97
97
  unless [200, 202].include? response.status
98
98
  raise APIError.new("Error calling Power BI API (status #{response.status}): #{response.body}")
99
99
  end
@@ -115,7 +115,7 @@ module PowerBI
115
115
  end
116
116
  yield req if block_given?
117
117
  end
118
- log "Calling (POST) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms"
118
+ log "Calling (POST) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms - status: #{response.status}"
119
119
  unless [200, 201, 202].include? response.status
120
120
  raise APIError.new("Error calling Power BI API (status #{response.status}): #{response.body}")
121
121
  end
@@ -139,7 +139,7 @@ module PowerBI
139
139
  end
140
140
  yield req if block_given?
141
141
  end
142
- log "Calling (PATCH) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms"
142
+ log "Calling (PATCH) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms - status: #{response.status}"
143
143
  unless [200, 202].include? response.status
144
144
  raise APIError.new("Error calling Power BI API (status #{response.status}): #{response.body}")
145
145
  end
@@ -162,7 +162,7 @@ module PowerBI
162
162
  end
163
163
  yield req if block_given?
164
164
  end
165
- log "Calling (DELETE) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms"
165
+ log "Calling (DELETE) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms - status: #{response.status}"
166
166
  if [400, 401, 404].include? response.status
167
167
  raise NotFoundError
168
168
  end
@@ -191,13 +191,76 @@ module PowerBI
191
191
  req.body = {value: Faraday::UploadIO.new(file, 'application/octet-stream')}
192
192
  req.options.timeout = 120 # default is 60 seconds Net::ReadTimeout
193
193
  end
194
- log "Calling (POST - file) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms"
194
+ log "Calling (POST - file) #{response.env.url.to_s} - took #{((Time.now - t0) * 1000).to_i} ms - status: #{response.status}"
195
195
  if response.status != 202
196
196
  raise APIError.new("Error calling Power BI API (status #{response.status}): #{response.body}")
197
197
  end
198
198
  JSON.parse(response.body, symbolize_names: true)
199
199
  end
200
200
 
201
+ # Fetches paginated data from the Power BI API using OData-style pagination.
202
+ #
203
+ # Power BI API has a documented limit of 5000 records per request.
204
+ # This method handles pagination automatically by:
205
+ # 1. Requesting 5000 records at a time using $top and $skip
206
+ # 2. If exactly 5000 records are returned, fetching the next page
207
+ # 3. Accumulating all results across pages
208
+ # 4. Deduplicating records by ID (to handle insertions between requests)
209
+ #
210
+ # Note: $skip-based pagination is not fully protected against deletions
211
+ # between requests (a deleted record may cause a subsequent record to go
212
+ # unseen). This risk is acceptable given the short pagination window.
213
+ MAX_PAGE_SIZE = 5000
214
+ MAX_ITERATIONS = 100
215
+ def get_paginated(url, page_size: MAX_PAGE_SIZE, base_params: {}, use_profile: true, max_iterations: MAX_ITERATIONS)
216
+ page_size = [page_size, MAX_PAGE_SIZE].min
217
+
218
+ skip = 0
219
+ all_data = []
220
+ iteration = 0
221
+
222
+ loop do
223
+ iteration += 1
224
+ if iteration > max_iterations
225
+ log "WARNING: Reached maximum iteration limit (#{max_iterations}). " \
226
+ "Fetched #{all_data.size} records so far. This may indicate an API issue or " \
227
+ "an extremely large dataset. Consider using API filters to reduce the result set.",
228
+ level: :warn
229
+ break
230
+ end
231
+
232
+ params = base_params.merge({
233
+ '$top' => page_size,
234
+ '$skip' => skip
235
+ })
236
+
237
+ log "Fetching paginated data from #{url} (skip: #{skip}, top: #{page_size}, iteration: #{iteration})"
238
+
239
+ resp = get(url, params, use_profile: use_profile)
240
+ batch = resp[:value] || []
241
+ all_data += batch
242
+ batch_count = batch.size
243
+
244
+ log "Received #{batch_count} records (total so far: #{all_data.size})"
245
+
246
+ # If we got fewer records than requested, we've reached the last page
247
+ break if batch_count < page_size
248
+
249
+ skip += batch_count
250
+ end
251
+
252
+ # Deduplicate by ID to handle any records that were inserted between requests.
253
+ # Insertions before the current $skip position shift items right, which can
254
+ # cause duplicates across pages.
255
+ deduplicated_data = all_data.uniq{|r| r[:id]}
256
+
257
+ if deduplicated_data.size < all_data.size
258
+ log "Removed #{all_data.size - deduplicated_data.size} duplicate records during deduplication"
259
+ end
260
+
261
+ deduplicated_data
262
+ end
263
+
201
264
  private
202
265
 
203
266
  def add_spp_header(req)
@@ -113,7 +113,7 @@ module PowerBI
113
113
  end
114
114
 
115
115
  def get_data
116
- @tenant.get("/groups")[:value]
116
+ @tenant.get_paginated("/groups")
117
117
  end
118
118
  end
119
119
  end
data/lib/power-bi.rb CHANGED
@@ -1,4 +1,3 @@
1
- require 'pry' # TODO remove in final product
2
1
  require 'faraday'
3
2
  require 'json'
4
3
  require 'date'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: power-bi
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.5.0
4
+ version: 2.6.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Lode Cools
8
- autorequire:
8
+ autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-05-21 00:00:00.000000000 Z
11
+ date: 2026-02-11 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: faraday
@@ -110,7 +110,7 @@ homepage: https://github.com/piloos/power-bi
110
110
  licenses:
111
111
  - MIT
112
112
  metadata: {}
113
- post_install_message:
113
+ post_install_message:
114
114
  rdoc_options: []
115
115
  require_paths:
116
116
  - lib
@@ -126,7 +126,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
126
126
  version: '0'
127
127
  requirements: []
128
128
  rubygems_version: 3.4.6
129
- signing_key:
129
+ signing_key:
130
130
  specification_version: 4
131
131
  summary: Ruby wrapper for the Power BI API
132
132
  test_files: []