aspera-cli 4.20.0 → 4.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +20 -2
  4. data/README.md +281 -156
  5. data/bin/asession +2 -2
  6. data/lib/aspera/agent/alpha.rb +7 -12
  7. data/lib/aspera/agent/connect.rb +19 -1
  8. data/lib/aspera/agent/direct.rb +20 -29
  9. data/lib/aspera/agent/node.rb +1 -11
  10. data/lib/aspera/agent/trsdk.rb +4 -25
  11. data/lib/aspera/api/aoc.rb +5 -0
  12. data/lib/aspera/api/node.rb +45 -28
  13. data/lib/aspera/ascp/installation.rb +69 -38
  14. data/lib/aspera/ascp/management.rb +27 -6
  15. data/lib/aspera/cli/formatter.rb +149 -141
  16. data/lib/aspera/cli/info.rb +1 -1
  17. data/lib/aspera/cli/manager.rb +1 -0
  18. data/lib/aspera/cli/plugin.rb +2 -2
  19. data/lib/aspera/cli/plugins/aoc.rb +27 -17
  20. data/lib/aspera/cli/plugins/config.rb +31 -21
  21. data/lib/aspera/cli/plugins/faspex.rb +1 -1
  22. data/lib/aspera/cli/plugins/faspex5.rb +11 -3
  23. data/lib/aspera/cli/plugins/node.rb +44 -38
  24. data/lib/aspera/cli/version.rb +1 -1
  25. data/lib/aspera/command_line_builder.rb +1 -1
  26. data/lib/aspera/environment.rb +5 -6
  27. data/lib/aspera/node_simulator.rb +228 -112
  28. data/lib/aspera/oauth/base.rb +31 -42
  29. data/lib/aspera/oauth/factory.rb +41 -2
  30. data/lib/aspera/persistency_folder.rb +20 -2
  31. data/lib/aspera/preview/generator.rb +1 -1
  32. data/lib/aspera/preview/utils.rb +1 -1
  33. data/lib/aspera/products/alpha.rb +30 -0
  34. data/lib/aspera/products/connect.rb +48 -0
  35. data/lib/aspera/products/other.rb +82 -0
  36. data/lib/aspera/products/trsdk.rb +54 -0
  37. data/lib/aspera/rest.rb +18 -13
  38. data/lib/aspera/ssh.rb +28 -24
  39. data/lib/aspera/transfer/spec.yaml +22 -20
  40. data.tar.gz.sig +0 -0
  41. metadata +21 -4
  42. metadata.gz.sig +0 -0
  43. data/lib/aspera/ascp/products.rb +0 -168
data/lib/aspera/node_simulator.rb
@@ -7,28 +7,239 @@ require 'webrick'
  require 'json'

  module Aspera
+ class NodeSimulator
+ def initialize
+ @agent = Agent::Direct.new(management_cb: ->(event){process_event(event)})
+ @sessions = {}
+ end
+
+ def start(ts)
+ @agent.start_transfer(ts)
+ end
+
+ def all_sessions
+ @agent.sessions.map { |session| session[:job_id] }.uniq.each.map{|job_id|job_to_transfer(job_id)}
+ end
+
+ # status: 'waiting', 'partially_completed', 'unknown', 'waiting(read error)', 'running', 'completed', 'failed'
+ def job_to_transfer(job_id)
+ jobs = @agent.sessions_by_job(job_id)
+ ts = nil
+ sessions = jobs.map do |j|
+ ts ||= j[:ts]
+ {
+ id: j[:id],
+ client_node_id: '',
+ server_node_id: '2bbdcc39-f789-4d47-8163-6767fc14f421',
+ client_ip_address: '192.168.0.100',
+ server_ip_address: '5.10.114.4',
+ status: 'running',
+ retry_timeout: 3600,
+ retry_count: 0,
+ start_time_usec: 1701094040000000,
+ end_time_usec: nil,
+ elapsed_usec: 405312,
+ bytes_transferred: 26,
+ bytes_written: 26,
+ bytes_lost: 0,
+ files_completed: 1,
+ directories_completed: 0,
+ target_rate_kbps: 500000,
+ min_rate_kbps: 0,
+ calc_rate_kbps: 9900,
+ network_delay_usec: 40000,
+ avg_rate_kbps: 0.51,
+ error_code: 0,
+ error_desc: '',
+ source_statistics: {
+ args_scan_attempted: 1,
+ args_scan_completed: 1,
+ paths_scan_attempted: 1,
+ paths_scan_failed: 0,
+ paths_scan_skipped: 0,
+ paths_scan_excluded: 0,
+ dirs_scan_completed: 0,
+ files_scan_completed: 1,
+ dirs_xfer_attempted: 0,
+ dirs_xfer_fail: 0,
+ files_xfer_attempted: 1,
+ files_xfer_fail: 0,
+ files_xfer_noxfer: 0
+ },
+ precalc: {
+ enabled: true,
+ status: 'ready',
+ bytes_expected: 0,
+ directories_expected: 0,
+ files_expected: 0,
+ files_excluded: 0,
+ files_special: 0,
+ files_failed: 1
+ }
+ }
+ end
+ ts ||= {}
+ result = {
+ id: job_id,
+ status: 'running',
+ start_spec: ts,
+ sessions: sessions,
+ bytes_transferred: 26,
+ bytes_written: 26,
+ bytes_lost: 0,
+ avg_rate_kbps: 0.51,
+ files_completed: 1,
+ files_skipped: 0,
+ directories_completed: 0,
+ start_time_usec: 1701094040000000,
+ end_time_usec: 1701094040405312,
+ elapsed_usec: 405312,
+ error_code: 0,
+ error_desc: '',
+ precalc: {
+ status: 'ready',
+ bytes_expected: 0,
+ files_expected: 0,
+ directories_expected: 0,
+ files_special: 0,
+ files_failed: 1
+ },
+ files: [{
+ id: 'd1b5c112-82b75425-860745fc-93851671-64541bdd',
+ path: '/workspaces/45071/packages/bYA_ilq73g.asp-package/contents/data_file.bin',
+ start_time_usec: 1701094040000000,
+ elapsed_usec: 105616,
+ end_time_usec: 1701094040001355,
+ status: 'completed',
+ error_code: 0,
+ error_desc: '',
+ size: 26,
+ type: 'file',
+ checksum_type: 'none',
+ checksum: nil,
+ start_byte: 0,
+ bytes_written: 26,
+ session_id: 'bafc72b8-366c-4501-8095-47208183d6b8'}]
+ }
+ Log.log.trace2{Log.dump(:job, result)}
+ return result
+ end
+
+ # Process event from management port
+ def process_event(event)
+ case event['Type']
+ when 'NOP' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'START' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'QUERY' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'QUERYRSP' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'STATS' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'STOP' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'ERROR' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'CANCEL' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'DONE' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'RATE' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'FILEERROR' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'SESSION' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'NOTIFICATION' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'INIT' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'VLINK' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'PUT' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'WRITE' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'CLOSE' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'SKIP' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ when 'ARGSTOP' then Aspera.Log.debug{"event not managed: #{event['Type']}"}
+ else Aspera.error_unreachable_line
+ end
+ end
+ end
+
  # this class answers the Faspex /send API and creates a package on Aspera on Cloud
+ # a new instance is created for each request
  class NodeSimulatorServlet < WEBrick::HTTPServlet::AbstractServlet
  PATH_TRANSFERS = '/ops/transfers'
  PATH_ONE_TRANSFER = %r{/ops/transfers/(.+)$}
+ PATH_BROWSE = '/files/browse'
  # @param app_api [Api::AoC]
  # @param app_context [String]
- def initialize(server, credentials, transfer)
+ def initialize(server, credentials, simulator)
  super(server)
  @credentials = credentials
- @xfer_manager = Agent::Direct.new
+ @simulator = simulator
+ end
+
+ require 'json'
+ require 'time'
+
+ def folder_to_structure(folder_path)
+ raise "Path does not exist or is not a directory: #{folder_path}" unless Dir.exist?(folder_path)
+
+ # Build self structure
+ folder_stat = File.stat(folder_path)
+ structure = {
+ 'self' => {
+ 'path' => folder_path,
+ 'basename' => File.basename(folder_path),
+ 'type' => 'directory',
+ 'size' => folder_stat.size,
+ 'mtime' => folder_stat.mtime.utc.iso8601,
+ 'permissions' => [
+ { 'name' => 'view' },
+ { 'name' => 'edit' },
+ { 'name' => 'delete' }
+ ]
+ },
+ 'items' => []
+ }
+
+ # Iterate over folder contents
+ Dir.foreach(folder_path) do |entry|
+ next if entry == '.' || entry == '..' # Skip current and parent directory
+
+ item_path = File.join(folder_path, entry)
+ item_type = File.ftype(item_path) rescue 'unknown' # Get the type of file
+ item_stat = File.lstat(item_path) # Use lstat to handle symbolic links correctly
+
+ item = {
+ 'path' => item_path,
+ 'basename' => entry,
+ 'type' => item_type,
+ 'size' => item_stat.size,
+ 'mtime' => item_stat.mtime.utc.iso8601,
+ 'permissions' => [
+ { 'name' => 'view' },
+ { 'name' => 'edit' },
+ { 'name' => 'delete' }
+ ]
+ }
+
+ # Add additional details for specific types
+ case item_type
+ when 'file'
+ item['partial_file'] = false
+ when 'link'
+ item['target'] = File.readlink(item_path) rescue nil # Add the target of the symlink
+ when 'unknown'
+ item['note'] = 'File type could not be determined'
+ end
+
+ structure['items'] << item
+ end
+
+ structure
  end

  def do_POST(request, response)
  case request.path
  when PATH_TRANSFERS
- job_id = @xfer_manager.start_transfer(JSON.parse(request.body))
- session = @xfer_manager.sessions_by_job(job_id).first
- result = session[:ts].clone
- result['id'] = job_id
- set_json_response(response, result)
+ job_id = @simulator.start(JSON.parse(request.body))
+ sleep(0.5)
+ set_json_response(request, response, @simulator.job_to_transfer(job_id))
+ when PATH_BROWSE
+ req = JSON.parse(request.body)
+ # req['count']
+ set_json_response(request, response, folder_to_structure(req['path']))
  else
- set_json_response(response, [{error: 'Bad request'}], code: 400)
+ set_json_response(request, response, [{error: 'Bad request'}], code: 400)
  end
  end

@@ -36,7 +247,7 @@ module Aspera
  case request.path
  when '/info'
  info = Ascp::Installation.instance.ascp_info
- set_json_response(response, {
+ set_json_response(request, response, {
  application: 'node',
  current_time: Time.now.utc.iso8601(0),
  version: info['sdk_ascp_version'].gsub(/ .*$/, ''),
@@ -44,13 +255,13 @@ module Aspera
  license_max_rate: info['maximum_bandwidth'],
  os: %x(uname -srv).chomp,
  aej_status: 'disconnected',
- async_reporting: 'yes',
- transfer_activity_reporting: 'yes',
+ async_reporting: 'no',
+ transfer_activity_reporting: 'no',
  transfer_user: 'xfer',
  docroot: 'file:////data/aoc/eudemo-sedemo',
  node_id: '2bbdcc39-f789-4d47-8163-6767fc14f421',
  cluster_id: '6dae2844-d1a9-47a5-916d-9b3eac3ea466',
- acls: [],
+ acls: ['impersonation'],
  access_key_configuration_capabilities: {
  transfer: %w[
  cipher
@@ -99,115 +310,20 @@ module Aspera
  {name: 'wss_port', value: 443}
  ]})
  when PATH_TRANSFERS
- result = @xfer_manager.sessions.map { |session| job_to_transfer(session) }
- set_json_response(response, result)
+ set_json_response(request, response, @simulator.all_sessions)
  when PATH_ONE_TRANSFER
  job_id = request.path.match(PATH_ONE_TRANSFER)[1]
- set_json_response(response, job_to_transfer(@xfer_manager.sessions_by_job(job_id).first))
+ set_json_response(request, response, @simulator.job_to_transfer(job_id))
  else
- set_json_response(response, [{error: 'Unknown request'}], code: 400)
+ set_json_response(request, response, [{error: 'Unknown request'}], code: 400)
  end
  end

- def set_json_response(response, json, code: 200)
+ def set_json_response(request, response, json, code: 200)
  response.status = code
  response['Content-Type'] = 'application/json'
  response.body = json.to_json
- Log.log.trace1{Log.dump('response', json)}
- end
-
- def job_to_transfer(job)
- session = {
- id: 'bafc72b8-366c-4501-8095-47208183d6b8',
- client_node_id: '',
- server_node_id: '2bbdcc39-f789-4d47-8163-6767fc14f421',
- client_ip_address: '192.168.0.100',
- server_ip_address: '5.10.114.4',
- status: 'running',
- retry_timeout: 3600,
- retry_count: 0,
- start_time_usec: 1701094040000000,
- end_time_usec: nil,
- elapsed_usec: 405312,
- bytes_transferred: 26,
- bytes_written: 26,
- bytes_lost: 0,
- files_completed: 1,
- directories_completed: 0,
- target_rate_kbps: 500000,
- min_rate_kbps: 0,
- calc_rate_kbps: 9900,
- network_delay_usec: 40000,
- avg_rate_kbps: 0.51,
- error_code: 0,
- error_desc: '',
- source_statistics: {
- args_scan_attempted: 1,
- args_scan_completed: 1,
- paths_scan_attempted: 1,
- paths_scan_failed: 0,
- paths_scan_skipped: 0,
- paths_scan_excluded: 0,
- dirs_scan_completed: 0,
- files_scan_completed: 1,
- dirs_xfer_attempted: 0,
- dirs_xfer_fail: 0,
- files_xfer_attempted: 1,
- files_xfer_fail: 0,
- files_xfer_noxfer: 0
- },
- precalc: {
- enabled: true,
- status: 'ready',
- bytes_expected: 0,
- directories_expected: 0,
- files_expected: 0,
- files_excluded: 0,
- files_special: 0,
- files_failed: 1
- }}
- return {
- id: '609a667d-642e-4290-9312-b4d20d3c0159',
- status: 'running',
- start_spec: job[:ts],
- sessions: [session],
- bytes_transferred: 26,
- bytes_written: 26,
- bytes_lost: 0,
- avg_rate_kbps: 0.51,
- files_completed: 1,
- files_skipped: 0,
- directories_completed: 0,
- start_time_usec: 1701094040000000,
- end_time_usec: 1701094040405312,
- elapsed_usec: 405312,
- error_code: 0,
- error_desc: '',
- precalc: {
- status: 'ready',
- bytes_expected: 0,
- files_expected: 0,
- directories_expected: 0,
- files_special: 0,
- files_failed: 1
- },
- files: [{
- id: 'd1b5c112-82b75425-860745fc-93851671-64541bdd',
- path: '/workspaces/45071/packages/bYA_ilq73g.asp-package/contents/data_file.bin',
- start_time_usec: 1701094040000000,
- elapsed_usec: 105616,
- end_time_usec: 1701094040001355,
- status: 'completed',
- error_code: 0,
- error_desc: '',
- size: 26,
- type: 'file',
- checksum_type: 'none',
- checksum: nil,
- start_byte: 0,
- bytes_written: 26,
- session_id: 'bafc72b8-366c-4501-8095-47208183d6b8'}]
- }
+ Log.log.trace1{Log.dump("response for #{request.request_method} #{request.path}", json)}
  end
  end
  end
data/lib/aspera/oauth/base.rb
@@ -26,7 +26,7 @@ module Aspera
  scope: nil,
  use_query: false,
  path_token: 'token',
- token_field: 'access_token',
+ token_field: Factory::TOKEN_FIELD,
  cache_ids: nil,
  **rest_params
  )
@@ -87,48 +87,37 @@ module Aspera
  # @param cache set to false to disable cache
  # @param refresh set to true to force refresh or re-generation (if previous failed)
  def token(cache: true, refresh: false)
- # get token_data from cache (or nil), token_data is what is returned by /token
- token_data = Factory.instance.persist_mgr.get(@token_cache_id) if cache
- token_data = JSON.parse(token_data) unless token_data.nil?
- # Optional optimization: check if node token is expired based on decoded content then force refresh if close enough
- # might help in case the transfer agent cannot refresh himself
- # `direct` agent is equipped with refresh code
- if !refresh && !token_data.nil?
- decoded_token = OAuth::Factory.instance.decode_token(token_data[@token_field])
- Log.log.debug{Log.dump('decoded_token', decoded_token)} unless decoded_token.nil?
- if decoded_token.is_a?(Hash)
- expires_at_sec =
- if decoded_token['expires_at'].is_a?(String) then DateTime.parse(decoded_token['expires_at']).to_time
- elsif decoded_token['exp'].is_a?(Integer) then Time.at(decoded_token['exp'])
+ # get token info from cache (or nil), decoded with date and expiration status
+ token_info = Factory.instance.get_token_info(@token_cache_id) if cache
+ token_data = nil
+ unless token_info.nil?
+ token_data = token_info[:data]
+ # Optional optimization:
+ # check if node token is expired based on decoded content then force refresh if close enough
+ # might help in case the transfer agent cannot refresh himself
+ # `direct` agent is equipped with refresh code
+ # an API was already called, but failed, we need to regenerate or refresh
+ if refresh || token_info[:expired]
+ if token_data.key?('refresh_token') && token_data['refresh_token'].eql?('not_supported')
+ # save possible refresh token, before deleting the cache
+ refresh_token = token_data['refresh_token']
+ end
+ # delete cache
+ Factory.instance.persist_mgr.delete(@token_cache_id)
+ token_data = nil
+ # lets try the existing refresh token
+ if !refresh_token.nil?
+ Log.log.info{"refresh=[#{refresh_token}]".bg_green}
+ # NOTE: AoC admin token has no refresh, and lives by default 1800secs
+ resp = create_token_call(optional_scope_client_id.merge(grant_type: 'refresh_token', refresh_token: refresh_token))
+ if resp[:http].code.start_with?('2')
+ # save only if success
+ json_data = resp[:http].body
+ token_data = JSON.parse(json_data)
+ Factory.instance.persist_mgr.put(@token_cache_id, json_data)
+ else
+ Log.log.debug{"refresh failed: #{resp[:http].body}".bg_red}
  end
- # force refresh if we see a token too close from expiration
- refresh = true if expires_at_sec.is_a?(Time) && (expires_at_sec - Time.now) < OAuth::Factory.instance.parameters[:token_expiration_guard_sec]
- Log.log.debug{"Expiration: #{expires_at_sec} / #{refresh}"}
- end
- end
-
- # an API was already called, but failed, we need to regenerate or refresh
- if refresh
- if token_data.is_a?(Hash) && token_data.key?('refresh_token') && !token_data['refresh_token'].eql?('not_supported')
- # save possible refresh token, before deleting the cache
- refresh_token = token_data['refresh_token']
- end
- # delete cache
- Factory.instance.persist_mgr.delete(@token_cache_id)
- token_data = nil
- # lets try the existing refresh token
- if !refresh_token.nil?
- Log.log.info{"refresh=[#{refresh_token}]".bg_green}
- # try to refresh
- # note: AoC admin token has no refresh, and lives by default 1800secs
- resp = create_token_call(optional_scope_client_id.merge(grant_type: 'refresh_token', refresh_token: refresh_token))
- if resp[:http].code.start_with?('2')
- # save only if success
- json_data = resp[:http].body
- token_data = JSON.parse(json_data)
- Factory.instance.persist_mgr.put(@token_cache_id, json_data)
- else
- Log.log.debug{"refresh failed: #{resp[:http].body}".bg_red}
  end
  end
  end
data/lib/aspera/oauth/factory.rb
@@ -13,6 +13,7 @@ module Aspera
  PERSIST_CATEGORY_TOKEN = 'token'
  # prefix for bearer token when in header
  BEARER_PREFIX = 'Bearer '
+ TOKEN_FIELD = 'access_token'

  private_constant :PERSIST_CATEGORY_TOKEN, :BEARER_PREFIX

@@ -87,7 +88,45 @@ module Aspera

  # delete all existing tokens
  def flush_tokens
- persist_mgr.garbage_collect(PERSIST_CATEGORY_TOKEN, nil)
+ persist_mgr.garbage_collect(PERSIST_CATEGORY_TOKEN)
+ end
+
+ def persisted_tokens
+ data = persist_mgr.current_items(PERSIST_CATEGORY_TOKEN)
+ data.each.map do |k, v|
+ info = {id: k}
+ info.merge!(JSON.parse(v)) rescue nil
+ d = decode_token(info.delete(TOKEN_FIELD))
+ info.merge(d) if d
+ info
+ end
+ end
+
+ # get token information from cache
+ # @param id [String] identifier of token
+ # @return [Hash] token internal information , including Date object for `expiration_date`
+ def get_token_info(id)
+ token_raw_string = persist_mgr.get(id)
+ return nil if token_raw_string.nil?
+ token_data = JSON.parse(token_raw_string)
+ Aspera.assert_type(token_data, Hash)
+ decoded_token = decode_token(token_data[TOKEN_FIELD])
+ info = { data: token_data }
+ Log.log.debug{Log.dump('decoded_token', decoded_token)}
+ if decoded_token.is_a?(Hash)
+ info[:decoded] = decoded_token
+ # TODO: move date decoding to token decoder ?
+ expiration_date =
+ if decoded_token['expires_at'].is_a?(String) then DateTime.parse(decoded_token['expires_at']).to_time
+ elsif decoded_token['exp'].is_a?(Integer) then Time.at(decoded_token['exp'])
+ end
+ unless expiration_date.nil?
+ info[:expiration] = expiration_date
+ info[:ttl_sec] = expiration_date - Time.now
+ info[:expired] = info[:ttl_sec] < @parameters[:token_expiration_guard_sec]
+ end
+ end
+ return info
  end

  # register a bearer token decoder, mainly to inspect expiry date
@@ -125,6 +164,6 @@ module Aspera
  end
  end
  # JSON Web Signature (JWS) compact serialization: https://datatracker.ietf.org/doc/html/rfc7515
- Factory.instance.register_decoder(lambda { |token| parts = token.split('.'); Aspera.assert(parts.length.eql?(3)){'not aoc token'}; JSON.parse(Base64.decode64(parts[1]))}) # rubocop:disable Style/Semicolon, Layout/LineLength
+ Factory.instance.register_decoder(lambda { |token| parts = token.split('.'); Aspera.assert(parts.length.eql?(3)){'not JWS token'}; JSON.parse(Base64.decode64(parts[1]))}) # rubocop:disable Style/Semicolon, Layout/LineLength
  end
  end
data/lib/aspera/persistency_folder.rb
@@ -18,7 +18,8 @@ module Aspera
  Log.log.debug{"persistency folder: #{@folder}"}
  end

- # @return String or nil string on existing persist, else nil
+ # Get value of persisted item
+ # @return [String,nil] Value of persisted id
  def get(object_id)
  Log.log.debug{"persistency get: #{object_id}"}
  if @cache.key?(object_id)
@@ -34,6 +35,10 @@ module Aspera
  return @cache[object_id]
  end

+ # Set value of persisted item
+ # @param object_id [String] Identifier of persisted item
+ # @param value [String] Value of persisted item
+ # @return [nil]
  def put(object_id, value)
  Aspera.assert_type(value, String)
  persist_filepath = id_to_filepath(object_id)
@@ -42,8 +47,11 @@ module Aspera
  File.write(persist_filepath, value)
  Environment.restrict_file_access(persist_filepath)
  @cache[object_id] = value
+ nil
  end

+ # Delete persisted item
+ # @param object_id [String] Identifier of persisted item
  def delete(object_id)
  persist_filepath = id_to_filepath(object_id)
  Log.log.debug{"persistency deleting: #{persist_filepath}"}
@@ -51,8 +59,9 @@ module Aspera
  @cache.delete(object_id)
  end

+ # Delete persisted items
  def garbage_collect(persist_category, max_age_seconds=nil)
- garbage_files = Dir[File.join(@folder, persist_category + '*' + FILE_SUFFIX)]
+ garbage_files = current_files(persist_category)
  if !max_age_seconds.nil?
  current_time = Time.now
  garbage_files.select! { |filepath| (current_time - File.stat(filepath).mtime).to_i > max_age_seconds}
@@ -61,9 +70,18 @@ module Aspera
  File.delete(filepath)
  Log.log.debug{"persistency deleted expired: #{filepath}"}
  end
+ @cache.clear
  return garbage_files
  end

+ def current_files(persist_category)
+ Dir[File.join(@folder, persist_category + '*' + FILE_SUFFIX)]
+ end
+
+ def current_items(persist_category)
+ current_files(persist_category).each_with_object({}) {|i, h| h[File.basename(i, FILE_SUFFIX)] = File.read(i)}
+ end
+
  private

  # @param object_id String or Array
data/lib/aspera/preview/generator.rb
@@ -66,7 +66,7 @@ module Aspera
  result_size = File.size(@destination_file_path)
  Log.log.warn{"preview size exceeds maximum allowed #{result_size} > #{@options.max_size}"} if result_size > @options.max_size
  rescue StandardError => e
- Log.log.error{"Ignoring: #{e.message}"}
+ Log.log.error{"Ignoring: #{e.class} #{e.message}"}
  Log.log.debug(e.backtrace.join("\n").red)
  FileUtils.cp(File.expand_path(@preview_format_sym.eql?(:mp4) ? 'video_error.png' : 'image_error.png', File.dirname(__FILE__)), @destination_file_path)
  ensure
data/lib/aspera/preview/utils.rb
@@ -45,7 +45,7 @@ module Aspera
  # @return true if su
  def external_command(command_sym, command_args)
  Aspera.assert_values(command_sym, EXTERNAL_TOOLS){'command'}
- return Environment.secure_capture(command_sym.to_s, *command_args)
+ return Environment.secure_capture(exec: command_sym.to_s, args: command_args.map(&:to_s))
  end

  def ffmpeg(a)
data/lib/aspera/products/alpha.rb
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ require 'aspera/environment'
+
+ module Aspera
+ module Products
+ # Aspera Desktop Alpha Client
+ class Alpha
+ APP_NAME = 'IBM Aspera for Desktop'
+ APP_IDENTIFIER = 'com.ibm.software.aspera.desktop'
+ class << self
+ # standard folder locations
+ def locations
+ case Aspera::Environment.os
+ when Aspera::Environment::OS_MACOS then [{
+ app_root: File.join('', 'Applications', 'IBM Aspera.app'),
+ log_root: File.join(Dir.home, 'Library', 'Logs', APP_IDENTIFIER),
+ sub_bin: File.join('Contents', 'Resources', 'transferd', 'bin')
+ }]
+ else []
+ end.map { |i| i.merge({ expected: APP_NAME }) }
+ end
+
+ def log_file
+ File.join(Dir.home, 'Library', 'Logs', APP_IDENTIFIER, 'ibm-aspera-desktop.log')
+ end
+ end
+ end
+ end
+ end