openstudio-analysis 1.3.5 → 1.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,862 +1,862 @@
1
- # *******************************************************************************
2
- # OpenStudio(R), Copyright (c) Alliance for Sustainable Energy, LLC.
3
- # See also https://openstudio.net/license
4
- # *******************************************************************************
5
-
6
- # This class manages communication with the server.
7
- # Presently, this class is simple and stores all information in hashes.
8
- module OpenStudio
9
- module Analysis
10
- class ServerApi
11
- attr_reader :hostname
12
-
13
- # Define the set of analysis methods that require batch_run to be queued after them
14
- BATCH_RUN_METHODS = ['lhs', 'preflight', 'single_run', 'repeat_run', 'doe', 'diag', 'baseline_perturbation', 'batch_datapoints'].freeze
15
-
16
- def initialize(options = {})
17
- defaults = { hostname: 'http://localhost:8080', log_path: File.expand_path('~/os_server_api.log') }
18
- options = defaults.merge(options)
19
- if ENV['OS_SERVER_LOG_PATH']
20
- @logger = ::Logger.new(ENV['OS_SERVER_LOG_PATH'] + '/os_server_api.log')
21
- else
22
- @logger = ::Logger.new(options[:log_path])
23
- end
24
-
25
- @hostname = options[:hostname]
26
-
27
- raise 'no host defined for server api class' if @hostname.nil?
28
-
29
- # TODO: add support for the proxy
30
-
31
- # create connection with basic capabilities
32
- @conn = Faraday.new(url: @hostname) do |faraday|
33
- faraday.request :url_encoded # form-encode POST params
34
- faraday.options.timeout = 300
35
- faraday.options.open_timeout = 300
36
- faraday.options.write_timeout = 1800
37
- faraday.use Faraday::Response::Logger, @logger
38
- # faraday.response @logger # log requests to STDOUT
39
- faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
40
- end
41
-
42
- # create connection to server api with multipart capabilities
43
- @conn_multipart = Faraday.new(url: @hostname) do |faraday|
44
- faraday.request :multipart
45
- faraday.request :url_encoded # form-encode POST params
46
- faraday.options.timeout = 300
47
- faraday.options.open_timeout = 300
48
- faraday.options.write_timeout = 1800
49
- faraday.use Faraday::Response::Logger, @logger
50
- # faraday.response :logger # log requests to STDOUT
51
- faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
52
- end
53
- end
54
-
55
- def get_projects
56
- response = @conn.get '/projects.json'
57
-
58
- projects_json = nil
59
- if response.status == 200
60
- projects_json = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
61
- else
62
- raise 'did not receive a 200 in get_projects'
63
- end
64
-
65
- projects_json
66
- end
67
-
68
- def get_project_ids
69
- ids = get_projects
70
- ids.map { |project| project[:uuid] }
71
- end
72
-
73
- def delete_project(id)
74
- deleted = false
75
- response = @conn.delete "/projects/#{id}.json"
76
- if response.status == 204
77
- puts "Successfully deleted project #{id}"
78
- deleted = true
79
- else
80
- puts "ERROR deleting project #{id}"
81
- deleted = false
82
- end
83
-
84
- deleted
85
- end
86
-
87
- def delete_all
88
- ids = get_project_ids
89
- puts "deleting projects with IDs: #{ids}"
90
- success = true
91
- ids.each do |id|
92
- r = delete_project id
93
- success = false if r == false
94
- end
95
-
96
- success
97
- end
98
-
99
- def new_project(options = {})
100
- defaults = { project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
101
- options = defaults.merge(options)
102
- project_id = nil
103
-
104
- # TODO: make this a display name and a machine name
105
- project_hash = { project: { name: (options[:project_name]).to_s } }
106
- begin
107
- response = @conn.post do |req|
108
- req.url '/projects.json'
109
- req.headers['Content-Type'] = 'application/json'
110
- req.body = project_hash.to_json
111
- end
112
- puts "response.status: #{response.status}"
113
- puts response.inspect
114
- rescue Net::OpenTimeout => e
115
- puts "new_project OpenTimeout: #{e.message}"
116
- end
117
- if response && response.status == 201
118
- project_id = JSON.parse(response.body)['_id']
119
-
120
- puts "new project created with ID: #{project_id}"
121
- # grab the project id
122
- elsif response && response.status == 500
123
- puts '500 Error'
124
- puts response.inspect
125
- end
126
-
127
- project_id
128
- end
129
-
130
- def get_analyses(project_id)
131
- analysis_ids = []
132
- response = @conn.get "/projects/#{project_id}.json"
133
- if response.status == 200
134
- analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
135
- analyses[:analyses]&.each do |analysis|
136
- analysis_ids << analysis[:_id]
137
- end
138
- end
139
-
140
- analysis_ids
141
- end
142
-
143
- def get_analyses_detailed(project_id)
144
- analyses = nil
145
- response = @conn.get "/projects/#{project_id}.json"
146
- if response.status == 200
147
- analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
148
- end
149
-
150
- analyses
151
- end
152
-
153
- # return the entire analysis JSON
154
- def get_analysis(analysis_id)
155
- result = nil
156
- response = @conn.get "/analyses/#{analysis_id}.json"
157
- if response.status == 200
158
- result = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
159
- end
160
-
161
- result
162
- end
163
-
164
- # Check the status of the simulation. Format should be:
165
- # {
166
- # analysis: {
167
- # status: "completed",
168
- # analysis_type: "batch_run"
169
- # },
170
- # data_points: [
171
- # {
172
- # _id: "bbd57e90-ce59-0131-35de-080027880ca6",
173
- # status: "completed"
174
- # }
175
- # ]
176
- # }
177
- def get_analysis_status(analysis_id, analysis_type)
178
- status = nil
179
-
180
- # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
181
- # race condition when the analysis state changes.
182
- unless analysis_id.nil?
183
- resp = @conn.get "analyses/#{analysis_id}/status.json"
184
- if resp.status == 200
185
- j = JSON.parse resp.body, symbolize_names: true
186
- if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
187
- status = j[:analysis][:status]
188
- elsif j && j[:analysis] && analysis_type == 'batch_run'
189
- status = j[:analysis][:status]
190
- end
191
- end
192
- end
193
-
194
- status
195
- end
196
-
197
- # Check if the machine is alive
198
- #
199
- # @return [Boolean] True if the machine has an awake value set
200
- def alive?
201
- m = machine_status
202
-
203
- m = !m[:status][:awake].nil? if m
204
-
205
- m
206
- end
207
-
208
- # Retrieve the machine status
209
- #
210
- # @return [Hash]
211
- def machine_status
212
- status = nil
213
-
214
- begin
215
- resp = @conn.get do |req|
216
- req.url 'status.json'
217
- req.options.timeout = 300
218
- req.options.open_timeout = 300
219
- end
220
- puts "machine_status resp.status: #{resp.status}"
221
- puts resp.inspect
222
- if resp.status == 200
223
- j = JSON.parse resp.body, symbolize_names: true
224
- status = j if j
225
- end
226
- rescue Faraday::ConnectionFailed => e
227
- puts "machine_Status ConnectionFailed: #{e.message}"
228
- rescue Net::ReadTimeout => e
229
- puts "machine_Status ReadTimeout: #{e.message}"
230
- end
231
-
232
- status
233
- end
234
-
235
- def get_analysis_status_and_json(analysis_id, analysis_type)
236
- status = nil
237
- j = nil
238
-
239
- # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
240
- # race condition when the analysis state changes.
241
- unless analysis_id.nil?
242
- resp = @conn.get "analyses/#{analysis_id}/status.json"
243
- if resp.status == 200
244
- j = JSON.parse resp.body, symbolize_names: true
245
- if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
246
- status = j[:analysis][:status]
247
- end
248
- end
249
- end
250
-
251
- [status, j]
252
- end
253
-
254
- # return the data point results in JSON format
255
- def get_analysis_results(analysis_id)
256
- analysis = nil
257
-
258
- response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
259
- if response.status == 200
260
- analysis = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
261
- end
262
-
263
- analysis
264
- end
265
-
266
- def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
267
- downloaded = false
268
- file_path_and_name = nil
269
-
270
- response = @conn.get do |r|
271
- r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
272
- r.options.timeout = 3600 # 60 minutes
273
- end
274
- if response.status == 200
275
- filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
276
- downloaded = true
277
- file_path_and_name = "#{save_directory}/#{filename}"
278
- puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
279
- if format == 'rdata'
280
- File.open(file_path_and_name, 'wb') { |f| f << response.body }
281
- else
282
- File.open(file_path_and_name, 'w') { |f| f << response.body }
283
- end
284
- end
285
-
286
- [downloaded, file_path_and_name]
287
- end
288
-
289
- def download_variables(analysis_id, format = 'rdata', save_directory = '.')
290
- downloaded = false
291
- file_path_and_name = nil
292
-
293
- response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
294
- if response.status == 200
295
- filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
296
- downloaded = true
297
- file_path_and_name = "#{save_directory}/#{filename}"
298
- puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
299
- if format == 'rdata'
300
- File.open(file_path_and_name, 'wb') { |f| f << response.body }
301
- else
302
- File.open(file_path_and_name, 'w') { |f| f << response.body }
303
- end
304
- end
305
-
306
- [downloaded, file_path_and_name]
307
- end
308
-
309
- def download_datapoint(datapoint_id, save_directory = '.')
310
- downloaded = false
311
- file_path_and_name = nil
312
-
313
- response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=data_point.zip"
314
- if response.status == 200
315
- filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
316
- downloaded = true
317
- file_path_and_name = "#{save_directory}/#{filename}"
318
- puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
319
- File.open(file_path_and_name, 'wb') { |f| f << response.body }
320
- else
321
- response = @conn.get "/data_points/#{datapoint_id}/download"
322
- if response.status == 200
323
- filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
324
- downloaded = true
325
- file_path_and_name = "#{save_directory}/#{filename}"
326
- puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
327
- File.open(file_path_and_name, 'wb') { |f| f << response.body }
328
- end
329
- end
330
-
331
- [downloaded, file_path_and_name]
332
- end
333
-
334
- # Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with
335
- # DEnCity reporting, the size is around 325MB
336
- def download_database(save_directory = '.')
337
- downloaded = false
338
- file_path_and_name = nil
339
-
340
- response = @conn.get do |r|
341
- r.url '/admin/backup_database?full_backup=true'
342
- r.options.timeout = 3600 # 60 minutes
343
- end
344
-
345
- if response.status == 200
346
- filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
347
- downloaded = true
348
- file_path_and_name = "#{save_directory}/#{filename}"
349
- puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
350
- File.open(file_path_and_name, 'wb') { |f| f << response.body }
351
- end
352
-
353
- [downloaded, file_path_and_name]
354
- end
355
-
356
- # http://localhost:3000/data_points/ff857845-a4c6-4eb9-a52b-cbc6a41976d5/download_result_file?filename=
357
- def download_datapoint_report(datapoint_id, report_name, save_directory = '.')
358
- downloaded = false
359
- file_path_and_name = nil
360
-
361
- response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=#{report_name}"
362
- if response.status == 200
363
- filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
364
- downloaded = true
365
- file_path_and_name = "#{save_directory}/#{filename}"
366
- puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
367
- File.open(file_path_and_name, 'wb') { |f| f << response.body }
368
- end
369
-
370
- [downloaded, file_path_and_name]
371
- end
372
-
373
- def download_datapoint_jsons(analysis_id, save_directory = '.')
374
- # get the list of all the datapoints
375
- dps = get_datapoint_status(analysis_id)
376
- dps.each do |dp|
377
- if dp[:status] == 'completed'
378
- dp_h = get_datapoint(dp[:_id])
379
- File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
380
- end
381
- end
382
- end
383
-
384
- def datapoint_dencity(datapoint_id)
385
- # Return the JSON (Full) of the datapoint
386
- data_point = nil
387
-
388
- resp = @conn.get "/data_points/#{datapoint_id}/dencity.json"
389
- if resp.status == 200
390
- data_point = JSON.parse resp.body, symbolize_names: true
391
- end
392
-
393
- data_point
394
- end
395
-
396
- def analysis_dencity_json(analysis_id)
397
- # Return the hash of the dencity format for the analysis
398
- dencity = nil
399
-
400
- resp = @conn.get "/analyses/#{analysis_id}/dencity.json"
401
- if resp.status == 200
402
- dencity = JSON.parse resp.body, symbolize_names: true
403
- end
404
-
405
- dencity
406
- end
407
-
408
- def download_dencity_json(analysis_id, save_directory = '.')
409
- a_h = analysis_dencity_json(analysis_id)
410
- if a_h
411
- File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') { |f| f << JSON.pretty_generate(a_h) }
412
- end
413
- end
414
-
415
- def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
416
- # get the list of all the datapoints
417
- dps = get_datapoint_status(analysis_id)
418
- dps.each do |dp|
419
- if dp[:status] == 'completed'
420
- dp_h = datapoint_dencity(dp[:_id])
421
- File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
422
- end
423
- end
424
- end
425
-
426
- def new_analysis(project_id, options)
427
- defaults = {
428
- analysis_name: nil,
429
- reset_uuids: false,
430
- push_to_dencity: false
431
- }
432
- options = defaults.merge(options)
433
-
434
- raise 'No project id passed' if project_id.nil?
435
-
436
- formulation_json = nil
437
- if options[:formulation_file]
438
- raise "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
439
- formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
440
- end
441
-
442
- # read in the analysis id from the analysis.json file
443
- analysis_id = nil
444
- if formulation_json
445
- if options[:reset_uuids]
446
- analysis_id = SecureRandom.uuid
447
- formulation_json[:analysis][:uuid] = analysis_id
448
-
449
- formulation_json[:analysis][:problem][:workflow].each do |wf|
450
- wf[:uuid] = SecureRandom.uuid
451
- wf[:arguments]&.each do |arg|
452
- arg[:uuid] = SecureRandom.uuid
453
- end
454
- wf[:variables]&.each do |var|
455
- var[:uuid] = SecureRandom.uuid
456
- var[:argument][:uuid] = SecureRandom.uuid if var[:argument]
457
- end
458
- end
459
- else
460
- analysis_id = formulation_json[:analysis][:uuid]
461
- end
462
-
463
- # set the analysis name
464
- formulation_json[:analysis][:name] = (options[:analysis_name]).to_s unless options[:analysis_name].nil?
465
- else
466
- formulation_json = {
467
- analysis: options
468
- }
469
- puts formulation_json
470
- analysis_id = SecureRandom.uuid
471
- formulation_json[:analysis][:uuid] = analysis_id
472
- end
473
- raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?
474
-
475
- # save out this file to compare
476
- # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
477
-
478
- response = @conn.post do |req|
479
- req.url "projects/#{project_id}/analyses.json"
480
- req.headers['Content-Type'] = 'application/json'
481
- req.body = formulation_json.to_json
482
- req.options.timeout = 600 # seconds
483
- req.options.write_timeout = 1800
484
- end
485
-
486
- if response.status == 201
487
- puts "asked to create analysis with #{analysis_id}"
488
- # puts resp.inspect
489
- analysis_id = JSON.parse(response.body)['_id']
490
- puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
491
- upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
492
- puts "new analysis created with ID: #{analysis_id}"
493
- else
494
- raise 'Could not create new analysis'
495
- end
496
-
497
- # check if we need to upload the analysis zip file
498
- if options[:upload_file]
499
- raise "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])
500
-
501
- payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
502
- response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
503
- req.options.timeout = 1800 # seconds
504
- req.options.write_timeout = 1800
505
- end
506
-
507
- if response.status == 201
508
- puts 'Successfully uploaded ZIP file'
509
- else
510
- raise response.inspect
511
- end
512
- end
513
-
514
- analysis_id
515
- end
516
-
517
- def upload_to_dencity(analysis_uuid, analysis)
518
- require 'dencity'
519
- puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
520
- conn = Dencity.connect
521
- raise "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
522
- begin
523
- r = conn.login
524
- rescue Faraday::ParsingError => user_id_failure
525
- raise "Error in user_id field: #{user_id_failure.message}"
526
- rescue MultiJson::ParseError => authentication_failure
527
- raise "Error in attempted authentication: #{authentication_failure.message}"
528
- end
529
- user_uuid = r.id
530
-
531
- # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
532
- # Check that the analysis has not yet been registered with the DEnCity instance.
533
- # TODO: This should be simplified with a 'retrieve_analysis_by_user_defined_id' method in the future
534
- user_analyses = []
535
- r = conn.dencity_get 'analyses'
536
- raise 'Unable to retrieve analyses from DEnCity server' unless r['status'] == 200
537
- r['data'].each do |dencity_analysis|
538
- user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
539
- end
540
- found_analysis_uuid = false
541
- user_analyses.each do |dencity_analysis_id|
542
- dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
543
- if dencity_analysis['user_defined_id'] == analysis_uuid
544
- found_analysis_uuid = true
545
- break
546
- end
547
- end
548
- raise "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
549
- dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)
550
-
551
- # Write the analysis DEnCity hash to dencity_analysis.json
552
- f = File.new('dencity_analysis.json', 'wb')
553
- f.write(JSON.pretty_generate(dencity_hash))
554
- f.close
555
-
556
- # Upload the processed analysis json.
557
- upload = conn.load_analysis 'dencity_analysis.json'
558
- begin
559
- upload_response = upload.push
560
- rescue StandardError => e
561
- runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
562
- else
563
- if upload_response.is_a?(NoMethodError)
564
- raise "ERROR: Server responded with a NoMethodError: #{upload_response}"
565
- end
566
- if upload_response.status.to_s[0] == '2'
567
- puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
568
- else
569
- puts 'ERROR: Server returned a non-20x status. Response below.'
570
- puts upload_response
571
- raise
572
- end
573
- end
574
- end
575
-
576
- # Upload a single datapoint
577
- # @param analysis_id [String] Analysis ID to attach the datapoint to
578
- # @param options [Hash] Options
579
- # @option options [String] :datapoint_file Path to datapoint JSON to upload
580
- # @option options [Boolean] :reset_uuids Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.
581
- def upload_datapoint(analysis_id, options)
582
- defaults = { reset_uuids: false }
583
- options = defaults.merge(options)
584
-
585
- raise 'No analysis id passed' if analysis_id.nil?
586
- raise 'No datapoint file passed to upload_datapoint' unless options[:datapoint_file]
587
- raise "No datapoints_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])
588
-
589
- dp_hash = JSON.parse(File.open(options[:datapoint_file]).read, symbolize_names: true)
590
-
591
- # There are two instances of the analysis ID. There is one in the file,
592
- # and the other is in the POST url. Ideally remove the version in the
593
- # file and support only the URL based analysis_id
594
- dp_hash[:analysis_uuid] = analysis_id
595
-
596
- if options[:reset_uuids]
597
- dp_hash[:uuid] = SecureRandom.uuid
598
- end
599
-
600
- # merge in the analysis_id as it has to be what is in the database
601
- response = @conn.post do |req|
602
- req.url "analyses/#{analysis_id}/data_points.json"
603
- req.headers['Content-Type'] = 'application/json'
604
- req.body = dp_hash.to_json
605
- end
606
-
607
- if response.status == 201
608
- puts "new datapoints created for analysis #{analysis_id}"
609
- return JSON.parse(response.body, symbolize_names: true)
610
- else
611
- raise "could not create new datapoints #{response.body}"
612
- end
613
- end
614
-
615
- # Upload multiple data points to the server.
616
- # @param analysis_id [String] Analysis ID to attach the datapoints to
617
- def upload_datapoints(analysis_id, options)
618
- defaults = {}
619
- options = defaults.merge(options)
620
-
621
- raise 'No analysis id passed' if analysis_id.nil?
622
- raise 'No datapoints file passed to upload_datapoints' unless options[:datapoints_file]
623
- raise "No datapoints_file exists #{options[:datapoints_file]}" unless File.exist?(options[:datapoints_file])
624
-
625
- dp_hash = JSON.parse(File.open(options[:datapoints_file]).read, symbolize_names: true)
626
-
627
- # merge in the analysis_id as it has to be what is in the database
628
- response = @conn.post do |req|
629
- req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
630
- req.headers['Content-Type'] = 'application/json'
631
- req.body = dp_hash.to_json
632
- end
633
-
634
- if response.status == 201
635
- puts "new datapoints created for analysis #{analysis_id}"
636
- else
637
- raise "could not create new datapoints #{response.body}"
638
- end
639
- end
640
-
641
- def start_analysis(analysis_id, options)
642
- defaults = { analysis_action: 'start', without_delay: false }
643
- options = defaults.merge(options)
644
-
645
- puts "Run analysis is configured with #{options.to_json}"
646
- response = @conn.post do |req|
647
- req.url "analyses/#{analysis_id}/action.json"
648
- req.headers['Content-Type'] = 'application/json'
649
- req.body = options.to_json
650
- req.options.timeout = 1800 # seconds
651
- req.options.write_timeout = 1800
652
- end
653
-
654
- if response.status == 200
655
- puts "Received request to run analysis #{analysis_id}"
656
- else
657
- raise 'Could not start the analysis'
658
- end
659
- end
660
-
661
- # Kill the analysis
662
- # @param analysis_id [String] Analysis ID to stop
663
- def kill_analysis(analysis_id)
664
- analysis_action = { analysis_action: 'stop' }
665
-
666
- response = @conn.post do |req|
667
- req.url "analyses/#{analysis_id}/action.json"
668
- req.headers['Content-Type'] = 'application/json'
669
- req.body = analysis_action.to_json
670
- end
671
-
672
- if response.status == 200
673
- puts "Killed analysis #{analysis_id}"
674
- end
675
- end
676
-
677
- def kill_all_analyses
678
- project_ids = get_project_ids
679
- puts "List of projects ids are: #{project_ids}"
680
-
681
- project_ids.each do |project_id|
682
- analysis_ids = get_analyses(project_id)
683
- puts analysis_ids
684
- analysis_ids.each do |analysis_id|
685
- puts "Trying to kill #{analysis_id}"
686
- kill_analysis(analysis_id)
687
- end
688
- end
689
- end
690
-
691
- # Get a list of analyses and the data points
692
- #
693
- # @param analysis_id [String] An analysis ID
694
- def data_point_status(analysis_id = nil)
695
- data_points = nil
696
- call_string = nil
697
- if analysis_id
698
- call_string = "analyses/#{analysis_id}/status.json"
699
- else
700
- call_string = 'analyses/status.json'
701
- end
702
-
703
- resp = @conn.get call_string, version: 2
704
- if resp.status == 200
705
- data_points = JSON.parse(resp.body, symbolize_names: true)[:analyses]
706
- end
707
-
708
- data_points
709
- end
710
-
711
- # This is the former version of get data point status. The new version is preferred and allows for
712
- # checking data points across all analyses.
713
- def get_datapoint_status(analysis_id, filter = nil)
714
- data_points = nil
715
- # get the status of the entire analysis
716
- unless analysis_id.nil?
717
- if filter.nil? || filter == ''
718
- resp = @conn.get "analyses/#{analysis_id}/status.json"
719
- if resp.status == 200
720
- data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
721
- end
722
- else
723
- resp = @conn.get "analyses/#{analysis_id}/status.json", jobs: filter
724
- if resp.status == 200
725
- data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
726
- end
727
- end
728
- end
729
-
730
- data_points
731
- end
732
-
733
- # Return the JSON (Full) of the datapoint
734
- def get_datapoint(data_point_id)
735
- data_point = nil
736
-
737
- resp = @conn.get "/data_points/#{data_point_id}.json"
738
- if resp.status == 200
739
- data_point = JSON.parse resp.body, symbolize_names: true
740
- end
741
-
742
- data_point
743
- end
744
-
745
- # Submit a generic analysis. This will use the options that are configured in the JSON file including
746
- # the analysis type and options. Note that this may not work for all cases where multiple analyses need to run
747
- # (e.g. single_run, queue_model, lhs)
748
- #
749
- # @param formulation_filename [String] FQP to the formulation file
750
- # @param analysis_zip_filename [String] FQP to the zip file with the supporting files
751
- def run_file(formulation_filename, analysis_zip_filename)
752
- # parse the JSON file to grab the analysis type
753
- j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
754
- analysis_type = j[:analysis][:problem][:analysis_type]
755
-
756
- run(formulation_filename, analysis_zip_filename, analysis_type)
757
- end
758
-
759
- # Submit the analysis for running via the API
760
- #
761
- # @param formulation_filename [String] Name of the analysis.json file
762
- # @param analysis_zip_filename [String] Name of the analysis.zip file
763
- # @param analysis_type [String] Type of analysis to run
764
- # @param options [Hash] Hash of options
765
- # @option options [String] :run_data_point_filename Name of ruby file that the server runs -- will be deprecated
766
- # @option options [Boolean] :push_to_dencity Whether or not to push to DEnCity
767
- # @option options [String] :batch_run_method Which batch run method to use (batch_run or batch_run_local [no R])
768
- def run(formulation_filename, analysis_zip_filename, analysis_type, options = {})
769
- defaults = {
770
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
771
- push_to_dencity: false,
772
- batch_run_method: 'batch_run',
773
- without_delay: false
774
- }
775
- options = defaults.merge(options)
776
-
777
- project_options = {}
778
- project_id = new_project(project_options)
779
-
780
- analysis_options = {
781
- formulation_file: formulation_filename,
782
- upload_file: analysis_zip_filename,
783
- reset_uuids: true,
784
- push_to_dencity: options[:push_to_dencity]
785
- }
786
-
787
- analysis_id = new_analysis(project_id, analysis_options)
788
-
789
- run_options = {
790
- analysis_action: 'start',
791
- without_delay: options[:without_delay],
792
- analysis_type: analysis_type,
793
- simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
794
- run_data_point_filename: options[:run_data_point_filename]
795
- }
796
- start_analysis(analysis_id, run_options)
797
-
798
- # If the analysis is a staged analysis, then go ahead and run batch run
799
- # because there is no explicit way to tell the system to do it
800
- if BATCH_RUN_METHODS.include? analysis_type
801
- run_options = {
802
- analysis_action: 'start',
803
- without_delay: false,
804
- analysis_type: options[:batch_run_method],
805
- simulate_data_point_filename: 'simulate_data_point.rb',
806
- run_data_point_filename: options[:run_data_point_filename]
807
- }
808
- start_analysis(analysis_id, run_options)
809
- end
810
-
811
- analysis_id
812
- end
813
-
814
- def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
815
- run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
816
- project_options = {}
817
- project_id = new_project(project_options)
818
-
819
- analysis_options = {
820
- formulation_file: formulation_filename,
821
- upload_file: analysis_zip_filename,
822
- reset_uuids: true
823
- }
824
- analysis_id = new_analysis(project_id, analysis_options)
825
-
826
- run_options = {
827
- analysis_action: 'start',
828
- without_delay: false,
829
- analysis_type: analysis_type,
830
- simulate_data_point_filename: 'simulate_data_point.rb',
831
- run_data_point_filename: run_data_point_filename
832
- }
833
- start_analysis(analysis_id, run_options)
834
-
835
- analysis_id
836
- end
837
-
838
- def run_batch_run_across_analyses
839
- project_options = {}
840
- project_id = new_project(project_options)
841
-
842
- analysis_options = {
843
- formulation_file: nil,
844
- upload_file: nil,
845
- reset_uuids: true
846
- }
847
- analysis_id = new_analysis(project_id, analysis_options)
848
-
849
- run_options = {
850
- analysis_action: 'start',
851
- without_delay: false,
852
- analysis_type: 'batch_run_analyses',
853
- simulate_data_point_filename: 'simulate_data_point.rb',
854
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
855
- }
856
- start_analysis(analysis_id, run_options)
857
-
858
- analysis_id
859
- end
860
- end
861
- end
862
- end
1
+ # *******************************************************************************
2
+ # OpenStudio(R), Copyright (c) Alliance for Sustainable Energy, LLC.
3
+ # See also https://openstudio.net/license
4
+ # *******************************************************************************
5
+
6
+ # This class manages communication with the server.
7
+ # Presently, this class is simple and stores all information in hashes.
8
+ module OpenStudio
9
+ module Analysis
10
+ class ServerApi
11
+ attr_reader :hostname
12
+
13
+ # Define the set of analysis methods that require batch_run to be queued after them
14
+ BATCH_RUN_METHODS = ['lhs', 'preflight', 'single_run', 'repeat_run', 'doe', 'diag', 'baseline_perturbation', 'batch_datapoints'].freeze
15
+
16
+ def initialize(options = {})
17
+ defaults = { hostname: 'http://localhost:8080', log_path: File.expand_path('~/os_server_api.log') }
18
+ options = defaults.merge(options)
19
+ if ENV['OS_SERVER_LOG_PATH']
20
+ @logger = ::Logger.new(ENV['OS_SERVER_LOG_PATH'] + '/os_server_api.log')
21
+ else
22
+ @logger = ::Logger.new(options[:log_path])
23
+ end
24
+
25
+ @hostname = options[:hostname]
26
+
27
+ raise 'no host defined for server api class' if @hostname.nil?
28
+
29
+ # TODO: add support for the proxy
30
+
31
+ # create connection with basic capabilities
32
+ @conn = Faraday.new(url: @hostname) do |faraday|
33
+ faraday.request :url_encoded # form-encode POST params
34
+ faraday.options.timeout = 300
35
+ faraday.options.open_timeout = 300
36
+ faraday.options.write_timeout = 1800
37
+ faraday.use Faraday::Response::Logger, @logger
38
+ # faraday.response @logger # log requests to STDOUT
39
+ faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
40
+ end
41
+
42
+ # create connection to server api with multipart capabilities
43
+ @conn_multipart = Faraday.new(url: @hostname) do |faraday|
44
+ faraday.request :multipart
45
+ faraday.request :url_encoded # form-encode POST params
46
+ faraday.options.timeout = 300
47
+ faraday.options.open_timeout = 300
48
+ faraday.options.write_timeout = 1800
49
+ faraday.use Faraday::Response::Logger, @logger
50
+ # faraday.response :logger # log requests to STDOUT
51
+ faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
52
+ end
53
+ end
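
(Illustrative sketch, not from the package source: a minimal construction of the client. It assumes the gem is loaded via its conventional require name and that a server is listening at the default hostname shown in the defaults above.)

  require 'openstudio-analysis'

  # Build a client against a local server; log_path is optional and
  # falls back to ~/os_server_api.log per the defaults above.
  api = OpenStudio::Analysis::ServerApi.new(
    hostname: 'http://localhost:8080',
    log_path: File.expand_path('~/os_server_api.log')
  )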
54
+
55
+ def get_projects
56
+ response = @conn.get '/projects.json'
57
+
58
+ projects_json = nil
59
+ if response.status == 200
60
+ projects_json = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
61
+ else
62
+ raise 'did not receive a 200 in get_projects'
63
+ end
64
+
65
+ projects_json
66
+ end
67
+
68
+ def get_project_ids
69
+ ids = get_projects
70
+ ids.map { |project| project[:uuid] }
71
+ end
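
(Illustrative sketch, not from the package source: get_projects raises on a non-200, while get_project_ids maps the result down to UUIDs, so iterating projects is a one-liner. api is the client from the construction sketch above.)

  api.get_project_ids.each { |id| puts "project: #{id}" }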
72
+
73
+ def delete_project(id)
74
+ deleted = false
75
+ response = @conn.delete "/projects/#{id}.json"
76
+ if response.status == 204
77
+ puts "Successfully deleted project #{id}"
78
+ deleted = true
79
+ else
80
+ puts "ERROR deleting project #{id}"
81
+ deleted = false
82
+ end
83
+
84
+ deleted
85
+ end
86
+
87
+ def delete_all
88
+ ids = get_project_ids
89
+ puts "deleting projects with IDs: #{ids}"
90
+ success = true
91
+ ids.each do |id|
92
+ r = delete_project id
93
+ success = false if r == false
94
+ end
95
+
96
+ success
97
+ end
98
+
99
+ def new_project(options = {})
100
+ defaults = { project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
101
+ options = defaults.merge(options)
102
+ project_id = nil
103
+
104
+ # TODO: make this a display name and a machine name
105
+ project_hash = { project: { name: (options[:project_name]).to_s } }
106
+ begin
107
+ response = @conn.post do |req|
108
+ req.url '/projects.json'
109
+ req.headers['Content-Type'] = 'application/json'
110
+ req.body = project_hash.to_json
111
+ end
112
+ puts "response.status: #{response.status}"
113
+ puts response.inspect
114
+ rescue Net::OpenTimeout => e
115
+ puts "new_project OpenTimeout: #{e.message}"
116
+ end
117
+ if response && response.status == 201
118
+ project_id = JSON.parse(response.body)['_id']
119
+
120
+ puts "new project created with ID: #{project_id}"
121
+ # grab the project id
122
+ elsif response && response.status == 500
123
+ puts '500 Error'
124
+ puts response.inspect
125
+ end
126
+
127
+ project_id
128
+ end
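
(Illustrative sketch, not from the package source: new_project returns the server-assigned ID on a 201 and nil otherwise, so callers should guard before continuing. api is the client from the construction sketch above.)

  project_id = api.new_project(project_name: 'Parametric Study 01')
  raise 'project creation failed' if project_id.nil?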
129
+
130
+ def get_analyses(project_id)
131
+ analysis_ids = []
132
+ response = @conn.get "/projects/#{project_id}.json"
133
+ if response.status == 200
134
+ analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
135
+ analyses[:analyses]&.each do |analysis|
136
+ analysis_ids << analysis[:_id]
137
+ end
138
+ end
139
+
140
+ analysis_ids
141
+ end
142
+
143
+ def get_analyses_detailed(project_id)
144
+ analyses = nil
145
+ response = @conn.get "/projects/#{project_id}.json"
146
+ if response.status == 200
147
+ analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
148
+ end
149
+
150
+ analyses
151
+ end
152
+
153
+ # return the entire analysis JSON
154
+ def get_analysis(analysis_id)
155
+ result = nil
156
+ response = @conn.get "/analyses/#{analysis_id}.json"
157
+ if response.status == 200
158
+ result = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
159
+ end
160
+
161
+ result
162
+ end
163
+
164
+ # Check the status of the simulation. Format should be:
165
+ # {
166
+ # analysis: {
167
+ # status: "completed",
168
+ # analysis_type: "batch_run"
169
+ # },
170
+ # data_points: [
171
+ # {
172
+ # _id: "bbd57e90-ce59-0131-35de-080027880ca6",
173
+ # status: "completed"
174
+ # }
175
+ # ]
176
+ # }
177
+ def get_analysis_status(analysis_id, analysis_type)
178
+ status = nil
179
+
180
+ # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
181
+ # race condition when the analysis state changes.
182
+ unless analysis_id.nil?
183
+ resp = @conn.get "analyses/#{analysis_id}/status.json"
184
+ if resp.status == 200
185
+ j = JSON.parse resp.body, symbolize_names: true
186
+ if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
187
+ status = j[:analysis][:status]
188
+ elsif j && j[:analysis] && analysis_type == 'batch_run'
189
+ status = j[:analysis][:status]
190
+ end
191
+ end
192
+ end
193
+
194
+ status
195
+ end
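
(Illustrative sketch, not from the package source: because get_analysis_status returns nil until the server reports a matching analysis, callers usually poll. analysis_id is assumed to hold an ID returned by new_analysis; the sleep interval is arbitrary.)

  loop do
    status = api.get_analysis_status(analysis_id, 'batch_run')
    puts "status: #{status}"
    break if status == 'completed'
    sleep 5
  end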
196
+
197
+ # Check if the machine is alive
198
+ #
199
+ # @return [Boolean] True if the machine has an awake value set
200
+ def alive?
201
+ m = machine_status
202
+
203
+ m = !m[:status][:awake].nil? if m
204
+
205
+ m
206
+ end
207
+
208
+ # Retrieve the machine status
209
+ #
210
+ # @return [Hash]
211
+ def machine_status
212
+ status = nil
213
+
214
+ begin
215
+ resp = @conn.get do |req|
216
+ req.url 'status.json'
217
+ req.options.timeout = 300
218
+ req.options.open_timeout = 300
219
+ end
220
+ puts "machine_status resp.status: #{resp.status}"
221
+ puts resp.inspect
222
+ if resp.status == 200
223
+ j = JSON.parse resp.body, symbolize_names: true
224
+ status = j if j
225
+ end
226
+ rescue Faraday::ConnectionFailed => e
227
+ puts "machine_Status ConnectionFailed: #{e.message}"
228
+ rescue Net::ReadTimeout => e
229
+ puts "machine_Status ReadTimeout: #{e.message}"
230
+ end
231
+
232
+ status
233
+ end
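
(Illustrative sketch, not from the package source: alive? is just a nil-check on the :awake field of machine_status, so it makes a cheap preflight before queueing work.)

  if api.alive?
    puts "server is up: #{api.machine_status.inspect}"
  else
    puts 'server is not responding'
  end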
234
+
235
+ def get_analysis_status_and_json(analysis_id, analysis_type)
236
+ status = nil
237
+ j = nil
238
+
239
+ # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
240
+ # race condition when the analysis state changes.
241
+ unless analysis_id.nil?
242
+ resp = @conn.get "analyses/#{analysis_id}/status.json"
243
+ if resp.status == 200
244
+ j = JSON.parse resp.body, symbolize_names: true
245
+ if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
246
+ status = j[:analysis][:status]
247
+ end
248
+ end
249
+ end
250
+
251
+ [status, j]
252
+ end
253
+
254
+ # return the data point results in JSON format
255
+ def get_analysis_results(analysis_id)
256
+ analysis = nil
257
+
258
+ response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
259
+ if response.status == 200
260
+ analysis = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
261
+ end
262
+
263
+ analysis
264
+ end
265
+
266
+ def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
267
+ downloaded = false
268
+ file_path_and_name = nil
269
+
270
+ response = @conn.get do |r|
271
+ r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
272
+ r.options.timeout = 3600 # 60 minutes
273
+ end
274
+ if response.status == 200
275
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
276
+ downloaded = true
277
+ file_path_and_name = "#{save_directory}/#{filename}"
278
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
279
+ if format == 'rdata'
280
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
281
+ else
282
+ File.open(file_path_and_name, 'w') { |f| f << response.body }
283
+ end
284
+ end
285
+
286
+ [downloaded, file_path_and_name]
287
+ end
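
(Illustrative sketch, not from the package source: the download helpers return a [success, path] pair instead of raising, so branch on the flag. The 'csv' format and target directory are assumptions for illustration.)

  ok, path = api.download_dataframe(analysis_id, 'csv', '/tmp')
  puts ok ? "wrote #{path}" : 'dataframe download failed'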
288
+
289
+ def download_variables(analysis_id, format = 'rdata', save_directory = '.')
290
+ downloaded = false
291
+ file_path_and_name = nil
292
+
293
+ response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
294
+ if response.status == 200
295
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
296
+ downloaded = true
297
+ file_path_and_name = "#{save_directory}/#{filename}"
298
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
299
+ if format == 'rdata'
300
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
301
+ else
302
+ File.open(file_path_and_name, 'w') { |f| f << response.body }
303
+ end
304
+ end
305
+
306
+ [downloaded, file_path_and_name]
307
+ end
308
+
309
+ def download_datapoint(datapoint_id, save_directory = '.')
310
+ downloaded = false
311
+ file_path_and_name = nil
312
+
313
+ response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=data_point.zip"
314
+ if response.status == 200
315
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
316
+ downloaded = true
317
+ file_path_and_name = "#{save_directory}/#{filename}"
318
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
319
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
320
+ else
321
+ response = @conn.get "/data_points/#{datapoint_id}/download"
322
+ if response.status == 200
323
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
324
+ downloaded = true
325
+ file_path_and_name = "#{save_directory}/#{filename}"
326
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
327
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
328
+ end
329
+ end
330
+
331
+ [downloaded, file_path_and_name]
332
+ end
333
+
334
+ # Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with
335
+ # DEnCity reporting, the size is around 325MB
336
+ def download_database(save_directory = '.')
337
+ downloaded = false
338
+ file_path_and_name = nil
339
+
340
+ response = @conn.get do |r|
341
+ r.url '/admin/backup_database?full_backup=true'
342
+ r.options.timeout = 3600 # 60 minutes
343
+ end
344
+
345
+ if response.status == 200
346
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
347
+ downloaded = true
348
+ file_path_and_name = "#{save_directory}/#{filename}"
349
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
350
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
351
+ end
352
+
353
+ [downloaded, file_path_and_name]
354
+ end
355
+
356
+ # http://localhost:3000/data_points/ff857845-a4c6-4eb9-a52b-cbc6a41976d5/download_result_file?filename=
357
+ def download_datapoint_report(datapoint_id, report_name, save_directory = '.')
358
+ downloaded = false
359
+ file_path_and_name = nil
360
+
361
+ response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=#{report_name}"
362
+ if response.status == 200
363
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
364
+ downloaded = true
365
+ file_path_and_name = "#{save_directory}/#{filename}"
366
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
367
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
368
+ end
369
+
370
+ [downloaded, file_path_and_name]
371
+ end
372
+
373
+ def download_datapoint_jsons(analysis_id, save_directory = '.')
374
+ # get the list of all the datapoints
375
+ dps = get_datapoint_status(analysis_id)
376
+ dps.each do |dp|
377
+ if dp[:status] == 'completed'
378
+ dp_h = get_datapoint(dp[:_id])
379
+ File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
380
+ end
381
+ end
382
+ end
383
+
384
+ def datapoint_dencity(datapoint_id)
385
+ # Return the JSON (Full) of the datapoint
386
+ data_point = nil
387
+
388
+ resp = @conn.get "/data_points/#{datapoint_id}/dencity.json"
389
+ if resp.status == 200
390
+ data_point = JSON.parse resp.body, symbolize_names: true
391
+ end
392
+
393
+ data_point
394
+ end
395
+
396
+ def analysis_dencity_json(analysis_id)
397
+ # Return the hash of the dencity format for the analysis
398
+ dencity = nil
399
+
400
+ resp = @conn.get "/analyses/#{analysis_id}/dencity.json"
401
+ if resp.status == 200
402
+ dencity = JSON.parse resp.body, symbolize_names: true
403
+ end
404
+
405
+ dencity
406
+ end
407
+
408
+ def download_dencity_json(analysis_id, save_directory = '.')
409
+ a_h = analysis_dencity_json(analysis_id)
410
+ if a_h
411
+ File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') { |f| f << JSON.pretty_generate(a_h) }
412
+ end
413
+ end
414
+
415
+ def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
416
+ # get the list of all the datapoints
417
+ dps = get_datapoint_status(analysis_id)
418
+ dps.each do |dp|
419
+ if dp[:status] == 'completed'
420
+ dp_h = datapoint_dencity(dp[:_id])
421
+ File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
422
+ end
423
+ end
424
+ end
425
+
426
+ def new_analysis(project_id, options)
427
+ defaults = {
428
+ analysis_name: nil,
429
+ reset_uuids: false,
430
+ push_to_dencity: false
431
+ }
432
+ options = defaults.merge(options)
433
+
434
+ raise 'No project id passed' if project_id.nil?
435
+
436
+ formulation_json = nil
437
+ if options[:formulation_file]
438
+ raise "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
439
+ formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
440
+ end
441
+
442
+ # read in the analysis id from the analysis.json file
443
+ analysis_id = nil
444
+ if formulation_json
445
+ if options[:reset_uuids]
446
+ analysis_id = SecureRandom.uuid
447
+ formulation_json[:analysis][:uuid] = analysis_id
448
+
449
+ formulation_json[:analysis][:problem][:workflow].each do |wf|
450
+ wf[:uuid] = SecureRandom.uuid
451
+ wf[:arguments]&.each do |arg|
452
+ arg[:uuid] = SecureRandom.uuid
453
+ end
454
+ wf[:variables]&.each do |var|
455
+ var[:uuid] = SecureRandom.uuid
456
+ var[:argument][:uuid] = SecureRandom.uuid if var[:argument]
457
+ end
458
+ end
459
+ else
460
+ analysis_id = formulation_json[:analysis][:uuid]
461
+ end
462
+
463
+ # set the analysis name
464
+ formulation_json[:analysis][:name] = (options[:analysis_name]).to_s unless options[:analysis_name].nil?
465
+ else
466
+ formulation_json = {
467
+ analysis: options
468
+ }
469
+ puts formulation_json
470
+ analysis_id = SecureRandom.uuid
471
+ formulation_json[:analysis][:uuid] = analysis_id
472
+ end
473
+ raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?
474
+
475
+ # save out this file to compare
476
+ # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
477
+
478
+ response = @conn.post do |req|
479
+ req.url "projects/#{project_id}/analyses.json"
480
+ req.headers['Content-Type'] = 'application/json'
481
+ req.body = formulation_json.to_json
482
+ req.options.timeout = 600 # seconds
483
+ req.options.write_timeout = 1800
484
+ end
485
+
486
+ if response.status == 201
487
+ puts "asked to create analysis with #{analysis_id}"
488
+ # puts resp.inspect
489
+ analysis_id = JSON.parse(response.body)['_id']
490
+ puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
491
+ upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
492
+ puts "new analysis created with ID: #{analysis_id}"
493
+ else
494
+ raise 'Could not create new analysis'
495
+ end
496
+
497
+ # check if we need to upload the analysis zip file
498
+ if options[:upload_file]
499
+ raise "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])
500
+
501
+ payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
502
+ response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
503
+ req.options.timeout = 1800 # seconds
504
+ req.options.write_timeout = 1800
505
+ end
506
+
507
+ if response.status == 201
508
+ puts 'Successfully uploaded ZIP file'
509
+ else
510
+ raise response.inspect
511
+ end
512
+ end
513
+
514
+ analysis_id
515
+ end
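
(Illustrative sketch, not from the package source: a typical new_analysis call points at a formulation JSON plus its supporting zip and lets the client mint fresh UUIDs. The file paths are placeholders; project_id comes from new_project.)

  analysis_id = api.new_analysis(
    project_id,
    formulation_file: 'analysis.json',
    upload_file: 'analysis.zip',
    reset_uuids: true
  )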
516
+
517
+ def upload_to_dencity(analysis_uuid, analysis)
518
+ require 'dencity'
519
+ puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
520
+ conn = Dencity.connect
521
+ raise "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
522
+ begin
523
+ r = conn.login
524
+ rescue Faraday::ParsingError => user_id_failure
525
+ raise "Error in user_id field: #{user_id_failure.message}"
526
+ rescue MultiJson::ParseError => authentication_failure
527
+ raise "Error in attempted authentication: #{authentication_failure.message}"
528
+ end
529
+ user_uuid = r.id
530
+
531
+ # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
532
+ # Check that the analysis has not yet been registered with the DEnCity instance.
533
+ # TODO: This should be simplified with a 'retrieve_analysis_by_user_defined_id' method in the future
534
+ user_analyses = []
535
+ r = conn.dencity_get 'analyses'
536
+ raise 'Unable to retrieve analyses from DEnCity server' unless r['status'] == 200
537
+ r['data'].each do |dencity_analysis|
538
+ user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
539
+ end
540
+ found_analysis_uuid = false
541
+ user_analyses.each do |dencity_analysis_id|
542
+ dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
543
+ if dencity_analysis['user_defined_id'] == analysis_uuid
544
+ found_analysis_uuid = true
545
+ break
546
+ end
547
+ end
548
+ raise "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
549
+ dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)
550
+
551
+ # Write the analysis DEnCity hash to dencity_analysis.json
552
+ f = File.new('dencity_analysis.json', 'wb')
553
+ f.write(JSON.pretty_generate(dencity_hash))
554
+ f.close
555
+
556
+ # Upload the processed analysis json.
557
+ upload = conn.load_analysis 'dencity_analysis.json'
558
+ begin
559
+ upload_response = upload.push
560
+ rescue StandardError => e
561
+ runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
562
+ else
563
+ if upload_response.is_a?(NoMethodError)
564
+ raise "ERROR: Server responded with a NoMethodError: #{upload_response}"
565
+ end
566
+ if upload_response.status.to_s[0] == '2'
567
+ puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
568
+ else
569
+ puts 'ERROR: Server returned a non-20x status. Response below.'
570
+ puts upload_response
571
+ raise
572
+ end
573
+ end
574
+ end
575
+
576
+ # Upload a single datapoint
577
+ # @param analysis_id [String] Analysis ID to attach the datapoint to
578
+ # @param options [Hash] Options
579
+ # @option options [String] :datapoint_file Path to datapoint JSON to upload
580
+ # @option options [Boolean] :reset_uuids Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.
581
+ def upload_datapoint(analysis_id, options)
582
+ defaults = { reset_uuids: false }
583
+ options = defaults.merge(options)
584
+
585
+ raise 'No analysis id passed' if analysis_id.nil?
586
+ raise 'No datapoint file passed to upload_datapoint' unless options[:datapoint_file]
587
+ raise "No datapoints_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])
588
+
589
+ dp_hash = JSON.parse(File.open(options[:datapoint_file]).read, symbolize_names: true)
590
+
591
+ # There are two instances of the analysis ID. There is one in the file,
592
+ # and the other is in the POST url. Ideally remove the version in the
593
+ # file and support only the URL based analysis_id
594
+ dp_hash[:analysis_uuid] = analysis_id
595
+
596
+ if options[:reset_uuids]
597
+ dp_hash[:uuid] = SecureRandom.uuid
598
+ end
599
+
600
+ # merge in the analysis_id as it has to be what is in the database
601
+ response = @conn.post do |req|
602
+ req.url "analyses/#{analysis_id}/data_points.json"
603
+ req.headers['Content-Type'] = 'application/json'
604
+ req.body = dp_hash.to_json
605
+ end
606
+
607
+ if response.status == 201
608
+ puts "new datapoints created for analysis #{analysis_id}"
609
+ return JSON.parse(response.body, symbolize_names: true)
610
+ else
611
+ raise "could not create new datapoints #{response.body}"
612
+ end
613
+ end
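
(Illustrative sketch, not from the package source: upload_datapoint posts one datapoint JSON to an existing analysis and returns the parsed server response on a 201. The file path is a placeholder.)

  dp = api.upload_datapoint(analysis_id, datapoint_file: 'data_point.json', reset_uuids: true)
  puts dp.inspect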
614
+
615
+ # Upload multiple data points to the server.
616
+ # @param analysis_id [String] Analysis ID to attach the datapoints to
617
+ def upload_datapoints(analysis_id, options)
618
+ defaults = {}
619
+ options = defaults.merge(options)
620
+
621
+ raise 'No analysis id passed' if analysis_id.nil?
622
+ raise 'No datapoints file passed to upload_datapoints' unless options[:datapoints_file]
623
+ raise "No datapoints_file exists #{options[:datapoints_file]}" unless File.exist?(options[:datapoints_file])
624
+
625
+ dp_hash = JSON.parse(File.open(options[:datapoints_file]).read, symbolize_names: true)
626
+
627
+ # merge in the analysis_id as it has to be what is in the database
628
+ response = @conn.post do |req|
629
+ req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
630
+ req.headers['Content-Type'] = 'application/json'
631
+ req.body = dp_hash.to_json
632
+ end
633
+
634
+ if response.status == 201
635
+ puts "new datapoints created for analysis #{analysis_id}"
636
+ else
637
+ raise "could not create new datapoints #{response.body}"
638
+ end
639
+ end
640
+
641
+ def start_analysis(analysis_id, options)
642
+ defaults = { analysis_action: 'start', without_delay: false }
643
+ options = defaults.merge(options)
644
+
645
+ puts "Run analysis is configured with #{options.to_json}"
646
+ response = @conn.post do |req|
647
+ req.url "analyses/#{analysis_id}/action.json"
648
+ req.headers['Content-Type'] = 'application/json'
649
+ req.body = options.to_json
650
+ req.options.timeout = 1800 # seconds
651
+ req.options.write_timeout = 1800
652
+ end
653
+
654
+ if response.status == 200
655
+ puts "Received request to run analysis #{analysis_id}"
656
+ else
657
+ raise 'Could not start the analysis'
658
+ end
659
+ end
660
+
661
+ # Kill the analysis
662
+ # @param analysis_id [String] Analysis ID to stop
663
+ def kill_analysis(analysis_id)
664
+ analysis_action = { analysis_action: 'stop' }
665
+
666
+ response = @conn.post do |req|
667
+ req.url "analyses/#{analysis_id}/action.json"
668
+ req.headers['Content-Type'] = 'application/json'
669
+ req.body = analysis_action.to_json
670
+ end
671
+
672
+ if response.status == 200
673
+ puts "Killed analysis #{analysis_id}"
674
+ end
675
+ end
676
+
677
+ def kill_all_analyses
678
+ project_ids = get_project_ids
679
+ puts "List of projects ids are: #{project_ids}"
680
+
681
+ project_ids.each do |project_id|
682
+ analysis_ids = get_analyses(project_id)
683
+ puts analysis_ids
684
+ analysis_ids.each do |analysis_id|
685
+ puts "Trying to kill #{analysis_id}"
686
+ kill_analysis(analysis_id)
687
+ end
688
+ end
689
+ end
690
+
691
+ # Get a list of analyses and the data points
692
+ #
693
+ # @param analysis_id [String] An analysis ID
694
+ def data_point_status(analysis_id = nil)
695
+ data_points = nil
696
+ call_string = nil
697
+ if analysis_id
698
+ call_string = "analyses/#{analysis_id}/status.json"
699
+ else
700
+ call_string = 'analyses/status.json'
701
+ end
702
+
703
+ resp = @conn.get call_string, version: 2
704
+ if resp.status == 200
705
+ data_points = JSON.parse(resp.body, symbolize_names: true)[:analyses]
706
+ end
707
+
708
+ data_points
709
+ end
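
(Illustrative sketch, not from the package source: called without an argument, data_point_status reports across every analysis on the server; with an ID it narrows to one.)

  puts api.data_point_status.inspect              # all analyses
  puts api.data_point_status(analysis_id).inspect # a single analysis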
710
+
711
+ # This is the former version of get data point status. The new version is preferred and allows for
712
+ # checking data points across all analyses.
713
+ def get_datapoint_status(analysis_id, filter = nil)
714
+ data_points = nil
715
+ # get the status of the entire analysis
716
+ unless analysis_id.nil?
717
+ if filter.nil? || filter == ''
718
+ resp = @conn.get "analyses/#{analysis_id}/status.json"
719
+ if resp.status == 200
720
+ data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
721
+ end
722
+ else
723
+ resp = @conn.get "analyses/#{analysis_id}/status.json", jobs: filter
724
+ if resp.status == 200
725
+ data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
726
+ end
727
+ end
728
+ end
729
+
730
+ data_points
731
+ end
732
+
733
+ # Return the JSON (Full) of the datapoint
734
+ def get_datapoint(data_point_id)
735
+ data_point = nil
736
+
737
+ resp = @conn.get "/data_points/#{data_point_id}.json"
738
+ if resp.status == 200
739
+ data_point = JSON.parse resp.body, symbolize_names: true
740
+ end
741
+
742
+ data_point
743
+ end
744
+
745
+ # Submit a generic analysis. This will use the options that are configured in the JSON file including
746
+ # the analysis type and options. Note that this may not work for all cases where multiple analyses need to run
747
+ # (e.g. single_run, queue_model, lhs)
748
+ #
749
+ # @param formulation_filename [String] FQP to the formulation file
750
+ # @param analysis_zip_filename [String] FQP to the zip file with the supporting files
751
+ def run_file(formulation_filename, analysis_zip_filename)
752
+ # parse the JSON file to grab the analysis type
753
+ j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
754
+ analysis_type = j[:analysis][:problem][:analysis_type]
755
+
756
+ run(formulation_filename, analysis_zip_filename, analysis_type)
757
+ end
758
+
759
+ # Submit the analysis for running via the API
760
+ #
761
+ # @param formulation_filename [String] Name of the analysis.json file
762
+ # @param analysis_zip_filename [String] Name of the analysis.zip file
763
+ # @param analysis_type [String] Type of analysis to run
764
+ # @param options [Hash] Hash of options
765
+ # @option options [String] :run_data_point_filename Name of ruby file that the server runs -- will be deprecated
766
+ # @option options [Boolean] :push_to_dencity Whether or not to push to DEnCity
767
+ # @option options [String] :batch_run_method Which batch run method to use (batch_run or batch_run_local [no R])
768
+ def run(formulation_filename, analysis_zip_filename, analysis_type, options = {})
769
+ defaults = {
770
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
771
+ push_to_dencity: false,
772
+ batch_run_method: 'batch_run',
773
+ without_delay: false
774
+ }
775
+ options = defaults.merge(options)
776
+
777
+ project_options = {}
778
+ project_id = new_project(project_options)
779
+
780
+ analysis_options = {
781
+ formulation_file: formulation_filename,
782
+ upload_file: analysis_zip_filename,
783
+ reset_uuids: true,
784
+ push_to_dencity: options[:push_to_dencity]
785
+ }
786
+
787
+ analysis_id = new_analysis(project_id, analysis_options)
788
+
789
+ run_options = {
790
+ analysis_action: 'start',
791
+ without_delay: options[:without_delay],
792
+ analysis_type: analysis_type,
793
+ simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
794
+ run_data_point_filename: options[:run_data_point_filename]
795
+ }
796
+ start_analysis(analysis_id, run_options)
797
+
798
+ # If the analysis is a staged analysis, then go ahead and run batch run
799
+ # because there is no explicit way to tell the system to do it
800
+ if BATCH_RUN_METHODS.include? analysis_type
801
+ run_options = {
802
+ analysis_action: 'start',
803
+ without_delay: false,
804
+ analysis_type: options[:batch_run_method],
805
+ simulate_data_point_filename: 'simulate_data_point.rb',
806
+ run_data_point_filename: options[:run_data_point_filename]
807
+ }
808
+ start_analysis(analysis_id, run_options)
809
+ end
810
+
811
+ analysis_id
812
+ end
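
(Illustrative sketch, not from the package source: run wraps project creation, analysis creation, file upload, and start in one call, and for the staged types in BATCH_RUN_METHODS it queues the follow-on batch run itself. File names are placeholders; batch_run_method is the documented option.)

  analysis_id = api.run('analysis.json', 'analysis.zip', 'single_run',
                        batch_run_method: 'batch_run_local')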
813
+
814
+ def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
815
+ run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
816
+ project_options = {}
817
+ project_id = new_project(project_options)
818
+
819
+ analysis_options = {
820
+ formulation_file: formulation_filename,
821
+ upload_file: analysis_zip_filename,
822
+ reset_uuids: true
823
+ }
824
+ analysis_id = new_analysis(project_id, analysis_options)
825
+
826
+ run_options = {
827
+ analysis_action: 'start',
828
+ without_delay: false,
829
+ analysis_type: analysis_type,
830
+ simulate_data_point_filename: 'simulate_data_point.rb',
831
+ run_data_point_filename: run_data_point_filename
832
+ }
833
+ start_analysis(analysis_id, run_options)
834
+
835
+ analysis_id
836
+ end
837
+
838
+ def run_batch_run_across_analyses
839
+ project_options = {}
840
+ project_id = new_project(project_options)
841
+
842
+ analysis_options = {
843
+ formulation_file: nil,
844
+ upload_file: nil,
845
+ reset_uuids: true
846
+ }
847
+ analysis_id = new_analysis(project_id, analysis_options)
848
+
849
+ run_options = {
850
+ analysis_action: 'start',
851
+ without_delay: false,
852
+ analysis_type: 'batch_run_analyses',
853
+ simulate_data_point_filename: 'simulate_data_point.rb',
854
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
855
+ }
856
+ start_analysis(analysis_id, run_options)
857
+
858
+ analysis_id
859
+ end
860
+ end
861
+ end
862
+ end