openstudio-analysis 1.3.4 → 1.3.6

This diff shows the publicly released contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
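
The substantive change between 1.3.4 and 1.3.6 in this file is a hardening of the HTTP layer: both Faraday connections now set explicit read, open, and write timeouts; new_project wraps its POST in a begin/rescue for Net::OpenTimeout and logs the response; machine_status raises its per-request timeouts from 120 to 300 seconds and prints the rescued exception; and per-request timeouts move from the req.options[:timeout] hash style to req.options.timeout accessors, gaining a write_timeout. A minimal sketch of the new connection configuration, assuming a Faraday version whose RequestOptions exposes write_timeout:

    require 'faraday'
    require 'logger'

    logger = Logger.new(File.expand_path('~/os_server_api.log'))

    # Mirrors the options that the 1.3.6 initialize sets on @conn
    conn = Faraday.new(url: 'http://localhost:8080') do |faraday|
      faraday.request :url_encoded            # form-encode POST params
      faraday.options.timeout = 300           # read timeout in seconds, added in this release range
      faraday.options.open_timeout = 300      # connect timeout, added in this release range
      faraday.options.write_timeout = 1800    # write timeout, added in this release range
      faraday.use Faraday::Response::Logger, logger
      faraday.adapter Faraday.default_adapter # Net::HTTP
    end
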
@@ -1,846 +1,862 @@
- # *******************************************************************************
- # OpenStudio(R), Copyright (c) Alliance for Sustainable Energy, LLC.
- # See also https://openstudio.net/license
- # *******************************************************************************
-
- # This class manages the communication with the server.
- # Presently, this class is simple and stores all information in hashes
- module OpenStudio
-   module Analysis
-     class ServerApi
-       attr_reader :hostname
-
-       # Define the set of analysis methods that require batch_run to be queued after them
-       BATCH_RUN_METHODS = ['lhs', 'preflight', 'single_run', 'repeat_run', 'doe', 'diag', 'baseline_perturbation', 'batch_datapoints'].freeze
-
-       def initialize(options = {})
-         defaults = { hostname: 'http://localhost:8080', log_path: File.expand_path('~/os_server_api.log') }
-         options = defaults.merge(options)
-         if ENV['OS_SERVER_LOG_PATH']
-           @logger = ::Logger.new(ENV['OS_SERVER_LOG_PATH'] + '/os_server_api.log')
-         else
-           @logger = ::Logger.new(options[:log_path])
-         end
-
-         @hostname = options[:hostname]
-
-         raise 'no host defined for server api class' if @hostname.nil?
-
-         # TODO: add support for the proxy
-
-         # create connection with basic capabilities
-         @conn = Faraday.new(url: @hostname) do |faraday|
-           faraday.request :url_encoded # form-encode POST params
-           faraday.use Faraday::Response::Logger, @logger
-           # faraday.response @logger # log requests to STDOUT
-           faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
-         end
-
-         # create connection to server api with multipart capabilities
-         @conn_multipart = Faraday.new(url: @hostname) do |faraday|
-           faraday.request :multipart
-           faraday.request :url_encoded # form-encode POST params
-           faraday.use Faraday::Response::Logger, @logger
-           # faraday.response :logger # log requests to STDOUT
-           faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
-         end
-       end
-
-       def get_projects
-         response = @conn.get '/projects.json'
-
-         projects_json = nil
-         if response.status == 200
-           projects_json = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
-         else
-           raise 'did not receive a 200 in get_projects'
-         end
-
-         projects_json
-       end
-
-       def get_project_ids
-         ids = get_projects
-         ids.map { |project| project[:uuid] }
-       end
-
-       def delete_project(id)
-         deleted = false
-         response = @conn.delete "/projects/#{id}.json"
-         if response.status == 204
-           puts "Successfully deleted project #{id}"
-           deleted = true
-         else
-           puts "ERROR deleting project #{id}"
-           deleted = false
-         end
-
-         deleted
-       end
-
-       def delete_all
-         ids = get_project_ids
-         puts "deleting projects with IDs: #{ids}"
-         success = true
-         ids.each do |id|
-           r = delete_project id
-           success = false if r == false
-         end
-
-         success
-       end
-
-       def new_project(options = {})
-         defaults = { project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
-         options = defaults.merge(options)
-         project_id = nil
-
-         # TODO: make this a display name and a machine name
-         project_hash = { project: { name: (options[:project_name]).to_s } }
-
-         response = @conn.post do |req|
-           req.url '/projects.json'
-           req.headers['Content-Type'] = 'application/json'
-           req.body = project_hash.to_json
-         end
-
-         if response.status == 201
-           project_id = JSON.parse(response.body)['_id']
-
-           puts "new project created with ID: #{project_id}"
-         # grab the project id
-         elsif response.status == 500
-           puts '500 Error'
-           puts response.inspect
-         end
-
-         project_id
-       end
-
-       def get_analyses(project_id)
-         analysis_ids = []
-         response = @conn.get "/projects/#{project_id}.json"
-         if response.status == 200
-           analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
-           analyses[:analyses]&.each do |analysis|
-             analysis_ids << analysis[:_id]
-           end
-         end
-
-         analysis_ids
-       end
-
-       def get_analyses_detailed(project_id)
-         analyses = nil
-         response = @conn.get "/projects/#{project_id}.json"
-         if response.status == 200
-           analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
-         end
-
-         analyses
-       end
-
-       # return the entire analysis JSON
-       def get_analysis(analysis_id)
-         result = nil
-         response = @conn.get "/analyses/#{analysis_id}.json"
-         if response.status == 200
-           result = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
-         end
-
-         result
-       end
-
-       # Check the status of the simulation. Format should be:
-       # {
-       #   analysis: {
-       #     status: "completed",
-       #     analysis_type: "batch_run"
-       #   },
-       #   data_points: [
-       #     {
-       #       _id: "bbd57e90-ce59-0131-35de-080027880ca6",
-       #       status: "completed"
-       #     }
-       #   ]
-       # }
-       def get_analysis_status(analysis_id, analysis_type)
-         status = nil
-
-         # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
-         # race condition when the analysis state changes.
-         unless analysis_id.nil?
-           resp = @conn.get "analyses/#{analysis_id}/status.json"
-           if resp.status == 200
-             j = JSON.parse resp.body, symbolize_names: true
-             if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
-               status = j[:analysis][:status]
-             elsif j && j[:analysis] && analysis_type == 'batch_run'
-               status = j[:analysis][:status]
-             end
-           end
-         end
-
-         status
-       end
-
-       # Check if the machine is alive
-       #
-       # @return [Boolean] true if the machine has an awake value set
-       def alive?
-         m = machine_status
-
-         m = !m[:status][:awake].nil? if m
-
-         m
-       end
-
-       # Retrieve the machine status
-       #
-       # @return [Hash]
-       def machine_status
-         status = nil
-
-         begin
-           resp = @conn.get do |req|
-             req.url 'status.json'
-             req.options.timeout = 120
-             req.options.open_timeout = 120
-           end
-
-           if resp.status == 200
-             j = JSON.parse resp.body, symbolize_names: true
-             status = j if j
-           end
-         rescue Faraday::ConnectionFailed
-         rescue Net::ReadTimeout
-         end
-
-         status
-       end
-
-       def get_analysis_status_and_json(analysis_id, analysis_type)
-         status = nil
-         j = nil
-
-         # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
-         # race condition when the analysis state changes.
-         unless analysis_id.nil?
-           resp = @conn.get "analyses/#{analysis_id}/status.json"
-           if resp.status == 200
-             j = JSON.parse resp.body, symbolize_names: true
-             if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
-               status = j[:analysis][:status]
-             end
-           end
-         end
-
-         [status, j]
-       end
-
-       # return the data point results in JSON format
-       def get_analysis_results(analysis_id)
-         analysis = nil
-
-         response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
-         if response.status == 200
-           analysis = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
-         end
-
-         analysis
-       end
-
-       def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
-         downloaded = false
-         file_path_and_name = nil
-
-         response = @conn.get do |r|
-           r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
-           r.options.timeout = 3600 # 60 minutes
-         end
-         if response.status == 200
-           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
-           downloaded = true
-           file_path_and_name = "#{save_directory}/#{filename}"
-           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
-           if format == 'rdata'
-             File.open(file_path_and_name, 'wb') { |f| f << response.body }
-           else
-             File.open(file_path_and_name, 'w') { |f| f << response.body }
-           end
-         end
-
-         [downloaded, file_path_and_name]
-       end
-
-       def download_variables(analysis_id, format = 'rdata', save_directory = '.')
-         downloaded = false
-         file_path_and_name = nil
-
-         response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
-         if response.status == 200
-           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
-           downloaded = true
-           file_path_and_name = "#{save_directory}/#{filename}"
-           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
-           if format == 'rdata'
-             File.open(file_path_and_name, 'wb') { |f| f << response.body }
-           else
-             File.open(file_path_and_name, 'w') { |f| f << response.body }
-           end
-         end
-
-         [downloaded, file_path_and_name]
-       end
-
-       def download_datapoint(datapoint_id, save_directory = '.')
-         downloaded = false
-         file_path_and_name = nil
-
-         response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=data_point.zip"
-         if response.status == 200
-           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
-           downloaded = true
-           file_path_and_name = "#{save_directory}/#{filename}"
-           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
-           File.open(file_path_and_name, 'wb') { |f| f << response.body }
-         else
-           response = @conn.get "/data_points/#{datapoint_id}/download"
-           if response.status == 200
-             filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
-             downloaded = true
-             file_path_and_name = "#{save_directory}/#{filename}"
-             puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
-             File.open(file_path_and_name, 'wb') { |f| f << response.body }
-           end
-         end
-
-         [downloaded, file_path_and_name]
-       end
-
-       # Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with
-       # DEnCity reporting, the size is around 325MB
-       def download_database(save_directory = '.')
-         downloaded = false
-         file_path_and_name = nil
-
-         response = @conn.get do |r|
-           r.url '/admin/backup_database?full_backup=true'
-           r.options.timeout = 3600 # 60 minutes
-         end
-
-         if response.status == 200
-           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
-           downloaded = true
-           file_path_and_name = "#{save_directory}/#{filename}"
-           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
-           File.open(file_path_and_name, 'wb') { |f| f << response.body }
-         end
-
-         [downloaded, file_path_and_name]
-       end
-
-       # http://localhost:3000/data_points/ff857845-a4c6-4eb9-a52b-cbc6a41976d5/download_result_file?filename=
-       def download_datapoint_report(datapoint_id, report_name, save_directory = '.')
-         downloaded = false
-         file_path_and_name = nil
-
-         response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=#{report_name}"
-         if response.status == 200
-           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
-           downloaded = true
-           file_path_and_name = "#{save_directory}/#{filename}"
-           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
-           File.open(file_path_and_name, 'wb') { |f| f << response.body }
-         end
-
-         [downloaded, file_path_and_name]
-       end
-
-       def download_datapoint_jsons(analysis_id, save_directory = '.')
-         # get the list of all the datapoints
-         dps = get_datapoint_status(analysis_id)
-         dps.each do |dp|
-           if dp[:status] == 'completed'
-             dp_h = get_datapoint(dp[:_id])
-             File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
-           end
-         end
-       end
-
-       def datapoint_dencity(datapoint_id)
-         # Return the JSON (Full) of the datapoint
-         data_point = nil
-
-         resp = @conn.get "/data_points/#{datapoint_id}/dencity.json"
-         if resp.status == 200
-           data_point = JSON.parse resp.body, symbolize_names: true
-         end
-
-         data_point
-       end
-
-       def analysis_dencity_json(analysis_id)
-         # Return the hash of the dencity format for the analysis
-         dencity = nil
-
-         resp = @conn.get "/analyses/#{analysis_id}/dencity.json"
-         if resp.status == 200
-           dencity = JSON.parse resp.body, symbolize_names: true
-         end
-
-         dencity
-       end
-
-       def download_dencity_json(analysis_id, save_directory = '.')
-         a_h = analysis_dencity_json(analysis_id)
-         if a_h
-           File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') { |f| f << JSON.pretty_generate(a_h) }
-         end
-       end
-
-       def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
-         # get the list of all the datapoints
-         dps = get_datapoint_status(analysis_id)
-         dps.each do |dp|
-           if dp[:status] == 'completed'
-             dp_h = datapoint_dencity(dp[:_id])
-             File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
-           end
-         end
-       end
-
-       def new_analysis(project_id, options)
-         defaults = {
-           analysis_name: nil,
-           reset_uuids: false,
-           push_to_dencity: false
-         }
-         options = defaults.merge(options)
-
-         raise 'No project id passed' if project_id.nil?
-
-         formulation_json = nil
-         if options[:formulation_file]
-           raise "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
-           formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
-         end
-
-         # read in the analysis id from the analysis.json file
-         analysis_id = nil
-         if formulation_json
-           if options[:reset_uuids]
-             analysis_id = SecureRandom.uuid
-             formulation_json[:analysis][:uuid] = analysis_id
-
-             formulation_json[:analysis][:problem][:workflow].each do |wf|
-               wf[:uuid] = SecureRandom.uuid
-               wf[:arguments]&.each do |arg|
-                 arg[:uuid] = SecureRandom.uuid
-               end
-               wf[:variables]&.each do |var|
-                 var[:uuid] = SecureRandom.uuid
-                 var[:argument][:uuid] = SecureRandom.uuid if var[:argument]
-               end
-             end
-           else
-             analysis_id = formulation_json[:analysis][:uuid]
-           end
-
-           # set the analysis name
-           formulation_json[:analysis][:name] = (options[:analysis_name]).to_s unless options[:analysis_name].nil?
-         else
-           formulation_json = {
-             analysis: options
-           }
-           puts formulation_json
-           analysis_id = SecureRandom.uuid
-           formulation_json[:analysis][:uuid] = analysis_id
-         end
-         raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?
-
-         # save out this file to compare
-         # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
-
-         response = @conn.post do |req|
-           req.url "projects/#{project_id}/analyses.json"
-           req.headers['Content-Type'] = 'application/json'
-           req.body = formulation_json.to_json
-           req.options[:timeout] = 600 # seconds
-         end
-
-         if response.status == 201
-           puts "asked to create analysis with #{analysis_id}"
-           # puts resp.inspect
-           analysis_id = JSON.parse(response.body)['_id']
-           puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
-           upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
-           puts "new analysis created with ID: #{analysis_id}"
-         else
-           raise 'Could not create new analysis'
-         end
-
-         # check if we need to upload the analysis zip file
-         if options[:upload_file]
-           raise "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])
-
-           payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
-           response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
-             req.options[:timeout] = 1800 # seconds
-           end
-
-           if response.status == 201
-             puts 'Successfully uploaded ZIP file'
-           else
-             raise response.inspect
-           end
-         end
-
-         analysis_id
-       end
-
-       def upload_to_dencity(analysis_uuid, analysis)
-         require 'dencity'
-         puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
-         conn = Dencity.connect
-         raise "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
-         begin
-           r = conn.login
-         rescue Faraday::ParsingError => user_id_failure
-           raise "Error in user_id field: #{user_id_failure.message}"
-         rescue MultiJson::ParseError => authentication_failure
-           raise "Error in attempted authentication: #{authentication_failure.message}"
-         end
-         user_uuid = r.id
-
-         # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
-         # Check that the analysis has not yet been registered with the DEnCity instance.
-         # TODO: This should be simplified with a 'retrieve_analysis_by_user_defined_id' method in the future
-         user_analyses = []
-         r = conn.dencity_get 'analyses'
-         runner.registerError('Unable to retrieve analyses from DEnCity server') unless r['status'] == 200
-         r['data'].each do |dencity_analysis|
-           user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
-         end
-         found_analysis_uuid = false
-         user_analyses.each do |dencity_analysis_id|
-           dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
-           if dencity_analysis['user_defined_id'] == analysis_uuid
-             found_analysis_uuid = true
-             break
-           end
-         end
-         raise "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
-         dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)
-
-         # Write the analysis DEnCity hash to dencity_analysis.json
-         f = File.new('dencity_analysis.json', 'wb')
-         f.write(JSON.pretty_generate(dencity_hash))
-         f.close
-
-         # Upload the processed analysis json.
-         upload = conn.load_analysis 'dencity_analysis.json'
-         begin
-           upload_response = upload.push
-         rescue StandardError => e
-           runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
-         else
-           if NoMethodError == upload_response.class
-             raise "ERROR: Server responded with a NoMethodError: #{upload_response}"
-           end
-           if upload_response.status.to_s[0] == '2'
-             puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
-           else
-             puts 'ERROR: Server returned a non-20x status. Response below.'
-             puts upload_response
-             raise
-           end
-         end
-       end
-
-       # Upload a single datapoint
-       # @param analysis_id [String] Analysis ID to attach the datapoint to
-       # @param options [Hash] Options
-       # @option options [String] :datapoint_file Path to datapoint JSON to upload
-       # @option options [Boolean] :reset_uuids Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.
-       def upload_datapoint(analysis_id, options)
-         defaults = { reset_uuids: false }
-         options = defaults.merge(options)
-
-         raise 'No analysis id passed' if analysis_id.nil?
-         raise 'No datapoint file passed to upload_datapoint' unless options[:datapoint_file]
-         raise "No datapoint_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])
-
-         dp_hash = JSON.parse(File.open(options[:datapoint_file]).read, symbolize_names: true)
-
-         # There are two instances of the analysis ID. There is one in the file,
-         # and the other is in the POST url. Ideally remove the version in the
-         # file and support only the URL-based analysis_id
-         dp_hash[:analysis_uuid] = analysis_id
-
-         if options[:reset_uuids]
-           dp_hash[:uuid] = SecureRandom.uuid
-         end
-
-         # merge in the analysis_id as it has to be what is in the database
-         response = @conn.post do |req|
-           req.url "analyses/#{analysis_id}/data_points.json"
-           req.headers['Content-Type'] = 'application/json'
-           req.body = dp_hash.to_json
-         end
-
-         if response.status == 201
-           puts "new datapoints created for analysis #{analysis_id}"
-           return JSON.parse(response.body, symbolize_names: true)
-         else
-           raise "could not create new datapoints #{response.body}"
-         end
-       end
-
-       # Upload multiple data points to the server.
-       # @param analysis_id [String] Analysis ID to attach the datapoints to
-       def upload_datapoints(analysis_id, options)
-         defaults = {}
-         options = defaults.merge(options)
-
-         raise 'No analysis id passed' if analysis_id.nil?
-         raise 'No datapoints file passed to upload_datapoints' unless options[:datapoints_file]
-         raise "No datapoints_file exists #{options[:datapoints_file]}" unless File.exist?(options[:datapoints_file])
-
-         dp_hash = JSON.parse(File.open(options[:datapoints_file]).read, symbolize_names: true)
-
-         # merge in the analysis_id as it has to be what is in the database
-         response = @conn.post do |req|
-           req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
-           req.headers['Content-Type'] = 'application/json'
-           req.body = dp_hash.to_json
-         end
-
-         if response.status == 201
-           puts "new datapoints created for analysis #{analysis_id}"
-         else
-           raise "could not create new datapoints #{response.body}"
-         end
-       end
-
-       def start_analysis(analysis_id, options)
-         defaults = { analysis_action: 'start', without_delay: false }
-         options = defaults.merge(options)
-
-         puts "Run analysis is configured with #{options.to_json}"
-         response = @conn.post do |req|
-           req.url "analyses/#{analysis_id}/action.json"
-           req.headers['Content-Type'] = 'application/json'
-           req.body = options.to_json
-           req.options[:timeout] = 1800 # seconds
-         end
-
-         if response.status == 200
-           puts "Received request to run analysis #{analysis_id}"
-         else
-           raise 'Could not start the analysis'
-         end
-       end
-
-       # Kill the analysis
-       # @param analysis_id [String] Analysis ID to stop
-       def kill_analysis(analysis_id)
-         analysis_action = { analysis_action: 'stop' }
-
-         response = @conn.post do |req|
-           req.url "analyses/#{analysis_id}/action.json"
-           req.headers['Content-Type'] = 'application/json'
-           req.body = analysis_action.to_json
-         end
-
-         if response.status == 200
-           puts "Killed analysis #{analysis_id}"
-         end
-       end
-
-       def kill_all_analyses
-         project_ids = get_project_ids
-         puts "List of project ids: #{project_ids}"
-
-         project_ids.each do |project_id|
-           analysis_ids = get_analyses(project_id)
-           puts analysis_ids
-           analysis_ids.each do |analysis_id|
-             puts "Trying to kill #{analysis_id}"
-             kill_analysis(analysis_id)
-           end
-         end
-       end
-
-       # Get a list of analyses and the data points
-       #
-       # @param analysis_id [String] An analysis ID
-       def data_point_status(analysis_id = nil)
-         data_points = nil
-         call_string = nil
-         if analysis_id
-           call_string = "analyses/#{analysis_id}/status.json"
-         else
-           call_string = 'analyses/status.json'
-         end
-
-         resp = @conn.get call_string, version: 2
-         if resp.status == 200
-           data_points = JSON.parse(resp.body, symbolize_names: true)[:analyses]
-         end
-
-         data_points
-       end
-
-       # This is the former version of get data point status. The new version is preferred and allows for
-       # checking data points across all analyses.
-       def get_datapoint_status(analysis_id, filter = nil)
-         data_points = nil
-         # get the status of the entire analysis
-         unless analysis_id.nil?
-           if filter.nil? || filter == ''
-             resp = @conn.get "analyses/#{analysis_id}/status.json"
-             if resp.status == 200
-               data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
-             end
-           else
-             resp = @conn.get "analyses/#{analysis_id}/status.json", jobs: filter
-             if resp.status == 200
-               data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
-             end
-           end
-         end
-
-         data_points
-       end
-
-       # Return the JSON (Full) of the datapoint
-       def get_datapoint(data_point_id)
-         data_point = nil
-
-         resp = @conn.get "/data_points/#{data_point_id}.json"
-         if resp.status == 200
-           data_point = JSON.parse resp.body, symbolize_names: true
-         end
-
-         data_point
-       end
-
-       # Submit a generic analysis. This will use the options that are configured in the JSON file, including
-       # the analysis type and options. Note that this may not work for all cases where multiple analyses need to run
-       # (e.g. single_run, queue_model, lhs)
-       #
-       # @param formulation_filename [String] FQP to the formulation file
-       # @param analysis_zip_filename [String] FQP to the zip file with the supporting files
-       def run_file(formulation_filename, analysis_zip_filename)
-         # parse the JSON file to grab the analysis type
-         j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
-         analysis_type = j[:analysis][:problem][:analysis_type]
-
-         run(formulation_filename, analysis_zip_filename, analysis_type)
-       end
-
-       # Submit the analysis for running via the API
-       #
-       # @param formulation_filename [String] Name of the analysis.json file
-       # @param analysis_zip_filename [String] Name of the analysis.zip file
-       # @param analysis_type [String] Type of analysis to run
-       # @param options [Hash] Hash of options
-       # @option options [String] :run_data_point_filename Name of ruby file that the server runs -- will be deprecated
-       # @option options [String] :push_to_dencity Whether or not to push to DEnCity
-       # @option options [String] :batch_run_method Which batch run method to use (batch_run or batch_run_local [no R])
-       def run(formulation_filename, analysis_zip_filename, analysis_type, options = {})
-         defaults = {
-           run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
-           push_to_dencity: false,
-           batch_run_method: 'batch_run',
-           without_delay: false
-         }
-         options = defaults.merge(options)
-
-         project_options = {}
-         project_id = new_project(project_options)
-
-         analysis_options = {
-           formulation_file: formulation_filename,
-           upload_file: analysis_zip_filename,
-           reset_uuids: true,
-           push_to_dencity: options[:push_to_dencity]
-         }
-
-         analysis_id = new_analysis(project_id, analysis_options)
-
-         run_options = {
-           analysis_action: 'start',
-           without_delay: options[:without_delay],
-           analysis_type: analysis_type,
-           simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
-           run_data_point_filename: options[:run_data_point_filename]
-         }
-         start_analysis(analysis_id, run_options)
-
-         # If the analysis is a staged analysis, then go ahead and run batch run
-         # because there is no explicit way to tell the system to do it
-         if BATCH_RUN_METHODS.include? analysis_type
-           run_options = {
-             analysis_action: 'start',
-             without_delay: false,
-             analysis_type: options[:batch_run_method],
-             simulate_data_point_filename: 'simulate_data_point.rb',
-             run_data_point_filename: options[:run_data_point_filename]
-           }
-           start_analysis(analysis_id, run_options)
-         end
-
-         analysis_id
-       end
-
-       def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
-                            run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
-         project_options = {}
-         project_id = new_project(project_options)
-
-         analysis_options = {
-           formulation_file: formulation_filename,
-           upload_file: analysis_zip_filename,
-           reset_uuids: true
-         }
-         analysis_id = new_analysis(project_id, analysis_options)
-
-         run_options = {
-           analysis_action: 'start',
-           without_delay: false,
-           analysis_type: analysis_type,
-           simulate_data_point_filename: 'simulate_data_point.rb',
-           run_data_point_filename: run_data_point_filename
-         }
-         start_analysis(analysis_id, run_options)
-
-         analysis_id
-       end
-
-       def run_batch_run_across_analyses
-         project_options = {}
-         project_id = new_project(project_options)
-
-         analysis_options = {
-           formulation_file: nil,
-           upload_file: nil,
-           reset_uuids: true
-         }
-         analysis_id = new_analysis(project_id, analysis_options)
-
-         run_options = {
-           analysis_action: 'start',
-           without_delay: false,
-           analysis_type: 'batch_run_analyses',
-           simulate_data_point_filename: 'simulate_data_point.rb',
-           run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-         }
-         start_analysis(analysis_id, run_options)
-
-         analysis_id
-       end
-     end
-   end
- end
+ # *******************************************************************************
+ # OpenStudio(R), Copyright (c) Alliance for Sustainable Energy, LLC.
+ # See also https://openstudio.net/license
+ # *******************************************************************************
+
+ # This class manages the communication with the server.
+ # Presently, this class is simple and stores all information in hashes
+ module OpenStudio
+   module Analysis
+     class ServerApi
+       attr_reader :hostname
+
+       # Define the set of analysis methods that require batch_run to be queued after them
+       BATCH_RUN_METHODS = ['lhs', 'preflight', 'single_run', 'repeat_run', 'doe', 'diag', 'baseline_perturbation', 'batch_datapoints'].freeze
+
+       def initialize(options = {})
+         defaults = { hostname: 'http://localhost:8080', log_path: File.expand_path('~/os_server_api.log') }
+         options = defaults.merge(options)
+         if ENV['OS_SERVER_LOG_PATH']
+           @logger = ::Logger.new(ENV['OS_SERVER_LOG_PATH'] + '/os_server_api.log')
+         else
+           @logger = ::Logger.new(options[:log_path])
+         end
+
+         @hostname = options[:hostname]
+
+         raise 'no host defined for server api class' if @hostname.nil?
+
+         # TODO: add support for the proxy
+
+         # create connection with basic capabilities
+         @conn = Faraday.new(url: @hostname) do |faraday|
+           faraday.request :url_encoded # form-encode POST params
+           faraday.options.timeout = 300
+           faraday.options.open_timeout = 300
+           faraday.options.write_timeout = 1800
+           faraday.use Faraday::Response::Logger, @logger
+           # faraday.response @logger # log requests to STDOUT
+           faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
+         end
+
+         # create connection to server api with multipart capabilities
+         @conn_multipart = Faraday.new(url: @hostname) do |faraday|
+           faraday.request :multipart
+           faraday.request :url_encoded # form-encode POST params
+           faraday.options.timeout = 300
+           faraday.options.open_timeout = 300
+           faraday.options.write_timeout = 1800
+           faraday.use Faraday::Response::Logger, @logger
+           # faraday.response :logger # log requests to STDOUT
+           faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
+         end
+       end
+
+       def get_projects
+         response = @conn.get '/projects.json'
+
+         projects_json = nil
+         if response.status == 200
+           projects_json = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
+         else
+           raise 'did not receive a 200 in get_projects'
+         end
+
+         projects_json
+       end
+
+       def get_project_ids
+         ids = get_projects
+         ids.map { |project| project[:uuid] }
+       end
+
+       def delete_project(id)
+         deleted = false
+         response = @conn.delete "/projects/#{id}.json"
+         if response.status == 204
+           puts "Successfully deleted project #{id}"
+           deleted = true
+         else
+           puts "ERROR deleting project #{id}"
+           deleted = false
+         end
+
+         deleted
+       end
+
+       def delete_all
+         ids = get_project_ids
+         puts "deleting projects with IDs: #{ids}"
+         success = true
+         ids.each do |id|
+           r = delete_project id
+           success = false if r == false
+         end
+
+         success
+       end
+
+       def new_project(options = {})
+         defaults = { project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
+         options = defaults.merge(options)
+         project_id = nil
+
+         # TODO: make this a display name and a machine name
+         project_hash = { project: { name: (options[:project_name]).to_s } }
+         begin
+           response = @conn.post do |req|
+             req.url '/projects.json'
+             req.headers['Content-Type'] = 'application/json'
+             req.body = project_hash.to_json
+           end
+           puts "response.status: #{response.status}"
+           puts response.inspect
+         rescue Net::OpenTimeout => e
+           puts "new_project OpenTimeout: #{e.message}"
+         end
+         if response.status == 201
+           project_id = JSON.parse(response.body)['_id']
+
+           puts "new project created with ID: #{project_id}"
+         # grab the project id
+         elsif response.status == 500
+           puts '500 Error'
+           puts response.inspect
+         end
+
+         project_id
+       end
+
+       def get_analyses(project_id)
+         analysis_ids = []
+         response = @conn.get "/projects/#{project_id}.json"
+         if response.status == 200
+           analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
+           analyses[:analyses]&.each do |analysis|
+             analysis_ids << analysis[:_id]
+           end
+         end
+
+         analysis_ids
+       end
+
+       def get_analyses_detailed(project_id)
+         analyses = nil
+         response = @conn.get "/projects/#{project_id}.json"
+         if response.status == 200
+           analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
+         end
+
+         analyses
+       end
+
+       # return the entire analysis JSON
+       def get_analysis(analysis_id)
+         result = nil
+         response = @conn.get "/analyses/#{analysis_id}.json"
+         if response.status == 200
+           result = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
+         end
+
+         result
+       end
+
+       # Check the status of the simulation. Format should be:
+       # {
+       #   analysis: {
+       #     status: "completed",
+       #     analysis_type: "batch_run"
+       #   },
+       #   data_points: [
+       #     {
+       #       _id: "bbd57e90-ce59-0131-35de-080027880ca6",
+       #       status: "completed"
+       #     }
+       #   ]
+       # }
+       def get_analysis_status(analysis_id, analysis_type)
+         status = nil
+
+         # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
+         # race condition when the analysis state changes.
+         unless analysis_id.nil?
+           resp = @conn.get "analyses/#{analysis_id}/status.json"
+           if resp.status == 200
+             j = JSON.parse resp.body, symbolize_names: true
+             if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
+               status = j[:analysis][:status]
+             elsif j && j[:analysis] && analysis_type == 'batch_run'
+               status = j[:analysis][:status]
+             end
+           end
+         end
+
+         status
+       end
+
+       # Check if the machine is alive
+       #
+       # @return [Boolean] true if the machine has an awake value set
+       def alive?
+         m = machine_status
+
+         m = !m[:status][:awake].nil? if m
+
+         m
+       end
+
+       # Retrieve the machine status
+       #
+       # @return [Hash]
+       def machine_status
+         status = nil
+
+         begin
+           resp = @conn.get do |req|
+             req.url 'status.json'
+             req.options.timeout = 300
+             req.options.open_timeout = 300
+           end
+           puts "machine_status resp.status: #{resp.status}"
+           puts resp.inspect
+           if resp.status == 200
+             j = JSON.parse resp.body, symbolize_names: true
+             status = j if j
+           end
+         rescue Faraday::ConnectionFailed => e
+           puts "machine_status ConnectionFailed: #{e.message}"
+         rescue Net::ReadTimeout => e
+           puts "machine_status ReadTimeout: #{e.message}"
+         end
+
+         status
+       end
+
+       def get_analysis_status_and_json(analysis_id, analysis_type)
+         status = nil
+         j = nil
+
+         # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
+         # race condition when the analysis state changes.
+         unless analysis_id.nil?
+           resp = @conn.get "analyses/#{analysis_id}/status.json"
+           if resp.status == 200
+             j = JSON.parse resp.body, symbolize_names: true
+             if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
+               status = j[:analysis][:status]
+             end
+           end
+         end
+
+         [status, j]
+       end
+
+       # return the data point results in JSON format
+       def get_analysis_results(analysis_id)
+         analysis = nil
+
+         response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
+         if response.status == 200
+           analysis = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
+         end
+
+         analysis
+       end
+
+       def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
+         downloaded = false
+         file_path_and_name = nil
+
+         response = @conn.get do |r|
+           r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
+           r.options.timeout = 3600 # 60 minutes
+         end
+         if response.status == 200
+           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+           downloaded = true
+           file_path_and_name = "#{save_directory}/#{filename}"
+           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+           if format == 'rdata'
+             File.open(file_path_and_name, 'wb') { |f| f << response.body }
+           else
+             File.open(file_path_and_name, 'w') { |f| f << response.body }
+           end
+         end
+
+         [downloaded, file_path_and_name]
+       end
+
+       def download_variables(analysis_id, format = 'rdata', save_directory = '.')
+         downloaded = false
+         file_path_and_name = nil
+
+         response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
+         if response.status == 200
+           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+           downloaded = true
+           file_path_and_name = "#{save_directory}/#{filename}"
+           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+           if format == 'rdata'
+             File.open(file_path_and_name, 'wb') { |f| f << response.body }
+           else
+             File.open(file_path_and_name, 'w') { |f| f << response.body }
+           end
+         end
+
+         [downloaded, file_path_and_name]
+       end
+
+       def download_datapoint(datapoint_id, save_directory = '.')
+         downloaded = false
+         file_path_and_name = nil
+
+         response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=data_point.zip"
+         if response.status == 200
+           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+           downloaded = true
+           file_path_and_name = "#{save_directory}/#{filename}"
+           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+           File.open(file_path_and_name, 'wb') { |f| f << response.body }
+         else
+           response = @conn.get "/data_points/#{datapoint_id}/download"
+           if response.status == 200
+             filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+             downloaded = true
+             file_path_and_name = "#{save_directory}/#{filename}"
+             puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+             File.open(file_path_and_name, 'wb') { |f| f << response.body }
+           end
+         end
+
+         [downloaded, file_path_and_name]
+       end
+
+       # Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with
+       # DEnCity reporting, the size is around 325MB
+       def download_database(save_directory = '.')
+         downloaded = false
+         file_path_and_name = nil
+
+         response = @conn.get do |r|
+           r.url '/admin/backup_database?full_backup=true'
+           r.options.timeout = 3600 # 60 minutes
+         end
+
+         if response.status == 200
+           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+           downloaded = true
+           file_path_and_name = "#{save_directory}/#{filename}"
+           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+           File.open(file_path_and_name, 'wb') { |f| f << response.body }
+         end
+
+         [downloaded, file_path_and_name]
+       end
+
+       # http://localhost:3000/data_points/ff857845-a4c6-4eb9-a52b-cbc6a41976d5/download_result_file?filename=
+       def download_datapoint_report(datapoint_id, report_name, save_directory = '.')
+         downloaded = false
+         file_path_and_name = nil
+
+         response = @conn.get "/data_points/#{datapoint_id}/download_result_file?filename=#{report_name}"
+         if response.status == 200
+           filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+           downloaded = true
+           file_path_and_name = "#{save_directory}/#{filename}"
+           puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+           File.open(file_path_and_name, 'wb') { |f| f << response.body }
+         end
+
+         [downloaded, file_path_and_name]
+       end
+
+       def download_datapoint_jsons(analysis_id, save_directory = '.')
+         # get the list of all the datapoints
+         dps = get_datapoint_status(analysis_id)
+         dps.each do |dp|
+           if dp[:status] == 'completed'
+             dp_h = get_datapoint(dp[:_id])
+             File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
+           end
+         end
+       end
+
+       def datapoint_dencity(datapoint_id)
+         # Return the JSON (Full) of the datapoint
+         data_point = nil
+
+         resp = @conn.get "/data_points/#{datapoint_id}/dencity.json"
+         if resp.status == 200
+           data_point = JSON.parse resp.body, symbolize_names: true
+         end
+
+         data_point
+       end
+
+       def analysis_dencity_json(analysis_id)
+         # Return the hash of the dencity format for the analysis
+         dencity = nil
+
+         resp = @conn.get "/analyses/#{analysis_id}/dencity.json"
+         if resp.status == 200
+           dencity = JSON.parse resp.body, symbolize_names: true
+         end
+
+         dencity
+       end
+
+       def download_dencity_json(analysis_id, save_directory = '.')
+         a_h = analysis_dencity_json(analysis_id)
+         if a_h
+           File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') { |f| f << JSON.pretty_generate(a_h) }
+         end
+       end
+
+       def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
+         # get the list of all the datapoints
+         dps = get_datapoint_status(analysis_id)
+         dps.each do |dp|
+           if dp[:status] == 'completed'
+             dp_h = datapoint_dencity(dp[:_id])
+             File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
+           end
+         end
+       end
+
+       def new_analysis(project_id, options)
+         defaults = {
+           analysis_name: nil,
+           reset_uuids: false,
+           push_to_dencity: false
+         }
+         options = defaults.merge(options)
+
+         raise 'No project id passed' if project_id.nil?
+
+         formulation_json = nil
+         if options[:formulation_file]
+           raise "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
+           formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
+         end
+
+         # read in the analysis id from the analysis.json file
+         analysis_id = nil
+         if formulation_json
+           if options[:reset_uuids]
+             analysis_id = SecureRandom.uuid
+             formulation_json[:analysis][:uuid] = analysis_id
+
+             formulation_json[:analysis][:problem][:workflow].each do |wf|
+               wf[:uuid] = SecureRandom.uuid
+               wf[:arguments]&.each do |arg|
+                 arg[:uuid] = SecureRandom.uuid
+               end
+               wf[:variables]&.each do |var|
+                 var[:uuid] = SecureRandom.uuid
+                 var[:argument][:uuid] = SecureRandom.uuid if var[:argument]
+               end
+             end
+           else
+             analysis_id = formulation_json[:analysis][:uuid]
+           end
+
+           # set the analysis name
+           formulation_json[:analysis][:name] = (options[:analysis_name]).to_s unless options[:analysis_name].nil?
+         else
+           formulation_json = {
+             analysis: options
+           }
+           puts formulation_json
+           analysis_id = SecureRandom.uuid
+           formulation_json[:analysis][:uuid] = analysis_id
+         end
+         raise "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?
+
+         # save out this file to compare
+         # File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
+
+         response = @conn.post do |req|
+           req.url "projects/#{project_id}/analyses.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = formulation_json.to_json
+           req.options.timeout = 600 # seconds
+           req.options.write_timeout = 1800
+         end
+
+         if response.status == 201
+           puts "asked to create analysis with #{analysis_id}"
+           # puts resp.inspect
+           analysis_id = JSON.parse(response.body)['_id']
+           puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
+           upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
+           puts "new analysis created with ID: #{analysis_id}"
+         else
+           raise 'Could not create new analysis'
+         end
+
+         # check if we need to upload the analysis zip file
+         if options[:upload_file]
+           raise "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])
+
+           payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
+           response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
+             req.options.timeout = 1800 # seconds
+             req.options.write_timeout = 1800
+           end
+
+           if response.status == 201
+             puts 'Successfully uploaded ZIP file'
+           else
+             raise response.inspect
+           end
+         end
+
+         analysis_id
+       end
+
+       def upload_to_dencity(analysis_uuid, analysis)
+         require 'dencity'
+         puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
+         conn = Dencity.connect
+         raise "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
+         begin
+           r = conn.login
+         rescue Faraday::ParsingError => user_id_failure
+           raise "Error in user_id field: #{user_id_failure.message}"
+         rescue MultiJson::ParseError => authentication_failure
+           raise "Error in attempted authentication: #{authentication_failure.message}"
+         end
+         user_uuid = r.id
+
+         # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
+         # Check that the analysis has not yet been registered with the DEnCity instance.
+         # TODO: This should be simplified with a 'retrieve_analysis_by_user_defined_id' method in the future
+         user_analyses = []
+         r = conn.dencity_get 'analyses'
+         runner.registerError('Unable to retrieve analyses from DEnCity server') unless r['status'] == 200
+         r['data'].each do |dencity_analysis|
+           user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
+         end
+         found_analysis_uuid = false
+         user_analyses.each do |dencity_analysis_id|
+           dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
+           if dencity_analysis['user_defined_id'] == analysis_uuid
+             found_analysis_uuid = true
+             break
+           end
+         end
+         raise "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
+         dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)
+
+         # Write the analysis DEnCity hash to dencity_analysis.json
+         f = File.new('dencity_analysis.json', 'wb')
+         f.write(JSON.pretty_generate(dencity_hash))
+         f.close
+
+         # Upload the processed analysis json.
+         upload = conn.load_analysis 'dencity_analysis.json'
+         begin
+           upload_response = upload.push
+         rescue StandardError => e
+           runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
+         else
+           if NoMethodError == upload_response.class
+             raise "ERROR: Server responded with a NoMethodError: #{upload_response}"
+           end
+           if upload_response.status.to_s[0] == '2'
+             puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
+           else
+             puts 'ERROR: Server returned a non-20x status. Response below.'
+             puts upload_response
+             raise
+           end
+         end
+       end
+
+       # Upload a single datapoint
+       # @param analysis_id [String] Analysis ID to attach the datapoint to
+       # @param options [Hash] Options
+       # @option options [String] :datapoint_file Path to datapoint JSON to upload
+       # @option options [Boolean] :reset_uuids Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.
+       def upload_datapoint(analysis_id, options)
+         defaults = { reset_uuids: false }
+         options = defaults.merge(options)
+
+         raise 'No analysis id passed' if analysis_id.nil?
+         raise 'No datapoint file passed to upload_datapoint' unless options[:datapoint_file]
+         raise "No datapoint_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])
+
+         dp_hash = JSON.parse(File.open(options[:datapoint_file]).read, symbolize_names: true)
+
+         # There are two instances of the analysis ID. There is one in the file,
+         # and the other is in the POST url. Ideally remove the version in the
+         # file and support only the URL-based analysis_id
+         dp_hash[:analysis_uuid] = analysis_id
+
+         if options[:reset_uuids]
+           dp_hash[:uuid] = SecureRandom.uuid
+         end
+
+         # merge in the analysis_id as it has to be what is in the database
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/data_points.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = dp_hash.to_json
+         end
+
+         if response.status == 201
+           puts "new datapoints created for analysis #{analysis_id}"
+           return JSON.parse(response.body, symbolize_names: true)
+         else
+           raise "could not create new datapoints #{response.body}"
+         end
+       end
+
+       # Upload multiple data points to the server.
+       # @param analysis_id [String] Analysis ID to attach the datapoints to
+       def upload_datapoints(analysis_id, options)
+         defaults = {}
+         options = defaults.merge(options)
+
+         raise 'No analysis id passed' if analysis_id.nil?
+         raise 'No datapoints file passed to upload_datapoints' unless options[:datapoints_file]
+         raise "No datapoints_file exists #{options[:datapoints_file]}" unless File.exist?(options[:datapoints_file])
+
+         dp_hash = JSON.parse(File.open(options[:datapoints_file]).read, symbolize_names: true)
+
+         # merge in the analysis_id as it has to be what is in the database
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/data_points/batch_upload.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = dp_hash.to_json
+         end
+
+         if response.status == 201
+           puts "new datapoints created for analysis #{analysis_id}"
+         else
+           raise "could not create new datapoints #{response.body}"
+         end
+       end
+
+       def start_analysis(analysis_id, options)
+         defaults = { analysis_action: 'start', without_delay: false }
+         options = defaults.merge(options)
+
+         puts "Run analysis is configured with #{options.to_json}"
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/action.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = options.to_json
+           req.options.timeout = 1800 # seconds
+           req.options.write_timeout = 1800
+         end
+
+         if response.status == 200
+           puts "Received request to run analysis #{analysis_id}"
+         else
+           raise 'Could not start the analysis'
+         end
+       end
+
+       # Kill the analysis
+       # @param analysis_id [String] Analysis ID to stop
+       def kill_analysis(analysis_id)
+         analysis_action = { analysis_action: 'stop' }
+
+         response = @conn.post do |req|
+           req.url "analyses/#{analysis_id}/action.json"
+           req.headers['Content-Type'] = 'application/json'
+           req.body = analysis_action.to_json
+         end
+
+         if response.status == 200
+           puts "Killed analysis #{analysis_id}"
+         end
+       end
+
+       def kill_all_analyses
+         project_ids = get_project_ids
+         puts "List of project ids: #{project_ids}"
+
+         project_ids.each do |project_id|
+           analysis_ids = get_analyses(project_id)
+           puts analysis_ids
+           analysis_ids.each do |analysis_id|
+             puts "Trying to kill #{analysis_id}"
+             kill_analysis(analysis_id)
+           end
+         end
+       end
+
+       # Get a list of analyses and the data points
+       #
+       # @param analysis_id [String] An analysis ID
+       def data_point_status(analysis_id = nil)
+         data_points = nil
+         call_string = nil
+         if analysis_id
+           call_string = "analyses/#{analysis_id}/status.json"
+         else
+           call_string = 'analyses/status.json'
+         end
+
+         resp = @conn.get call_string, version: 2
+         if resp.status == 200
+           data_points = JSON.parse(resp.body, symbolize_names: true)[:analyses]
+         end
+
+         data_points
+       end
+
+       # This is the former version of get data point status. The new version is preferred and allows for
+       # checking data points across all analyses.
+       def get_datapoint_status(analysis_id, filter = nil)
+         data_points = nil
+         # get the status of the entire analysis
+         unless analysis_id.nil?
+           if filter.nil? || filter == ''
+             resp = @conn.get "analyses/#{analysis_id}/status.json"
+             if resp.status == 200
+               data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
+             end
+           else
+             resp = @conn.get "analyses/#{analysis_id}/status.json", jobs: filter
+             if resp.status == 200
+               data_points = JSON.parse(resp.body, symbolize_names: true)[:analysis][:data_points]
+             end
+           end
+         end
+
+         data_points
+       end
+
+       # Return the JSON (Full) of the datapoint
+       def get_datapoint(data_point_id)
+         data_point = nil
+
+         resp = @conn.get "/data_points/#{data_point_id}.json"
+         if resp.status == 200
+           data_point = JSON.parse resp.body, symbolize_names: true
+         end
+
+         data_point
+       end
+
+       # Submit a generic analysis. This will use the options that are configured in the JSON file, including
+       # the analysis type and options. Note that this may not work for all cases where multiple analyses need to run
+       # (e.g. single_run, queue_model, lhs)
+       #
+       # @param formulation_filename [String] FQP to the formulation file
+       # @param analysis_zip_filename [String] FQP to the zip file with the supporting files
+       def run_file(formulation_filename, analysis_zip_filename)
+         # parse the JSON file to grab the analysis type
+         j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
+         analysis_type = j[:analysis][:problem][:analysis_type]
+
+         run(formulation_filename, analysis_zip_filename, analysis_type)
+       end
+
+       # Submit the analysis for running via the API
+       #
+       # @param formulation_filename [String] Name of the analysis.json file
+       # @param analysis_zip_filename [String] Name of the analysis.zip file
+       # @param analysis_type [String] Type of analysis to run
+       # @param options [Hash] Hash of options
+       # @option options [String] :run_data_point_filename Name of ruby file that the server runs -- will be deprecated
+       # @option options [String] :push_to_dencity Whether or not to push to DEnCity
+       # @option options [String] :batch_run_method Which batch run method to use (batch_run or batch_run_local [no R])
+       def run(formulation_filename, analysis_zip_filename, analysis_type, options = {})
+         defaults = {
+           run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
+           push_to_dencity: false,
+           batch_run_method: 'batch_run',
+           without_delay: false
+         }
+         options = defaults.merge(options)
+
+         project_options = {}
+         project_id = new_project(project_options)
+
+         analysis_options = {
+           formulation_file: formulation_filename,
+           upload_file: analysis_zip_filename,
+           reset_uuids: true,
+           push_to_dencity: options[:push_to_dencity]
+         }
+
+         analysis_id = new_analysis(project_id, analysis_options)
+
+         run_options = {
+           analysis_action: 'start',
+           without_delay: options[:without_delay],
+           analysis_type: analysis_type,
+           simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
+           run_data_point_filename: options[:run_data_point_filename]
+         }
+         start_analysis(analysis_id, run_options)
+
+         # If the analysis is a staged analysis, then go ahead and run batch run
+         # because there is no explicit way to tell the system to do it
+         if BATCH_RUN_METHODS.include? analysis_type
+           run_options = {
+             analysis_action: 'start',
+             without_delay: false,
+             analysis_type: options[:batch_run_method],
+             simulate_data_point_filename: 'simulate_data_point.rb',
+             run_data_point_filename: options[:run_data_point_filename]
+           }
+           start_analysis(analysis_id, run_options)
+         end
+
+         analysis_id
+       end
+
+       def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
+                            run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
+         project_options = {}
+         project_id = new_project(project_options)
+
+         analysis_options = {
+           formulation_file: formulation_filename,
+           upload_file: analysis_zip_filename,
+           reset_uuids: true
+         }
+         analysis_id = new_analysis(project_id, analysis_options)
+
+         run_options = {
+           analysis_action: 'start',
+           without_delay: false,
+           analysis_type: analysis_type,
+           simulate_data_point_filename: 'simulate_data_point.rb',
+           run_data_point_filename: run_data_point_filename
+         }
+         start_analysis(analysis_id, run_options)
+
+         analysis_id
+       end
+
+       def run_batch_run_across_analyses
+         project_options = {}
+         project_id = new_project(project_options)
+
+         analysis_options = {
+           formulation_file: nil,
+           upload_file: nil,
+           reset_uuids: true
+         }
+         analysis_id = new_analysis(project_id, analysis_options)
+
+         run_options = {
+           analysis_action: 'start',
+           without_delay: false,
+           analysis_type: 'batch_run_analyses',
+           simulate_data_point_filename: 'simulate_data_point.rb',
+           run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+         }
+         start_analysis(analysis_id, run_options)
+
+         analysis_id
+       end
+     end
+   end
+ end
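
For orientation, a minimal usage sketch of the client defined above; the hostname and file names are illustrative, not taken from the package:

    require 'openstudio-analysis'

    api = OpenStudio::Analysis::ServerApi.new(hostname: 'http://localhost:8080')

    if api.alive?
      # 'analysis.json' and 'analysis.zip' are hypothetical inputs produced elsewhere
      analysis_id = api.run('analysis.json', 'analysis.zip', 'single_run')
      puts api.get_analysis_status(analysis_id, 'batch_run')
    end

Because 'single_run' appears in BATCH_RUN_METHODS, run queues a follow-up batch_run action automatically, which is why the status poll above passes the 'batch_run' analysis type.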