openstudio-analysis 0.4.5 → 1.0.0.pat1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +1 -1
- data/CHANGELOG.md +7 -3
- data/Gemfile +1 -0
- data/README.md +53 -47
- data/lib/openstudio/analysis/formulation.rb +1 -1
- data/lib/openstudio/analysis/server_api.rb +143 -246
- data/lib/openstudio/analysis/translator/datapoints.rb +8 -3
- data/lib/openstudio/analysis/translator/excel.rb +16 -4
- data/lib/openstudio/analysis/translator/workflow.rb +106 -0
- data/lib/openstudio/analysis/version.rb +1 -1
- data/lib/openstudio/analysis/workflow_step.rb +5 -2
- data/lib/openstudio/analysis.rb +85 -0
- data/lib/openstudio-analysis.rb +1 -0
- data/openstudio-analysis.gemspec +4 -3
- data/spec/files/workflow/analysis.osa +1334 -0
- data/spec/files/workflow/datapoint_0.osd +63 -0
- data/spec/files/workflow/datapoint_1.osd +63 -0
- data/spec/files/workflow/datapoint_2.osd +63 -0
- data/spec/files/workflow/datapoint_wrong_osa_id.osd +65 -0
- data/spec/openstudio/excel_spec.rb +0 -5
- data/spec/openstudio/osw_spec.rb +618 -0
- metadata +36 -9
data/lib/openstudio/analysis/server_api.rb:

@@ -9,7 +9,7 @@ module OpenStudio
     BATCH_RUN_METHODS = %w(lhs preflight single_run repeat_run doe diag baseline_perturbation batch_datapoints)

     def initialize(options = {})
-      defaults = {
+      defaults = {hostname: 'http://localhost:8080'}
       options = defaults.merge(options)
       @logger = ::Logger.new('faraday.log')

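With the inlined default, a bare constructor call now targets a local server. A minimal usage sketch; the only names assumed beyond this hunk are the standard gem require and a placeholder server URL:

```ruby
require 'openstudio-analysis'

# With no options the client points at the new built-in default,
# http://localhost:8080, instead of requiring an explicit hostname.
api = OpenStudio::Analysis::ServerApi.new

# Any key in the defaults hash can be overridden at construction time.
remote = OpenStudio::Analysis::ServerApi.new(hostname: 'http://openstudio.example.com:8080') # placeholder URL
```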
@@ -82,12 +82,12 @@ module OpenStudio
     end

     def new_project(options = {})
-      defaults = {
+      defaults = {project_name: "Project #{::Time.now.strftime('%Y-%m-%d %H:%M:%S')}"}
       options = defaults.merge(options)
       project_id = nil

       # TODO: make this a display name and a machine name
-      project_hash = {
+      project_hash = {project: {name: "#{options[:project_name]}"}}

       response = @conn.post do |req|
         req.url '/projects.json'
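An options-free call therefore gets a timestamped project name. A hedged sketch of both forms, assuming a reachable server and that the method returns the new project's ID (which the `project_id` handling in this method suggests):

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new

# Default name: "Project YYYY-MM-DD HH:MM:SS", from the strftime format above.
default_project_id = api.new_project

# Explicit name via the :project_name key shown in the defaults hash.
named_project_id = api.new_project(project_name: 'LHS Sensitivity Study')
```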
@@ -195,8 +195,8 @@ module OpenStudio
       begin
         resp = @conn.get do |req|
           req.url 'status.json'
-          req.options.timeout =
-          req.options.open_timeout =
+          req.options.timeout = 120
+          req.options.open_timeout = 120
         end

         if resp.status == 200
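The status poll now pins both Faraday timeouts at 120 seconds. For reference, a standalone sketch of what the two knobs control, using plain Faraday with a placeholder server URL:

```ruby
require 'faraday'

conn = Faraday.new(url: 'http://localhost:8080') # placeholder server

resp = conn.get do |req|
  req.url 'status.json'
  req.options.timeout = 120      # max seconds to wait for the whole response
  req.options.open_timeout = 120 # max seconds to wait for the TCP connection
end

puts resp.status
```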
@@ -405,7 +405,11 @@ module OpenStudio
     end

     def new_analysis(project_id, options)
-      defaults = {
+      defaults = {
+        analysis_name: nil,
+        reset_uuids: false,
+        push_to_dencity: false
+      }
       options = defaults.merge(options)

       fail 'No project id passed' if project_id.nil?
@@ -445,7 +449,7 @@ module OpenStudio
         formulation_json[:analysis][:name] = "#{options[:analysis_name]}" unless options[:analysis_name].nil?
       else
         formulation_json = {
-
+          analysis: options
         }
         puts formulation_json
         analysis_id = SecureRandom.uuid
@@ -460,13 +464,15 @@ module OpenStudio
         req.url "projects/#{project_id}/analyses.json"
         req.headers['Content-Type'] = 'application/json'
         req.body = formulation_json.to_json
+        req.options[:timeout] = 600 # seconds
       end

       if response.status == 201
         puts "asked to create analysis with #{analysis_id}"
         # puts resp.inspect
         analysis_id = JSON.parse(response.body)['_id']
-
+        puts "options[:push_to_dencity] = #{options[:push_to_dencity]}"
+        upload_to_dencity(analysis_id, formulation_json) if options[:push_to_dencity]
         puts "new analysis created with ID: #{analysis_id}"
       else
         fail 'Could not create new analysis'
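Taken together with the new `new_analysis` defaults, the DEnCity push is strictly opt-in. A hedged call sketch; the option keys come from the two hunks above, while the file names and server are placeholders:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new
project_id = api.new_project

analysis_id = api.new_analysis(project_id,
                               formulation_file: 'analysis.json', # placeholder path
                               upload_file: 'analysis.zip',       # placeholder path
                               push_to_dencity: true)             # defaults to false
```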
@@ -476,7 +482,7 @@ module OpenStudio
       if options[:upload_file]
         fail "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])

-        payload = {
+        payload = {file: Faraday::UploadIO.new(options[:upload_file], 'application/zip')}
         response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
           req.options[:timeout] = 1800 # seconds
         end
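The upload payload is now built inline with `Faraday::UploadIO`. A standalone sketch of the same pattern, assuming a pre-2.0 Faraday where `Faraday::UploadIO` and the bundled `:multipart` middleware are available; the URL, ID, and file name are placeholders:

```ruby
require 'faraday'

# The connection must use multipart encoding, as @conn_multipart evidently does.
conn = Faraday.new(url: 'http://localhost:8080') do |f|
  f.request :multipart
  f.request :url_encoded
  f.adapter Faraday.default_adapter
end

analysis_id = 'abcd-1234' # placeholder analysis ID
payload = { file: Faraday::UploadIO.new('analysis.zip', 'application/zip') } # file must exist on disk

response = conn.post "analyses/#{analysis_id}/upload.json", payload do |req|
  req.options[:timeout] = 1800 # project zips can be large; allow up to 30 minutes
end
```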
@@ -491,8 +497,72 @@ module OpenStudio
       analysis_id
     end

+    def upload_to_dencity(analysis_uuid, analysis)
+      require 'dencity'
+      puts "Attempting to connect to DEnCity server using settings at '~/.dencity/config.yml'"
+      conn = Dencity.connect
+      fail "Could not connect to DEnCity server at #{hostname}." unless conn.connected?
+      begin
+        r = conn.login
+      rescue Faraday::ParsingError => user_id_failure
+        fail "Error in user_id field: #{user_id_failure.message}"
+      rescue MultiJson::ParseError => authentication_failure
+        fail "Error in attempted authentication: #{authentication_failure.message}"
+      end
+      user_uuid = r.id
+
+      # Find the analysis.json file that SHOULD BE IN THE FOLDER THAT THIS SCRIPT IS IN (or change the below)
+      # Check that the analysis has not yet been registered with the DEnCity instance.
+      # TODO: This should be simplified with a 'retrieve_analysis_by_user_defined_id' method in the future
+      user_analyses = []
+      r = conn.dencity_get 'analyses'
+      runner.registerError('Unable to retrieve analyses from DEnCity server') unless r['status'] == 200
+      r['data'].each do |dencity_analysis|
+        user_analyses << dencity_analysis['id'] if dencity_analysis['user_id'] == user_uuid
+      end
+      found_analysis_uuid = false
+      user_analyses.each do |dencity_analysis_id|
+        dencity_analysis = conn.retrieve_analysis_by_id(dencity_analysis_id)
+        if dencity_analysis['user_defined_id'] == analysis_uuid
+          found_analysis_uuid = true
+          break
+        end
+      end
+      fail "Analysis with user_defined_id of #{analysis_uuid} found on DEnCity." if found_analysis_uuid
+      dencity_hash = OpenStudio::Analysis.to_dencity_analysis(analysis, analysis_uuid)
+
+      # Write the analysis DEnCity hash to dencity_analysis.json
+      f = File.new('dencity_analysis.json', 'wb')
+      f.write(JSON.pretty_generate(dencity_hash))
+      f.close
+
+      # Upload the processed analysis json.
+      upload = conn.load_analysis 'dencity_analysis.json'
+      begin
+        upload_response = upload.push
+      rescue => e
+        runner.registerError("Upload failure: #{e.message} in #{e.backtrace.join('/n')}")
+      else
+        if NoMethodError == upload_response.class
+          fail "ERROR: Server responded with a NoMethodError: #{upload_response}"
+        end
+        if upload_response.status.to_s[0] == '2'
+          puts 'Successfully uploaded processed analysis json file to the DEnCity server.'
+        else
+          puts 'ERROR: Server returned a non-20x status. Response below.'
+          puts upload_response
+          fail
+        end
+      end
+    end
+
+    # Upload a single datapoint
+    # @param analysis [String] Analysis ID to attach datapoint
+    # @param options [Hash] Options
+    # @option options [String] :datapoint_file Path to datapoint JSON to upload
+    # @option options [Boolean] :reset_uuids Flag on whether or not to reset the UUID in the datapoint JSON to a new random value.
     def upload_datapoint(analysis_id, options)
-      defaults = {
+      defaults = {reset_uuids: false}
       options = defaults.merge(options)

       fail 'No analysis id passed' if analysis_id.nil?
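Matching the new YARD doc, a hedged call sketch for `upload_datapoint`; the analysis ID and file path are placeholders, and `:reset_uuids` defaults to `false` per the hunk above:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new

api.upload_datapoint('abcd-1234',                       # placeholder analysis ID
                     datapoint_file: 'datapoint_0.osd', # datapoint JSON; placeholder path
                     reset_uuids: true)                 # give the datapoint a fresh UUID
```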
@@ -501,8 +571,12 @@ module OpenStudio

       dp_hash = JSON.parse(File.open(options[:datapoint_file]).read, symbolize_names: true)

+      # There are two instances of the analysis ID. There is one in the file,
+      # and the other is in the POST url. Ideally remove the version in the
+      # file and support only the URL based analysis_id
+      dp_hash[:analysis_uuid] = analysis_id
+
       if options[:reset_uuids]
-        dp_hash[:analysis_uuid] = analysis_id
         dp_hash[:uuid] = SecureRandom.uuid
       end

@@ -515,11 +589,14 @@ module OpenStudio

       if response.status == 201
         puts "new datapoints created for analysis #{analysis_id}"
+        return JSON.parse(response.body, symbolize_names: true)
       else
         fail "could not create new datapoints #{response.body}"
       end
     end

+    # Upload multiple data points to the server.
+    # @param analysis [String] Analysis ID to attach datapoint
     def upload_datapoints(analysis_id, options)
       defaults = {}
       options = defaults.merge(options)
@@ -544,10 +621,8 @@ module OpenStudio
       end
     end

-
-
-      warn 'In 0.5.0, OpenStudio::Analysis::ServerApi run_analysis will be renamed to start_analysis. Use start_analysis.'
-      defaults = { analysis_action: 'start', without_delay: false }
+    def start_analysis(analysis_id, options)
+      defaults = {analysis_action: 'start', without_delay: false}
       options = defaults.merge(options)

       puts "Run analysis is configured with #{options.to_json}"
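`start_analysis` is the rename the removed `warn` announced for the 0.4.x `run_analysis`. A hedged call sketch; the option keys come from the defaults hash here and from the `run_options` hashes later in this diff, and the ID is a placeholder:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new

api.start_analysis('abcd-1234',               # placeholder analysis ID
                   analysis_action: 'start',  # the default action
                   without_delay: false,      # queue in the background
                   analysis_type: 'single_run')
```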
@@ -559,16 +634,16 @@ module OpenStudio
       end

       if response.status == 200
-        puts "
+        puts "Received request to run analysis #{analysis_id}"
       else
         fail 'Could not start the analysis'
       end
     end

-
-
+    # Kill the analysis
+    # @param analysis [String] Analysis ID to stop
     def kill_analysis(analysis_id)
-      analysis_action = {
+      analysis_action = {analysis_action: 'stop'}

       response = @conn.post do |req|
         req.url "analyses/#{analysis_id}/action.json"
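`kill_analysis` is the stop-side counterpart: it posts `{analysis_action: 'stop'}` to the same `action.json` endpoint. A one-line sketch with a placeholder ID:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new
api.kill_analysis('abcd-1234') # placeholder analysis ID; posts analysis_action: 'stop'
```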
@@ -649,8 +724,6 @@ module OpenStudio
       data_point
     end

-    ## here are a bunch of runs that really don't belong here.
-
     # Submit a generic analysis. This will use the options that are configured in the JSON file including
     # the analysis type and options. Note that this may not work for all cases were multiple analyses need to run
     # (e.g. single_run, queue_model, lhs)
@@ -665,218 +738,54 @@ module OpenStudio
       run(formulation_filename, analysis_zip_filename, analysis_type)
     end

-    #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        without_delay: true, # run this in the foreground
-        analysis_type: 'single_run',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: run_data_point_filename
-      }
-      start_analysis(analysis_id, run_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false, # run in background
-        analysis_type: 'batch_run',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: run_data_point_filename
-      }
-      start_analysis(analysis_id, run_options)
-
-      analysis_id
-    end
-
-    # creates a new analysis and runs rgenoud optimization - number of generations isn't used right now
-    def run_rgenoud(formulation_filename, analysis_zip_filename)
-      project_options = {}
-      project_id = new_project(project_options)
-
-      analysis_options = {
-        formulation_file: formulation_filename,
-        upload_file: analysis_zip_filename,
-        reset_uuids: true
-      }
-      analysis_id = new_analysis(project_id, analysis_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false,
-        analysis_type: 'rgenoud',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-      }
-      start_analysis(analysis_id, run_options)
-
-      analysis_id
-    end
-
-    def run_lhs(formulation_filename, analysis_zip_filename)
-      project_options = {}
-      project_id = new_project(project_options)
-
-      analysis_options = {
-        formulation_file: formulation_filename,
-        upload_file: analysis_zip_filename,
-        reset_uuids: true
-      }
-      analysis_id = new_analysis(project_id, analysis_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false,
-        analysis_type: 'lhs',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-      }
-      start_analysis(analysis_id, run_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false, # run in background
-        analysis_type: 'batch_run',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-      }
-      start_analysis(analysis_id, run_options)
-
-      analysis_id
-    end
-
-    def run_baseline_perturbation(formulation_filename, analysis_zip_filename)
-      project_options = {}
-      project_id = new_project(project_options)
-
-      analysis_options = {
-        formulation_file: formulation_filename,
-        upload_file: analysis_zip_filename,
-        reset_uuids: true
-      }
-      analysis_id = new_analysis(project_id, analysis_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false,
-        analysis_type: 'baseline_perturbation',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-      }
-      start_analysis(analysis_id, run_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false, # run in background
-        analysis_type: 'batch_run',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-      }
-      start_analysis(analysis_id, run_options)
-
-      analysis_id
-    end
-
-    def run_batch_datapoints(formulation_filename, analysis_zip_filename)
-      project_options = {}
-      project_id = new_project(project_options)
-
-      puts 'In run_batch_datapoints'
-
-      analysis_options = {
-        formulation_file: formulation_filename,
-        upload_file: analysis_zip_filename,
-        reset_uuids: true
-      }
-      analysis_id = new_analysis(project_id, analysis_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false,
-        analysis_type: 'batch_datapoints',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
-      }
-      start_analysis(analysis_id, run_options)
-
-      run_options = {
-        analysis_action: 'start',
-        without_delay: false, # run in background
-        analysis_type: 'batch_run',
-        allow_multiple_jobs: true,
-        use_server_as_worker: true,
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+    # Submit the analysis for running via the API
+    #
+    # @param formulation_filename [String] Name of the analysis.json file
+    # @param analysis_zip_filename [String] Name of the analysis.zip file
+    # @param analysis_type [String] Type of analysis to run
+    # @param options [Hash] Hash of options
+    # @option options [String] :run_data_point_filename Name of ruby file that the server runs -- will be deprecated
+    # @option options [String] :push_to_dencity Whether or not to push to DEnCity
+    # @option options [String] :batch_run_method Which batch run method to use (batch_run or batch_run_local [no R])
+    def run(formulation_filename, analysis_zip_filename, analysis_type,
+            options = {})
+      defaults = {
+        run_data_point_filename: 'run_openstudio_workflow_monthly.rb',
+        push_to_dencity: false,
+        batch_run_method: 'batch_run'
       }
-
-
-      analysis_id
-    end
+      options = defaults.merge(options)

-    def run_analysis_detailed(formulation_filename, analysis_zip_filename, analysis_type,
-                              allow_multiple_jobs = true, server_as_worker = true,
-                              run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
-      warn 'run_analysis_detailed will be deprecated in 0.5.0. Use run(...)'
       project_options = {}
       project_id = new_project(project_options)

       analysis_options = {
-
-
-
+        formulation_file: formulation_filename,
+        upload_file: analysis_zip_filename,
+        reset_uuids: true,
+        push_to_dencity: options[:push_to_dencity]
       }

       analysis_id = new_analysis(project_id, analysis_options)

-      server_as_worker = true if analysis_type == 'optim' || analysis_type == 'rgenoud'
       run_options = {
-
-
-
-
-
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: run_data_point_filename
+        analysis_action: 'start',
+        without_delay: false,
+        analysis_type: analysis_type,
+        simulate_data_point_filename: 'simulate_data_point.rb', # TODO: remove these from server?
+        run_data_point_filename: options[:run_data_point_filename]
       }
       start_analysis(analysis_id, run_options)

-      # If the analysis is a staged analysis, then go ahead and run batch run
-      # no explicit way to tell the system to do it
+      # If the analysis is a staged analysis, then go ahead and run batch run
+      # because there is no explicit way to tell the system to do it
       if BATCH_RUN_METHODS.include? analysis_type
         run_options = {
-
-
-
-
-
-          simulate_data_point_filename: 'simulate_data_point.rb',
-          run_data_point_filename: run_data_point_filename
+          analysis_action: 'start',
+          without_delay: false,
+          analysis_type: options[:batch_run_method],
+          simulate_data_point_filename: 'simulate_data_point.rb',
+          run_data_point_filename: options[:run_data_point_filename]
        }
        start_analysis(analysis_id, run_options)
      end
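The net effect of this hunk: the per-algorithm helpers (`run_rgenoud`, `run_lhs`, `run_baseline_perturbation`, `run_batch_datapoints`) and the old `run_analysis_detailed` are gone, replaced by one `run` that takes the algorithm as `analysis_type` and queues the follow-up batch run automatically for anything in `BATCH_RUN_METHODS`. A migration sketch with placeholder file names:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new

# 0.4.x (removed):
#   api.run_lhs('analysis.json', 'analysis.zip')
#   api.run_rgenoud('analysis.json', 'analysis.zip')

# 1.0.0: one entry point; the algorithm is just the analysis_type argument.
api.run('analysis.json', 'analysis.zip', 'lhs')

# The options hash replaces the old positional flags.
api.run('analysis.json', 'analysis.zip', 'batch_datapoints',
        batch_run_method: 'batch_run_local', # 'batch_run' (default) or 'batch_run_local' (no R)
        push_to_dencity: false)
```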
@@ -884,59 +793,47 @@ module OpenStudio
       analysis_id
     end

-    alias_method :run, :run_analysis_detailed
-
     def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
-                         allow_multiple_jobs = true, server_as_worker = true,
                          run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
       project_options = {}
       project_id = new_project(project_options)

       analysis_options = {
-
-
-
+        formulation_file: formulation_filename,
+        upload_file: analysis_zip_filename,
+        reset_uuids: true
       }
       analysis_id = new_analysis(project_id, analysis_options)

-      server_as_worker = true if analysis_type == 'optim' || analysis_type == 'rgenoud'
       run_options = {
-
-
-
-
-
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: run_data_point_filename
+        analysis_action: 'start',
+        without_delay: false,
+        analysis_type: analysis_type,
+        simulate_data_point_filename: 'simulate_data_point.rb',
+        run_data_point_filename: run_data_point_filename
       }
       start_analysis(analysis_id, run_options)

       analysis_id
     end

-
-    def run_batch_run_across_analyses(formulation_filename, analysis_zip_filename,
-                                      allow_multiple_jobs = true, server_as_worker = true,
-                                      run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
+    def run_batch_run_across_analyses
       project_options = {}
       project_id = new_project(project_options)

       analysis_options = {
-
-
-
-        # { analysis: { name: 'something', display_name: 'something else' }}
+        formulation_file: nil,
+        upload_file: nil,
+        reset_uuids: true,
       }
       analysis_id = new_analysis(project_id, analysis_options)

       run_options = {
-
-
-
-
-
-        simulate_data_point_filename: 'simulate_data_point.rb',
-        run_data_point_filename: run_data_point_filename
+        analysis_action: 'start',
+        without_delay: false,
+        analysis_type: 'batch_run_analyses',
+        simulate_data_point_filename: 'simulate_data_point.rb',
+        run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
       }
       start_analysis(analysis_id, run_options)

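`run_batch_run_across_analyses` loses all of its parameters in the same cleanup; it now always queues a `'batch_run_analyses'` action against a throwaway project. Sketch:

```ruby
require 'openstudio-analysis'

api = OpenStudio::Analysis::ServerApi.new

# No arguments anymore: the formulation/zip are nil and the run options are fixed.
api.run_batch_run_across_analyses
```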
data/lib/openstudio/analysis/translator/datapoints.rb:

@@ -165,6 +165,7 @@ module OpenStudio
       display_name = append_model_name ? @name + ' ' + seed_model[:display_name] : @name

       a = OpenStudio::Analysis.create(display_name)
+      a

       @variables.each do |measure|
         @measure_paths.each do |measure_path|
@@ -247,7 +248,7 @@ module OpenStudio
       end

       fail 'Required setting not found: weather_paths' unless config_hash[:weather_paths]
-      config_hash[:weather_paths] =
+      config_hash[:weather_paths] = config_hash[:weather_paths].split(',')
       config_hash[:weather_paths].each do |path|
         if (Pathname.new path).absolute?
           @weather_paths << path
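The completed line makes the CSV contract explicit: `weather_paths` arrives as a single comma-separated cell and is split into an array before each entry is resolved. A tiny illustration; the file names are invented:

```ruby
# The raw cell value, as read from the CSV settings section.
config_hash = { weather_paths: 'weather/denver.epw,weather/golden.epw' } # example value

# The translator now splits it into an array before resolving each entry.
config_hash[:weather_paths] = config_hash[:weather_paths].split(',')
# => ["weather/denver.epw", "weather/golden.epw"]
```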
@@ -294,8 +295,12 @@ module OpenStudio
       @other_files << { lib_zip_name: library_name, path: config_hash[:library_path] }
     end

-
-
+    if config_hash[:allow_multiple_jobs]
+      fail "allow_multiple_jobs is no longer a valid option in the CSV, please delete and rerun"
+    end
+    if config_hash[:use_server_as_worker]
+      fail "use_server_as_worker is no longer a valid option in the CSV, please delete and rerun"
+    end

     # Assign AWS settings
     @settings[:proxy_port] = config_hash[:proxy_port] if config_hash[:proxy_port]
data/lib/openstudio/analysis/translator/excel.rb:

@@ -22,6 +22,7 @@ module OpenStudio

     # remove these once we have classes to construct the JSON file
     attr_accessor :name
+    attr_accessor :cluster_name
     attr_reader :analysis_name

     # methods to override instance variables
@@ -44,7 +45,6 @@ module OpenStudio
       @analyses = [] # Array o OpenStudio::Analysis. Use method to access
       @name = nil
       @analysis_name = nil
-      @cluster_name = nil
       @settings = {}
       @weather_files = [] # remove this from excel!
       @weather_paths = []
@@ -195,6 +195,7 @@ module OpenStudio
     end

     # convert the data in excel's parsed data into an OpenStudio Analysis Object
+    #
     # @seed_model [Hash] Seed model to set the new analysis to
     # @append_model_name [Boolean] Append the name of the seed model to the display name
     # @return [Object] An OpenStudio::Analysis
@@ -275,6 +276,11 @@ module OpenStudio
       as
     end

+    # Method to return the cluster name for backwards compatibility
+    def cluster_name
+      @settings['cluster_name']
+    end
+
     # save_analysis will iterate over each model that is defined in the spreadsheet and save the
     # zip and json file.
     def save_analysis
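The shim keeps old callers of `translator.cluster_name` working after the `@cluster_name` instance variable was dropped in favor of `@settings`. A hedged sketch; the `Translator::Excel` constructor and `process` call are assumed from the gem's usual workflow, and the spreadsheet name is a placeholder:

```ruby
require 'openstudio-analysis'

translator = OpenStudio::Analysis::Translator::Excel.new('my_analysis.xlsx') # placeholder file
translator.process

# Reads @settings['cluster_name'], which the parser now snake_cases on load.
puts translator.cluster_name
```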
@@ -411,7 +417,9 @@ module OpenStudio
         if b_settings
           @version = row[1].chomp if row[0] == 'Spreadsheet Version'
           @settings["#{row[0].snake_case}"] = row[1] if row[0]
-
+          if @settings['cluster_name']
+            @settings['cluster_name'] = @settings['cluster_name'].snake_case
+          end

           if row[0] == 'AWS Tag'
             @aws_tags << row[1].strip
@@ -448,8 +456,12 @@ module OpenStudio
           @run_setup["#{row[0].snake_case}"] = row[1] if row[0]

           # type cast
-
-
+          if @run_setup['allow_multiple_jobs']
+            fail "allow_multiple_jobs is no longer a valid option in the Excel file, please delete the row and rerun"
+          end
+          if @run_setup['use_server_as_worker']
+            fail "use_server_as_worker is no longer a valid option in the Excel file, please delete the row and rerun"
+          end
         elsif b_problem_setup
           if row[0]
             v = row[1]