carin_for_blue_button_test_kit 0.11.2 → 0.12.0
- checksums.yaml +4 -4
- data/lib/carin_for_blue_button_test_kit/version.rb +1 -1
- data/lib/carin_for_blue_button_test_kit.rb +1 -0
- data/lib/inferno_requirements_tools/ext/inferno_core/runnable.rb +22 -0
- data/lib/inferno_requirements_tools/tasks/collect_requirements.rb +196 -0
- data/lib/inferno_requirements_tools/tasks/map_requirements.rb +253 -0
- metadata +21 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f41fd8b880ca87da7f764f53bdd35d5c0ced19d31f698b77e0797c8ce573c454
+  data.tar.gz: 3b6a11364e422efda129ffb6e2e874f632a70e30a96e974ea97d4b365855f6d0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 804e36426dadbd1c5d42abe58d25af010213c8cafd337540afb21729c3a8535a69ee1d71eb8efd2d984969cc78de6895194eaf4813ca87653e40038eba39dac7
+  data.tar.gz: c10305ff52761d02b4dd8bea9a37ec5b36f794257b9114a9eb3e0189c61445e244068d06626d08c2f97f802b5b847c5d9f809e53daa151553d2862b74177c1d9
data/lib/carin_for_blue_button_test_kit.rb
CHANGED
@@ -1,3 +1,4 @@
+require_relative 'inferno_requirements_tools/ext/inferno_core/runnable'
 require_relative 'carin_for_blue_button_test_kit/generated/v1.1.0/c4bb_test_suite'
 require_relative 'carin_for_blue_button_test_kit/generated/v2.0.0/c4bb_test_suite'
 require_relative 'carin_for_blue_button_test_kit/generated/v2.0.0-dev-nonfinancial/c4bb_test_suite'
data/lib/inferno_requirements_tools/ext/inferno_core/runnable.rb
ADDED
@@ -0,0 +1,22 @@
+module Inferno
+  module DSL
+    # This module contains the DSL for defining child entities in the test
+    # definition framework.
+    module Runnable
+      # Set/Get the IDs of requirements verified by this runnable
+      # Set with [] to clear the list
+      #
+      # @param requirements [Array<String>]
+      # @return [Array<String>] the requirement IDs
+      def verifies_requirements(*requirement_ids)
+        if requirement_ids.empty?
+          @requirement_ids || []
+        elsif requirement_ids == [[]]
+          @requirement_ids = []
+        else
+          @requirement_ids = requirement_ids
+        end
+      end
+    end
+  end
+end
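This extension lets any Inferno runnable (suite, group, or test) record the requirement IDs it covers; the mapping task further below reads the declarations back by calling `verifies_requirements` with no arguments. As a rough sketch of the intended call site, the hypothetical test below declares a single requirement. The test class, its `id`, and the requirement ID are invented for illustration; only the '<Req Set>#<ID>' key format mirrors what the MapRequirements task expects, and the Inferno Core DSL (`Inferno::Test`, `fhir_read`, `assert_response_status`) is assumed to already be loaded, as it is when the test kit runs.

module CarinForBlueButtonTestKit
  # Hypothetical test, shown only to illustrate where verifies_requirements is
  # called; it is not part of the gem.
  class PatientReadRequirementTest < Inferno::Test
    id :c4bb_patient_read_requirement_example
    title 'Server returns the requested Patient resource'

    input :patient_id

    # Declare the requirement(s) this test verifies. Calling the method with no
    # arguments returns the declared IDs; passing [] clears the list.
    verifies_requirements 'hl7.fhir.us.carin-bb_2.0.0#7'

    run do
      fhir_read(:patient, patient_id)
      assert_response_status(200)
    end
  end
end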
data/lib/inferno_requirements_tools/tasks/collect_requirements.rb
ADDED
@@ -0,0 +1,196 @@
+# frozen_string_literal: true
+
+require 'csv'
+require 'roo'
+
+module InfernoRequirementsTools
+  module Tasks
+    # This class manages the collection of requirements details from
+    # requirements planning excel workbooks into a CSV representation.
+    # Currently splits out Requirements and Planned Not Tested Requirements
+    # into two separate files.
+    #
+    # The `run_check` method will check whether the previously generated file is up-to-date.
+    class CollectRequirements
+      # Update these constants based on the test kit.
+      TEST_KIT_ID = 'carin-for-blue-button-test-kit'
+      INPUT_SETS = ['hl7.fhir.us.carin-bb_2.0.0'].freeze
+
+      # Derivative constants
+      TEST_KIT_CODE_FOLDER = TEST_KIT_ID.gsub('-', '_')
+      INPUT_HEADERS = ['ID*', 'URL*', 'Requirement*', 'Conformance*', 'Actor*', 'Sub-Requirement(s)', 'Conditionality',
+                       'Verifiable?', 'Verifiability Details', 'Planning To Test?', 'Planning To Test Details'].freeze
+      REQUIREMENTS_OUTPUT_HEADERS = ['Req Set', 'ID', 'URL', 'Requirement', 'Conformance', 'Actor',
+                                     'Sub-Requirement(s)', 'Conditionality'].freeze
+      REQUIREMENTS_OUTPUT_FILE_NAME = "#{TEST_KIT_ID}_requirements.csv"
+      REQUIREMENTS_OUTPUT_FILE = File.join('lib', TEST_KIT_CODE_FOLDER, 'requirements',
+                                           REQUIREMENTS_OUTPUT_FILE_NAME).freeze
+      PLANNED_NOT_TESTED_OUTPUT_HEADERS = ['Req Set', 'ID', 'Reason', 'Details'].freeze
+      PLANNED_NOT_TESTED_OUTPUT_FILE_NAME = "#{TEST_KIT_ID}_out_of_scope_requirements.csv"
+      PLANNED_NOT_TESTED_OUTPUT_FILE = File.join('lib', TEST_KIT_CODE_FOLDER, 'requirements',
+                                                 PLANNED_NOT_TESTED_OUTPUT_FILE_NAME).freeze
+      BOM = "\xEF\xBB\xBF"
+
+      def input_file_map
+        @input_file_map ||= {}
+      end
+
+      def input_rows
+        @input_rows ||= {}
+      end
+
+      def input_rows_for_set(req_set_id)
+        input_rows[req_set_id] = extract_input_rows_for_set(req_set_id) unless input_rows.has_key?(req_set_id)
+        input_rows[req_set_id]
+      end
+
+      def extract_input_rows_for_set(req_set_id)
+        CSV.parse(Roo::Spreadsheet.open(input_file_map[req_set_id]).sheet('Requirements').to_csv,
+                  headers: true).map do |row|
+          row.to_h.slice(*INPUT_HEADERS)
+        end
+      end
+
+      def new_requirements_csv
+        @new_requirements_csv ||=
+          CSV.generate do |csv|
+            csv << REQUIREMENTS_OUTPUT_HEADERS
+
+            INPUT_SETS.each do |req_set_id|
+              input_rows = input_rows_for_set(req_set_id)
+              input_rows.each do |row| # NOTE: use row order from source file
+                row['Req Set'] = req_set_id
+
+                csv << REQUIREMENTS_OUTPUT_HEADERS.map do |header|
+                  row.has_key?(header) ? row[header] : row["#{header}*"]
+                end
+              end
+            end
+          end
+      end
+
+      def old_requirements_csv
+        @old_requirements_csv ||= File.read(REQUIREMENTS_OUTPUT_FILE)
+      end
+
+      def new_planned_not_tested_csv
+        @new_planned_not_tested_csv ||=
+          CSV.generate do |csv|
+            csv << PLANNED_NOT_TESTED_OUTPUT_HEADERS
+
+            INPUT_SETS.each do |req_set_id|
+              input_rows = input_rows_for_set(req_set_id)
+              input_rows.each do |row| # NOTE: use row order from source file
+                not_verifiable = row['Verifiable?']&.downcase == 'no' || row['Verifiable?']&.downcase == 'false'
+                not_tested = row['Planning To Test?']&.downcase == 'no' || row['Planning To Test?']&.downcase == 'false'
+                next unless not_verifiable || not_tested
+
+                csv << [req_set_id,
+                        row['ID*'],
+                        not_verifiable ? 'Not Verifiable' : 'Not Tested',
+                        not_verifiable ? row['Verifiability Details'] : row['Planning To Test Details']]
+              end
+            end
+          end
+      end
+
+      def old_planned_not_tested_csv
+        @old_planned_not_tested_csv ||= File.read(PLANNED_NOT_TESTED_OUTPUT_FILE)
+      end
+
+      def check_for_req_set_files(input_directory)
+        available_worksheets = Dir.glob(File.join(input_directory, '*.xlsx')).reject { |f| f.include?('~$') }
+
+        INPUT_SETS.each do |req_set_id|
+          req_set_file = available_worksheets&.find { |worksheet_file| worksheet_file.include?(req_set_id) }
+
+          if req_set_file.nil?
+            puts "Could not find input file for set #{req_set_id} in directory #{input_directory}. Aborting requirements collection..."
+            exit(1)
+          end
+          input_file_map[req_set_id] = req_set_file
+        end
+      end
+
+      def run(input_directory)
+        check_for_req_set_files(input_directory)
+
+        update_requirements = if File.exist?(REQUIREMENTS_OUTPUT_FILE)
+                                if old_requirements_csv == (BOM + new_requirements_csv)
+                                  puts "'#{REQUIREMENTS_OUTPUT_FILE_NAME}' file is up to date."
+                                  false
+                                else
+                                  puts 'Requirements set has changed.'
+                                  true
+                                end
+                              else
+                                puts "No existing #{REQUIREMENTS_OUTPUT_FILE_NAME}."
+                                true
+                              end
+        if update_requirements
+          puts "Writing to file #{REQUIREMENTS_OUTPUT_FILE}..."
+          File.write(REQUIREMENTS_OUTPUT_FILE, BOM + new_requirements_csv, encoding: Encoding::UTF_8)
+        end
+
+        update_planned_not_tested = if File.exist?(PLANNED_NOT_TESTED_OUTPUT_FILE)
+                                      if old_planned_not_tested_csv == (BOM + new_planned_not_tested_csv)
+                                        puts "'#{PLANNED_NOT_TESTED_OUTPUT_FILE_NAME}' file is up to date."
+                                        false
+                                      else
+                                        puts 'Planned Not Tested Requirements set has changed.'
+                                        true
+                                      end
+                                    else
+                                      puts "No existing #{PLANNED_NOT_TESTED_OUTPUT_FILE_NAME}."
+                                      true
+                                    end
+        if update_planned_not_tested
+          puts "Writing to file #{PLANNED_NOT_TESTED_OUTPUT_FILE}..."
+          File.write(PLANNED_NOT_TESTED_OUTPUT_FILE, BOM + new_planned_not_tested_csv, encoding: Encoding::UTF_8)
+        end
+
+        puts 'Done.'
+      end
+
+      def run_check(input_directory)
+        check_for_req_set_files(input_directory)
+
+        requirements_ok = if File.exist?(REQUIREMENTS_OUTPUT_FILE)
+                            if old_requirements_csv == (BOM + new_requirements_csv)
+                              puts "'#{REQUIREMENTS_OUTPUT_FILE_NAME}' file is up to date."
+                              true
+                            else
+                              puts "#{REQUIREMENTS_OUTPUT_FILE_NAME} file is out of date."
+                              false
+                            end
+                          else
+                            puts "No existing #{REQUIREMENTS_OUTPUT_FILE_NAME} file."
+                            false
+                          end
+
+        planned_not_tested_requirements_ok = if File.exist?(PLANNED_NOT_TESTED_OUTPUT_FILE)
+                                               if old_planned_not_tested_csv == (BOM + new_planned_not_tested_csv)
+                                                 puts "'#{PLANNED_NOT_TESTED_OUTPUT_FILE_NAME}' file is up to date."
+                                                 true
+                                               else
+                                                 puts "#{PLANNED_NOT_TESTED_OUTPUT_FILE_NAME} file is out of date."
+                                                 false
+                                               end
+                                             else
+                                               puts "No existing #{PLANNED_NOT_TESTED_OUTPUT_FILE_NAME} file."
+                                               false
+                                             end
+
+        return if planned_not_tested_requirements_ok && requirements_ok
+
+        puts <<~MESSAGE
+          Check Failed. To resolve, run:
+
+            bundle exec rake "requirements:collect_requirements[<input_directory>]"
+
+        MESSAGE
+        exit(1)
+      end
+    end
+  end
+end
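This collection task is meant to be driven from Rake; the failure message above references a `requirements:collect_requirements[<input_directory>]` task. The Rakefile sketch below shows one way that wiring could look, assuming the tasks live in a `requirements` namespace as that message suggests; the `check_requirements` task name is invented for the sketch.

# Rakefile (sketch)
require_relative 'lib/inferno_requirements_tools/tasks/collect_requirements'

namespace :requirements do
  desc 'Collect requirements from the planning workbooks into the requirements CSVs'
  task :collect_requirements, [:input_directory] do |_task, args|
    InfernoRequirementsTools::Tasks::CollectRequirements.new.run(args[:input_directory])
  end

  desc 'Check that the generated requirements CSVs are up to date (exits 1 when they are not)'
  task :check_requirements, [:input_directory] do |_task, args|
    InfernoRequirementsTools::Tasks::CollectRequirements.new.run_check(args[:input_directory])
  end
end

Invoked as `bundle exec rake "requirements:collect_requirements[<input_directory>]"`, where the argument is the directory holding the planning .xlsx workbooks, the task rewrites the two CSVs only when their content would actually change.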
data/lib/inferno_requirements_tools/tasks/map_requirements.rb
ADDED
@@ -0,0 +1,253 @@
+# frozen_string_literal: true
+
+require 'csv'
+require 'roo'
+require_relative '../ext/inferno_core/runnable'
+
+module InfernoRequirementsTools
+  module Tasks
+    # This class manages the mapping of IG requirements to test kit tests.
+    # It expects a CSV file in the repo at lib/carin_for_blue_button_test_kit/requirements/Requirements.csv
+    # This file must have a column with the header 'ID' which holds user-designated IDs for each requirement.
+    # These requirement IDs must map to the IDs specified in the test kit using `verifies_requirements`
+    #
+    # The `run` method generates a CSV file at lib/carin_for_blue_button_test_kit/requirements/Requirements_Coverage.csv.
+    # This file will be identical to the input spreadsheet, plus an additional column which holds a comma separated
+    # list of inferno test IDs that test each requirement. These test IDs are Inferno short form IDs that represent the
+    # position of the test within its group and suite. For example, the fifth test in the second group will have an ID
+    # of 2.05. This ID is also shown in the Inferno web UI.
+    # The output file is also sorted by requirement ID.
+    #
+    # The `run_check` method will check whether the previously generated file is up-to-date.
+    class MapRequirements
+      # Update these constants based on the test kit.
+      TEST_KIT_ID = 'carin-for-blue-button-test-kit'
+      TEST_SUITES = [CarinForBlueButtonTestKit::CARIN4BBV200::C4BBTestSuite].freeze # list of suite classes
+      SUITE_ID_TO_ACTOR_MAP = {
+        'c4bb_v200' => 'Health Plan'
+      }.freeze
+
+      # Derivative constants
+      TEST_KIT_CODE_FOLDER = TEST_KIT_ID.gsub('-', '_')
+      INPUT_HEADERS = ['Req Set', 'ID', 'URL', 'Requirement', 'Conformance', 'Actor', 'Sub-Requirement(s)',
+                       'Conditionality'].freeze
+      SHORT_ID_HEADER = 'Short ID(s)'
+      FULL_ID_HEADER = 'Full ID(s)'
+      INPUT_FILE_NAME = "#{TEST_KIT_ID}_requirements.csv"
+      INPUT_FILE = File.join('lib', TEST_KIT_CODE_FOLDER, 'requirements', INPUT_FILE_NAME).freeze
+      NOT_TESTED_FILE_NAME = "#{TEST_KIT_ID}_out_of_scope_requirements.csv"
+      NOT_TESTED_FILE = File.join('lib', TEST_KIT_CODE_FOLDER, 'requirements', NOT_TESTED_FILE_NAME).freeze
+      OUTPUT_FILE_NAME = "#{TEST_KIT_ID}_requirements_coverage.csv"
+      OUTPUT_FILE_DIRECTORY = File.join('lib', TEST_KIT_CODE_FOLDER, 'requirements', 'generated').freeze
+      OUTPUT_FILE = File.join(OUTPUT_FILE_DIRECTORY, OUTPUT_FILE_NAME).freeze
+      BOM = "\xEF\xBB\xBF"
+
+      def input_rows
+        @input_rows ||=
+          CSV.parse(File.open(INPUT_FILE, 'r:bom|utf-8'), headers: true).map do |row|
+            row.to_h.slice(*INPUT_HEADERS)
+          end
+      end
+
+      def not_tested_requirements_map
+        @not_tested_requirements_map ||= load_not_tested_requirements
+      end
+
+      def load_not_tested_requirements
+        return {} unless File.exist?(NOT_TESTED_FILE)
+
+        not_tested_requirements = {}
+        CSV.parse(File.open(NOT_TESTED_FILE, 'r:bom|utf-8'), headers: true).each do |row|
+          row_hash = row.to_h
+          not_tested_requirements["#{row_hash['Req Set']}##{row_hash['ID']}"] = row_hash
+        end
+
+        not_tested_requirements
+      end
+
+      # Of the form:
+      # {
+      #   'req-id-1': [
+      #     { short_id: 'short-id-1', full_id: 'long-id-1', suite_id: 'suite-id-1' },
+      #     { short_id: 'short-id-2', full_id: 'long-id-2', suite_id: 'suite-id-2' }
+      #   ],
+      #   'req-id-2': [{ short_id: 'short-id-3', full_id: 'long-id-3', suite_id: 'suite-id-3' }],
+      #   ...
+      # }
+      def inferno_requirements_map
+        @inferno_requirements_map ||= TEST_SUITES.each_with_object({}) do |suite, requirements_map|
+          serialize_requirements(suite, 'suite', suite.id, requirements_map)
+          suite.groups.each do |group|
+            map_group_requirements(group, suite.id, requirements_map)
+          end
+        end
+      end
+
+      def new_csv
+        @new_csv ||=
+          CSV.generate do |csv|
+            output_headers = TEST_SUITES.each_with_object(INPUT_HEADERS.dup) do |suite, headers|
+              headers << "#{suite.title} #{SHORT_ID_HEADER}"
+              headers << "#{suite.title} #{FULL_ID_HEADER}"
+            end
+
+            csv << output_headers
+            input_rows.each do |row| # NOTE: use row order from source file
+              row_actor = row['Actor']
+              TEST_SUITES.each do |suite|
+                suite_actor = SUITE_ID_TO_ACTOR_MAP[suite.id]
+                if row_actor&.include?(suite_actor)
+                  set_and_req_id = "#{row['Req Set']}##{row['ID']}"
+                  suite_requirement_items = inferno_requirements_map[set_and_req_id]&.filter do |item|
+                    item[:suite_id] == suite.id
+                  end
+                  short_ids = suite_requirement_items&.map { |item| item[:short_id] }
+                  full_ids = suite_requirement_items&.map { |item| item[:full_id] }
+                  if short_ids.blank? && not_tested_requirements_map.has_key?(set_and_req_id)
+                    row["#{suite.title} #{SHORT_ID_HEADER}"] = 'Not Tested'
+                    row["#{suite.title} #{FULL_ID_HEADER}"] = 'Not Tested'
+                  else
+                    row["#{suite.title} #{SHORT_ID_HEADER}"] = short_ids&.join(', ')
+                    row["#{suite.title} #{FULL_ID_HEADER}"] = full_ids&.join(', ')
+                  end
+                else
+                  row["#{suite.title} #{SHORT_ID_HEADER}"] = 'NA'
+                  row["#{suite.title} #{FULL_ID_HEADER}"] = 'NA'
+                end
+              end
+
+              csv << row.values
+            end
+          end
+      end
+
+      def input_requirement_ids
+        @input_requirement_ids ||= input_rows.map { |row| "#{row['Req Set']}##{row['ID']}" }
+      end
+
+      # The requirements present in Inferno that aren't in the input spreadsheet
+      def unmatched_requirements_map
+        @unmatched_requirements_map ||= inferno_requirements_map.filter do |requirement_id, _|
+          !input_requirement_ids.include?(requirement_id)
+        end
+      end
+
+      def old_csv
+        @old_csv ||= File.read(OUTPUT_FILE)
+      end
+
+      def run
+        unless File.exist?(INPUT_FILE)
+          puts "Could not find input file: #{INPUT_FILE}. Aborting requirements mapping..."
+          exit(1)
+        end
+
+        if unmatched_requirements_map.any?
+          puts "WARNING: The following requirements indicated in the test kit are not present in #{INPUT_FILE_NAME}"
+          output_requirements_map_table(unmatched_requirements_map)
+        end
+
+        if File.exist?(OUTPUT_FILE)
+          if old_csv == (BOM + new_csv)
+            puts "'#{OUTPUT_FILE_NAME}' file is up to date."
+            return
+          else
+            puts 'Requirements mapping has changed.'
+          end
+        else
+          puts "No existing #{OUTPUT_FILE_NAME}."
+        end
+
+        puts "Writing to file #{OUTPUT_FILE}..."
+        FileUtils.mkdir_p(OUTPUT_FILE_DIRECTORY)
+        File.write(OUTPUT_FILE, BOM + new_csv)
+        puts 'Done.'
+      end
+
+      def run_check
+        unless File.exist?(INPUT_FILE)
+          puts "Could not find input file: #{INPUT_FILE}. Aborting requirements mapping check..."
+          exit(1)
+        end
+
+        if unmatched_requirements_map.any?
+          puts "The following requirements indicated in the test kit are not present in #{INPUT_FILE_NAME}"
+          output_requirements_map_table(unmatched_requirements_map)
+        end
+
+        if File.exist?(OUTPUT_FILE)
+          if old_csv == (BOM + new_csv)
+            puts "'#{OUTPUT_FILE_NAME}' file is up to date."
+            return unless unmatched_requirements_map.any?
+          else
+            puts <<~MESSAGE
+              #{OUTPUT_FILE_NAME} file is out of date.
+              To regenerate the file, run:
+
+                bundle exec rake requirements:map_requirements
+
+            MESSAGE
+          end
+        else
+          puts <<~MESSAGE
+            No existing #{OUTPUT_FILE_NAME} file.
+            To generate the file, run:
+
+              bundle exec rake requirements:map_requirements
+
+          MESSAGE
+        end
+
+        puts 'Check failed.'
+        exit(1)
+      end
+
+      def map_group_requirements(group, suite_id, requirements_map)
+        serialize_requirements(group, group.short_id, suite_id, requirements_map)
+        group.tests&.each { |test| serialize_requirements(test, test.short_id, suite_id, requirements_map) }
+        group.groups&.each { |subgroup| map_group_requirements(subgroup, suite_id, requirements_map) }
+      end
+
+      def serialize_requirements(runnable, short_id, suite_id, requirements_map)
+        runnable.verifies_requirements&.each do |requirement_id|
+          requirement_id_string = requirement_id.to_s
+
+          requirements_map[requirement_id_string] ||= []
+          requirements_map[requirement_id_string] << { short_id:, full_id: runnable.id, suite_id: }
+        end
+      end
+
+      # Output the requirements in the map like so:
+      #
+      # requirement_id | short_id   | full_id
+      # ---------------+------------+----------
+      # req-id-1       | short-id-1 | full-id-1
+      # req-id-2       | short-id-2 | full-id-2
+      def output_requirements_map_table(requirements_map)
+        headers = %w[requirement_id short_id full_id]
+        col_widths = headers.map(&:length)
+        col_widths[0] = [col_widths[0], requirements_map.keys.map(&:length).max].max
+        col_widths[1] = ([col_widths[1]] + requirements_map.values.flatten.map { |item| item[:short_id].length }).max
+        col_widths[2] = ([col_widths[2]] + requirements_map.values.flatten.map { |item| item[:full_id].length }).max
+        col_widths.map! { |width| width + 3 }
+
+        puts [
+          headers[0].ljust(col_widths[0]),
+          headers[1].ljust(col_widths[1]),
+          headers[2].ljust(col_widths[2])
+        ].join(' | ')
+        puts col_widths.map { |width| '-' * width }.join('-+-')
+        requirements_map.each do |requirement_id, runnables|
+          runnables.each do |runnable|
+            puts [
+              requirement_id.ljust(col_widths[0]),
+              runnable[:short_id].ljust(col_widths[1]),
+              runnable[:full_id].ljust(col_widths[2])
+            ].join(' | ')
+          end
+        end
+        puts
+      end
+    end
+  end
+end
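The mapping side follows the same pattern, with one wrinkle: `TEST_SUITES` references the v2.0.0 suite class at load time, so the test kit itself must be loaded before this file is required. The sketch below matches the `requirements:map_requirements` command quoted in the messages above; the `check_map_requirements` task name and the exact require order are assumptions, not taken from the gem.

# Rakefile (sketch, continued)
require_relative 'lib/carin_for_blue_button_test_kit' # defines the suite classes used by TEST_SUITES
require_relative 'lib/inferno_requirements_tools/tasks/map_requirements'

namespace :requirements do
  desc 'Regenerate the requirements coverage CSV from verifies_requirements declarations'
  task :map_requirements do
    InfernoRequirementsTools::Tasks::MapRequirements.new.run
  end

  desc 'Verify the coverage CSV is current and every declared requirement ID is known'
  task :check_map_requirements do
    InfernoRequirementsTools::Tasks::MapRequirements.new.run_check
  end
end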
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: carin_for_blue_button_test_kit
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.12.0
 platform: ruby
 authors:
 - John Morrison
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-11-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: inferno_core
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.5.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.5.0
 - !ruby/object:Gem::Dependency
   name: smart_app_launch_test_kit
   requirement: !ruby/object:Gem::Requirement
@@ -94,6 +94,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '3.11'
+- !ruby/object:Gem::Dependency
+  name: roo
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 2.7.1
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 2.7.1
 description: CARIN IG for Blue Button® Test Kit
 email:
 - jmorrison@leaporbit.com
@@ -663,6 +677,9 @@ files:
 - lib/carin_for_blue_button_test_kit/search_test_properties.rb
 - lib/carin_for_blue_button_test_kit/validation_test.rb
 - lib/carin_for_blue_button_test_kit/version.rb
+- lib/inferno_requirements_tools/ext/inferno_core/runnable.rb
+- lib/inferno_requirements_tools/tasks/collect_requirements.rb
+- lib/inferno_requirements_tools/tasks/map_requirements.rb
 homepage: https://github.com/inferno-framework/carin-for-blue-button-test-kit
 licenses:
 - Apache-2.0