feature_map 1.1.0
- checksums.yaml +7 -0
- data/README.md +269 -0
- data/bin/featuremap +5 -0
- data/lib/feature_map/cli.rb +243 -0
- data/lib/feature_map/code_features/plugin.rb +79 -0
- data/lib/feature_map/code_features/plugins/identity.rb +39 -0
- data/lib/feature_map/code_features.rb +152 -0
- data/lib/feature_map/configuration.rb +43 -0
- data/lib/feature_map/constants.rb +11 -0
- data/lib/feature_map/mapper.rb +78 -0
- data/lib/feature_map/output_color.rb +42 -0
- data/lib/feature_map/private/assignment_mappers/directory_assignment.rb +150 -0
- data/lib/feature_map/private/assignment_mappers/feature_definition_assignment.rb +68 -0
- data/lib/feature_map/private/assignment_mappers/feature_globs.rb +138 -0
- data/lib/feature_map/private/assignment_mappers/file_annotations.rb +158 -0
- data/lib/feature_map/private/assignments_file.rb +190 -0
- data/lib/feature_map/private/code_cov.rb +96 -0
- data/lib/feature_map/private/cyclomatic_complexity_calculator.rb +46 -0
- data/lib/feature_map/private/docs/index.html +247 -0
- data/lib/feature_map/private/documentation_site.rb +128 -0
- data/lib/feature_map/private/extension_loader.rb +24 -0
- data/lib/feature_map/private/feature_assigner.rb +22 -0
- data/lib/feature_map/private/feature_metrics_calculator.rb +76 -0
- data/lib/feature_map/private/feature_plugins/assignment.rb +17 -0
- data/lib/feature_map/private/glob_cache.rb +80 -0
- data/lib/feature_map/private/lines_of_code_calculator.rb +49 -0
- data/lib/feature_map/private/metrics_file.rb +86 -0
- data/lib/feature_map/private/test_coverage_file.rb +97 -0
- data/lib/feature_map/private/test_pyramid_file.rb +151 -0
- data/lib/feature_map/private/todo_inspector.rb +57 -0
- data/lib/feature_map/private/validations/features_up_to_date.rb +78 -0
- data/lib/feature_map/private/validations/files_have_features.rb +45 -0
- data/lib/feature_map/private/validations/files_have_unique_features.rb +34 -0
- data/lib/feature_map/private.rb +204 -0
- data/lib/feature_map/validator.rb +29 -0
- data/lib/feature_map.rb +212 -0
- metadata +253 -0
data/lib/feature_map/private/assignment_mappers/file_annotations.rb
@@ -0,0 +1,158 @@

# frozen_string_literal: true

# typed: strict

module FeatureMap
  module Private
    module AssignmentMappers
      # Calculate, cache, and return a mapping of file names (relative to the root
      # of the repository) to a feature name.
      #
      # Example:
      #
      #   {
      #     'app/models/company.rb' => Feature.find('Onboarding'),
      #     ...
      #   }
      class FileAnnotations
        extend T::Sig
        include Mapper

        # NOTE: regex 'x' arg ignores whitespace within the _construction_ of the regex.
        #       regex 'm' arg allows the regex to _execute_ on multiline strings.
        SINGLE_LINE_ANNOTATION_PATTERN = T.let(
          /
            \s* # Any amount of whitespace
            (#{Constants::SINGLE_LINE_COMMENT_PATTERNS.join('|')}) # Single line comment start
            \s* # Any amount of whitespace, not including newlines
            @feature\s # We find the feature annotation followed by one space
            (?<feature>.*?$) # A named capture grabs the rest as the feature until the line ends
          /x.freeze,
          Regexp
        )
        MULTILINE_ANNOTATION_PATTERN = T.let(
          /
            (?:#{Constants::MULTILINE_COMMENT_START_PATTERNS.join('|')}) # Any comment start
            .*? # Followed by any characters, including newlines, until...
            @feature\s # We find the feature annotation followed by one space
            (?<feature>.*?$) # A named capture grabs the rest as the feature until the line ends
          /xm.freeze,
          Regexp
        )
        DESCRIPTION = 'Annotations at the top of file'

        sig do
          override.params(file: String)
                  .returns(T.nilable(CodeFeatures::Feature))
        end
        def map_file_to_feature(file)
          file_annotation_based_feature(file)
        end

        sig do
          override
            .params(files: T::Array[String])
            .returns(T::Hash[String, CodeFeatures::Feature])
        end
        def globs_to_feature(files)
          files.each_with_object({}) do |filename_relative_to_root, mapping|
            feature = file_annotation_based_feature(filename_relative_to_root)
            next unless feature

            mapping[filename_relative_to_root] = feature
          end
        end

        sig do
          override.params(cache: GlobsToAssignedFeatureMap, files: T::Array[String]).returns(GlobsToAssignedFeatureMap)
        end
        def update_cache(cache, files)
          # We map files to nil features so that files whose annotations have been removed will be properly
          # overwritten (i.e. removed) from the cache.
          fileset = Set.new(files)
          updated_cache_for_files = globs_to_feature(files)
          cache.merge!(updated_cache_for_files)

          invalid_files = cache.keys.select do |file|
            # If a file is not tracked, it should be removed from the cache
            !Private.file_tracked?(file) ||
              # If a file no longer has a file annotation (i.e. `globs_to_feature` doesn't map it)
              # it should be removed from the cache
              # We make sure to only apply this to the input files since otherwise `updated_cache_for_files.key?(file)` would always return `false` when files == []
              (fileset.include?(file) && !updated_cache_for_files.key?(file))
          end

          invalid_files.each do |invalid_file|
            cache.delete(invalid_file)
          end

          cache
        end

        sig { params(lines: T::Array[String]).returns(T.nilable(String)) }
        def identify_feature_from(lines)
          matched_single_line_feature = lines.join("\n").match(SINGLE_LINE_ANNOTATION_PATTERN)
          matched_multiline_feature = lines.join("\n").match(MULTILINE_ANNOTATION_PATTERN)
          matched_feature = matched_single_line_feature || matched_multiline_feature
          return if matched_feature.nil?

          T.must(matched_feature
            .values_at(:feature)
            .first)
            .gsub(/#{Constants::MULTILINE_COMMENT_END_PATTERNS.join('|')}/, '')
            .strip
        rescue ArgumentError => e
          raise unless e.message.include?('invalid byte sequence')
        end

        sig { params(filename: String).returns(T.nilable(CodeFeatures::Feature)) }
        def file_annotation_based_feature(filename)
          # Not too sure what the following comment means but it was carried over from the code_ownership repo, so
          # I've opted to leave it unchanged in case it is helpful for future engineers:
          # > If for a directory is named with an ownable extension, we need to skip
          # > so File.foreach doesn't blow up below. This was needed because Cypress
          # > screenshots are saved to a folder with the test suite filename.
          return if File.directory?(filename)
          return unless File.file?(filename)

          # The annotation should be on one of the first ten lines.
          # If the annotation isn't in the first ten lines we assume it
          # doesn't exist.

          lines = File.foreach(filename).first(10)
          return if lines.empty?

          feature = identify_feature_from(lines)
          return unless feature

          Private.find_feature!(
            feature,
            filename
          )
        end

        sig { params(filename: String).void }
        def remove_file_annotation!(filename)
          if file_annotation_based_feature(filename)
            filepath = Pathname.new(filename)
            lines = filepath.read.split("\n")
            new_lines = lines.reject { |line| line[SINGLE_LINE_ANNOTATION_PATTERN] }
            # We explicitly add a final new line since splitting by new line when reading the file lines
            # ignores new lines at the ends of files
            # We also remove leading new lines, since there is often a new line after an annotation
            new_file_contents = "#{new_lines.join("\n")}\n".gsub(/\A\n+/, '')
            filepath.write(new_file_contents)
          end
        end

        sig { override.returns(String) }
        def description
          DESCRIPTION
        end

        sig { override.void }
        def bust_caches!; end
      end
    end
  end
end
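As a rough illustration of how this mapper is consumed, the sketch below annotates a hypothetical application file and resolves it back to a feature. The `app/models/company.rb` path and `Onboarding` feature name come from the class comment above; the direct instantiation of `FileAnnotations` is an assumption for demonstration and presumes the gem is loaded and that feature is defined for the repository.

# app/models/company.rb (hypothetical application file)
# @feature Onboarding
class Company
  # ...
end

# Sketch of resolving the annotation back to a feature:
mapper = FeatureMap::Private::AssignmentMappers::FileAnnotations.new
feature = mapper.map_file_to_feature('app/models/company.rb')
feature&.name # => "Onboarding", assuming that feature is defined for the repo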
data/lib/feature_map/private/assignments_file.rb
@@ -0,0 +1,190 @@

# typed: strict
# frozen_string_literal: true

module FeatureMap
  module Private
    #
    # This class is responsible for turning FeatureMap directives (e.g. annotations, directory assignments, etc.)
    # into an assignments.yml file that can be used as an input to a variety of engineering team utilities (e.g.
    # PR/release announcements, documentation generation, etc.).
    #
    class AssignmentsFile
      extend T::Sig

      class FileContentError < StandardError; end

      FILES_KEY = 'files'
      FILE_FEATURE_KEY = 'feature'
      FILE_MAPPER_KEY = 'mapper'
      FEATURES_KEY = 'features'
      FEATURE_FILES_KEY = 'files'

      FeatureName = T.type_alias { String }
      FilePath = T.type_alias { String }
      MapperDescription = T.type_alias { String }

      FileDetails = T.type_alias do
        T::Hash[
          String,
          T.any(FeatureName, MapperDescription)
        ]
      end

      FilesContent = T.type_alias do
        T::Hash[
          FilePath,
          FileDetails
        ]
      end

      FileList = T.type_alias { T::Array[String] }
      TeamList = T.type_alias { T::Array[String] }

      FeatureDetails = T.type_alias do
        T::Hash[
          String,
          T.any(FileList, TeamList)
        ]
      end

      FeaturesContent = T.type_alias do
        T::Hash[
          FeatureName,
          FeatureDetails
        ]
      end

      sig { returns(T::Array[String]) }
      def self.actual_contents_lines
        if path.exist?
          content = path.read
          lines = path.read.split("\n")
          if content.end_with?("\n")
            lines << ''
          end
          lines
        else
          ['']
        end
      end

      sig { returns(T::Array[T.nilable(String)]) }
      def self.expected_contents_lines
        cache = Private.glob_cache.raw_cache_contents

        header = <<~HEADER
          # STOP! - DO NOT EDIT THIS FILE MANUALLY
          # This file was automatically generated by "bin/featuremap validate". The next time this file
          # is generated any changes will be lost. For more details:
          # https://github.com/Beyond-Finance/feature_map
          #
          # It is recommended to commit this file into your source control. It will only change when the
          # set of files assigned to a feature change, which should be explicitly tracked.
        HEADER

        files_content = T.let({}, FilesContent)
        files_by_feature = T.let({}, T::Hash[FeatureName, FileList])
        features_content = T.let({}, FeaturesContent)

        cache.each do |mapper_description, assignment_map_cache|
          assignment_map_cache = assignment_map_cache.sort_by do |glob, _feature|
            glob
          end

          assignment_map_cache.to_h.each do |path, feature|
            files_content[path] = T.let({ FILE_FEATURE_KEY => feature.name, FILE_MAPPER_KEY => mapper_description }, FileDetails)

            files_by_feature[feature.name] ||= []
            T.must(files_by_feature[feature.name]) << path
          end
        end

        # Ordering of features in the resulting YAML content is determined by the order in which keys are added to
        # each hash.
        CodeFeatures.all.sort_by(&:name).each do |feature|
          files = files_by_feature[feature.name] || []
          expanded_files = files.flat_map { |file| Dir.glob(file) }.reject { |path| File.directory?(path) }

          # Exclude features that have no related files. These features are presumably irrelevant to the current
          # repo/application.
          next if expanded_files.empty?

          features_content[feature.name] = T.let({ 'files' => expanded_files.sort }, FeatureDetails)

          if !Private.configuration.skip_code_ownership
            T.must(features_content[feature.name])['teams'] = expanded_files.map { |file| CodeOwnership.for_file(file)&.name }.compact.uniq.sort
          end
        end

        [
          *header.split("\n"),
          '', # For line between header and file assignments lines
          *{ FILES_KEY => files_content, FEATURES_KEY => features_content }.to_yaml.split("\n"),
          '' # For end-of-file newline
        ]
      end

      sig { void }
      def self.write!
        FileUtils.mkdir_p(path.dirname) if !path.dirname.exist?
        path.write(expected_contents_lines.join("\n"))
      end

      sig { returns(Pathname) }
      def self.path
        Pathname.pwd.join('.feature_map/assignments.yml')
      end

      sig { params(files: T::Array[String]).void }
      def self.update_cache!(files)
        cache = Private.glob_cache
        # Each mapper returns a new copy of the cache subset related to that mapper,
        # which is then stored back into the cache.
        Mapper.all.each do |mapper|
          existing_cache = cache.raw_cache_contents.fetch(mapper.description, {})
          updated_cache = mapper.update_cache(existing_cache, files)
          cache.raw_cache_contents[mapper.description] = updated_cache
        end
      end

      sig { returns(T::Boolean) }
      def self.use_features_cache?
        AssignmentsFile.path.exist? && !Private.configuration.skip_features_validation
      end

      sig { returns(GlobCache) }
      def self.to_glob_cache
        raw_cache_contents = T.let({}, GlobCache::CacheShape)
        features_by_name = CodeFeatures.all.each_with_object({}) do |feature, map|
          map[feature.name] = feature
        end
        mapper_descriptions = Set.new(Mapper.all.map(&:description))

        features_file_content = YAML.load_file(path)
        features_file_content[FILES_KEY]&.each do |file_path, file_assignment|
          next if file_assignment.nil?
          next if file_assignment[FILE_FEATURE_KEY].nil? || features_by_name[file_assignment[FILE_FEATURE_KEY]].nil?
          next if file_assignment[FILE_MAPPER_KEY].nil? || !mapper_descriptions.include?(file_assignment[FILE_MAPPER_KEY])

          raw_cache_contents[file_assignment[FILE_MAPPER_KEY]] ||= {}
          raw_cache_contents.fetch(file_assignment[FILE_MAPPER_KEY])[file_path] = features_by_name[file_assignment[FILE_FEATURE_KEY]]
        end

        GlobCache.new(raw_cache_contents)
      end

      sig { returns(FeaturesContent) }
      def self.load_features!
        assignments_content = YAML.load_file(path)

        return assignments_content[FEATURES_KEY] if assignments_content.is_a?(Hash) && assignments_content[FEATURES_KEY]

        raise FileContentError, "Unexpected content found in #{path}. Use `bin/featuremap validate` to regenerate it and try again."
      rescue Psych::SyntaxError => e
        raise FileContentError, "Invalid YAML content found at #{path}. Error: #{e.message} Use `bin/featuremap validate` to generate it and try again."
      rescue Errno::ENOENT
        raise FileContentError, "No feature assignments file found at #{path}. Use `bin/featuremap validate` to generate it and try again."
      end
    end
  end
end
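To make the generated file's shape concrete, here is a small sketch of reading `.feature_map/assignments.yml` back with plain YAML, using the same top-level keys this class writes ('files' and 'features'). The specific path, feature name, and team name are hypothetical, and the sketch assumes `bin/featuremap validate` has already written the file.

require 'yaml'

assignments = YAML.load_file('.feature_map/assignments.yml')

# Per-file assignments, keyed by relative path (hypothetical values shown):
assignments['files']['app/models/company.rb']
# => { "feature" => "Onboarding", "mapper" => "Annotations at the top of file" }

# Per-feature rollups, keyed by feature name ("teams" appears only when code ownership is not skipped):
assignments['features']['Onboarding']
# => { "files" => ["app/models/company.rb"], "teams" => ["Growth"] }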
data/lib/feature_map/private/code_cov.rb
@@ -0,0 +1,96 @@

# typed: strict
# frozen_string_literal: true

require 'faraday'

module FeatureMap
  module Private
    # This class is responsible for all interactions with the CodeCov platform.
    class CodeCov
      CODE_COV_API_BASE_URL = 'https://api.codecov.io/api/v2/'

      class ApiError < StandardError; end
      class ConfigurationError < StandardError; end

      extend T::Sig

      FilePath = T.type_alias { String }
      CoverageStat = T.type_alias { String }

      Coverage = T.type_alias do
        T::Hash[
          CoverageStat,
          Integer
        ]
      end

      TestCoverageStats = T.type_alias do
        T::Hash[
          FilePath,
          Coverage
        ]
      end

      sig { params(commit_sha: String, api_token: String).returns(TestCoverageStats) }
      def self.fetch_coverage_stats(commit_sha, api_token)
        commit_details_response = fetch_commit_details(commit_sha, api_token)
        raise ApiError, "Failed to retrieve CodeCov stats for commit #{commit_sha}. Response: #{commit_details_response.status} - #{commit_details_response.body}" unless commit_details_response.success?

        build_coverage_status(commit_details_response.body)
      end

      sig { params(commit_sha: String, api_token: String).returns(T.untyped) }
      def self.fetch_commit_details(commit_sha, api_token)
        conn.get("#{service}/#{owner}/repos/#{repo}/commits/#{commit_sha}",
                 {},
                 { 'Authorization' => "Bearer #{api_token}" })
      end

      sig { params(commit_details: T::Hash[T.untyped, T.untyped]).returns(TestCoverageStats) }
      def self.build_coverage_status(commit_details)
        file_coverage_details = commit_details.dig('report', 'files')
        raise ApiError, 'No file coverage information returned from CodeCov.' unless file_coverage_details

        file_coverage_details.each_with_object({}) do |file_coverage, coverage_stats|
          file_path = file_coverage['name']
          file_coverage_stats = file_coverage['totals']

          next if !file_path || !file_coverage_stats

          coverage_stats[file_path] = {
            'lines' => file_coverage_stats['lines'],
            'hits' => file_coverage_stats['hits'],
            'misses' => file_coverage_stats['misses']
          }
        end
      end

      # TODO: Move these values to config.
      sig { returns(String) }
      def self.service
        Private.configuration.code_cov['service'] ||
          (raise ConfigurationError, 'Missing CodeCov configuration: service')
      end

      sig { returns(String) }
      def self.owner
        Private.configuration.code_cov['owner'] ||
          (raise ConfigurationError, 'Missing CodeCov configuration: owner')
      end

      sig { returns(String) }
      def self.repo
        Private.configuration.code_cov['repo'] ||
          (raise ConfigurationError, 'Missing CodeCov configuration: repo')
      end

      sig { returns(Faraday::Connection) }
      def self.conn
        @conn ||= T.let(Faraday.new(url: CODE_COV_API_BASE_URL) do |f|
          f.request :json
          f.response :json
        end, T.nilable(Faraday::Connection))
      end
    end
  end
end
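A minimal sketch of pulling coverage stats through this class, assuming the `code_cov` section of the FeatureMap configuration (service, owner, repo) is populated and a valid CodeCov API token is available. The environment variable names and fallback values below are placeholders, not part of the gem.

# Placeholder inputs for illustration only.
commit_sha = ENV.fetch('GIT_COMMIT_SHA', 'abc1234')
api_token  = ENV.fetch('CODECOV_API_TOKEN', 'secret-token')

stats = FeatureMap::Private::CodeCov.fetch_coverage_stats(commit_sha, api_token)

# `stats` maps each file path to { 'lines' => Integer, 'hits' => Integer, 'misses' => Integer }.
stats.each do |path, coverage|
  puts "#{path}: #{coverage['hits']}/#{coverage['lines']} lines covered"
end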
data/lib/feature_map/private/cyclomatic_complexity_calculator.rb
@@ -0,0 +1,46 @@

# typed: strict
# frozen_string_literal: true

require 'parser/current'

module FeatureMap
  module Private
    class CyclomaticComplexityCalculator
      extend T::Sig

      COMPLEXITY_NODES = %i[
        if case while until for
        rescue when and or
      ].freeze

      sig { params(ast: T.nilable(Parser::AST::Node)).void }
      def initialize(ast)
        @ast = ast
        @complexity = T.let(1, Integer) # Start at 1 for the base path
      end

      sig { returns(Integer) }
      def calculate
        process(@ast)
        @complexity
      end

      private

      sig { params(node: T.nilable(T.any(Parser::AST::Node, Symbol, Integer, String, NilClass))).void }
      def process(node)
        return unless node.is_a?(Parser::AST::Node)

        # Increment complexity for each branching node
        @complexity += 1 if COMPLEXITY_NODES.include?(node.type)

        # Process children
        node.children.each do |child|
          # Nodes can have children that are Symbols, Integers, Strings, or nil
          # We only want to process actual AST nodes
          process(child) if child.is_a?(Parser::AST::Node)
        end
      end
    end
  end
end
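A small sketch of the calculator on its own, assuming the feature_map gem and the `parser` gem required above are both available; the sample method is invented for the example and contains one `if` modifier, one `case`, and two `when` branches, each of which adds one to the base path.

require 'parser/current'

source = <<~RUBY
  def shipping_cost(order)
    return 0 if order.total > 100

    case order.region
    when :domestic then 5
    when :international then 20
    else 10
    end
  end
RUBY

ast = Parser::CurrentRuby.parse(source)
FeatureMap::Private::CyclomaticComplexityCalculator.new(ast).calculate
# => 5 (base path + `if` + `case` + two `when` nodes)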