ecosystems-bibliothecary 14.3.0 → 15.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +32 -0
- data/README.md +8 -23
- data/bibliothecary.gemspec +5 -9
- data/lib/bibliothecary/analyser.rb +0 -31
- data/lib/bibliothecary/cli.rb +35 -26
- data/lib/bibliothecary/configuration.rb +1 -6
- data/lib/bibliothecary/dependency.rb +1 -4
- data/lib/bibliothecary/parsers/bentoml.rb +0 -2
- data/lib/bibliothecary/parsers/bower.rb +0 -1
- data/lib/bibliothecary/parsers/cargo.rb +12 -10
- data/lib/bibliothecary/parsers/carthage.rb +51 -15
- data/lib/bibliothecary/parsers/clojars.rb +14 -18
- data/lib/bibliothecary/parsers/cocoapods.rb +100 -19
- data/lib/bibliothecary/parsers/cog.rb +0 -2
- data/lib/bibliothecary/parsers/conan.rb +156 -0
- data/lib/bibliothecary/parsers/conda.rb +0 -3
- data/lib/bibliothecary/parsers/cpan.rb +0 -2
- data/lib/bibliothecary/parsers/cran.rb +40 -19
- data/lib/bibliothecary/parsers/docker.rb +0 -2
- data/lib/bibliothecary/parsers/dub.rb +33 -8
- data/lib/bibliothecary/parsers/dvc.rb +0 -2
- data/lib/bibliothecary/parsers/elm.rb +13 -3
- data/lib/bibliothecary/parsers/go.rb +14 -5
- data/lib/bibliothecary/parsers/hackage.rb +132 -24
- data/lib/bibliothecary/parsers/haxelib.rb +14 -4
- data/lib/bibliothecary/parsers/hex.rb +37 -20
- data/lib/bibliothecary/parsers/homebrew.rb +0 -2
- data/lib/bibliothecary/parsers/julia.rb +0 -2
- data/lib/bibliothecary/parsers/maven.rb +35 -25
- data/lib/bibliothecary/parsers/meteor.rb +14 -4
- data/lib/bibliothecary/parsers/mlflow.rb +0 -2
- data/lib/bibliothecary/parsers/npm.rb +47 -59
- data/lib/bibliothecary/parsers/nuget.rb +22 -21
- data/lib/bibliothecary/parsers/ollama.rb +0 -2
- data/lib/bibliothecary/parsers/packagist.rb +0 -3
- data/lib/bibliothecary/parsers/pub.rb +0 -2
- data/lib/bibliothecary/parsers/pypi.rb +54 -35
- data/lib/bibliothecary/parsers/rubygems.rb +92 -27
- data/lib/bibliothecary/parsers/shard.rb +0 -1
- data/lib/bibliothecary/parsers/swift_pm.rb +77 -29
- data/lib/bibliothecary/parsers/vcpkg.rb +68 -17
- data/lib/bibliothecary/runner.rb +2 -15
- data/lib/bibliothecary/version.rb +1 -1
- data/lib/bibliothecary.rb +0 -4
- metadata +2 -110
- data/.codeclimate.yml +0 -25
- data/.github/CONTRIBUTING.md +0 -195
- data/.github/workflows/ci.yml +0 -25
- data/.gitignore +0 -10
- data/.rspec +0 -2
- data/.rubocop.yml +0 -69
- data/.ruby-version +0 -1
- data/.tidelift +0 -1
- data/CODE_OF_CONDUCT.md +0 -74
- data/Gemfile +0 -35
- data/Rakefile +0 -18
- data/bin/benchmark +0 -386
- data/bin/console +0 -15
- data/bin/setup +0 -8
- data/lib/bibliothecary/multi_parsers/bundler_like_manifest.rb +0 -26
- data/lib/bibliothecary/multi_parsers/cyclonedx.rb +0 -170
- data/lib/bibliothecary/multi_parsers/dependencies_csv.rb +0 -155
- data/lib/bibliothecary/multi_parsers/json_runtime.rb +0 -22
- data/lib/bibliothecary/multi_parsers/spdx.rb +0 -149
- data/lib/bibliothecary/purl_util.rb +0 -37
- data/lib/bibliothecary/runner/multi_manifest_filter.rb +0 -92
- data/lib/sdl_parser.rb +0 -30
data/lib/bibliothecary/multi_parsers/cyclonedx.rb
DELETED

@@ -1,170 +0,0 @@
-# frozen_string_literal: true
-
-require "json"
-require "ox"
-
-# packageurl-ruby uses pattern-matching (https://docs.ruby-lang.org/en/2.7.0/NEWS.html#label-Pattern+matching)
-# which warns a whole bunch in Ruby 2.7 as being an experimental feature, but has
-# been accepted in Ruby 3.0 (https://rubyreferences.github.io/rubychanges/3.0.html#pattern-matching).
-Warning[:experimental] = false
-require "package_url"
-Warning[:experimental] = true
-
-module Bibliothecary
-  module MultiParsers
-    module CycloneDX
-      include Bibliothecary::Analyser
-      include Bibliothecary::Analyser::TryCache
-
-      NoComponents = Class.new(StandardError)
-
-      class ManifestEntries
-        # If a purl type (key) exists, it will be used in a manifest for
-        # the key's value. If not, it's ignored.
-        #
-        # https://github.com/package-url/purl-spec/blob/master/PURL-TYPES.rst
-        PURL_TYPE_MAPPING = {
-          "brew" => :homebrew,
-          "cargo" => :cargo,
-          "carthage" => :carthage,
-          "clojars" => :clojars,
-          "cocoapods" => :cocoapods,
-          "composer" => :packagist,
-          "conda" => :conda,
-          "cpan" => :cpan,
-          "cran" => :cran,
-          "docker" => :docker,
-          "dub" => :dub,
-          "elm" => :elm,
-          "gem" => :rubygems,
-          "golang" => :go,
-          "hackage" => :hackage,
-          "haxe" => :haxelib,
-          "hex" => :hex,
-          "julia" => :julia,
-          "maven" => :maven,
-          "meteor" => :meteor,
-          "npm" => :npm,
-          "nuget" => :nuget,
-          "pub" => :pub,
-          "pypi" => :pypi,
-          "swift" => :swift_pm,
-        }
-
-        attr_reader :manifests
-
-        def initialize(parse_queue:)
-          @manifests = {}
-
-          # Instead of recursing, we'll work through a queue of components
-          # to process, letting the different parser add components to the
-          # queue however they need to pull them from the source document.
-          @parse_queue = parse_queue.dup
-        end
-
-        def add(purl, source = nil)
-          mapping = PurlUtil::PURL_TYPE_MAPPING[purl.type]
-          return unless mapping
-
-          @manifests[mapping] ||= Set.new
-          @manifests[mapping] << Dependency.new(
-            name: PurlUtil.full_name(purl),
-            requirement: purl.version,
-            platform: mapping.to_s,
-            type: "lockfile",
-            source: source
-          )
-        end
-
-        # Iterates over each manifest entry in the parse_queue, and accepts a block which will
-        # be called on each component. The block has two jobs: 1) add more sub-components
-        # to parse (if they exist), and 2) return the components purl.
-        def parse!(source = nil, &block)
-          until @parse_queue.empty?
-            component = @parse_queue.shift
-
-            purl_text = block.call(component, @parse_queue)
-
-            next unless purl_text
-
-            purl = PackageURL.parse(purl_text)
-
-            add(purl, source)
-          end
-        end
-
-        def [](key)
-          @manifests[key]&.to_a
-        end
-      end
-
-      def self.mapping
-        {
-          match_filename("cyclonedx.json") => {
-            kind: "lockfile",
-            parser: :parse_cyclonedx_json,
-            ungroupable: true,
-          },
-          match_extension("cdx.json") => {
-            kind: "lockfile",
-            parser: :parse_cyclonedx_json,
-            ungroupable: true,
-          },
-          match_filename("cyclonedx.xml") => {
-            kind: "lockfile",
-            parser: :parse_cyclonedx_xml,
-            ungroupable: true,
-          },
-          match_extension(".cdx.xml") => {
-            kind: "lockfile",
-            parser: :parse_cyclonedx_xml,
-            ungroupable: true,
-          },
-        }
-      end
-
-      def parse_cyclonedx_json(file_contents, options: {})
-        manifest = try_cache(options, options[:filename]) do
-          JSON.parse(file_contents)
-        end
-
-        raise NoComponents unless manifest["components"]
-
-        entries = ManifestEntries.new(parse_queue: manifest["components"])
-
-        entries.parse!(options.fetch(:filename, nil)) do |component, parse_queue|
-          parse_queue.concat(component["components"]) if component["components"]
-
-          component["purl"]
-        end
-
-        ParserResult.new(dependencies: entries[platform_name.to_sym] || [])
-      end
-
-      def parse_cyclonedx_xml(file_contents, options: {})
-        manifest = try_cache(options, options[:filename]) do
-          Ox.parse(file_contents)
-        end
-
-        root = manifest
-        if root.respond_to?(:bom)
-          root = root.bom
-        end
-
-        raise NoComponents unless root.locate("components").first
-
-        entries = ManifestEntries.new(parse_queue: root.locate("components/*"))
-
-        entries.parse!(options.fetch(:filename, nil)) do |component, parse_queue|
-          # #locate returns an empty array if nothing is found, so we can
-          # always safely concatenate it to the parse queue.
-          parse_queue.concat(component.locate("components/*"))
-
-          component.locate("purl").first&.text
-        end
-
-        ParserResult.new(dependencies: entries[platform_name.to_sym] || [])
-      end
-    end
-  end
-end
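
For reference, a minimal hand-written sketch (not taken from the gem; the SBOM data and variable names are made up) of the queue-based traversal the removed parse_cyclonedx_json performed: the queue starts at the top-level "components" array, nested "components" arrays are appended to the queue, and each "purl" string would then be mapped to a lockfile Dependency via PURL_TYPE_MAPPING.

require "json"

# A made-up two-level CycloneDX-style document.
sbom = <<~JSON
  {
    "components": [
      { "purl": "pkg:npm/left-pad@1.3.0",
        "components": [ { "purl": "pkg:npm/lodash@4.17.21" } ] },
      { "purl": "pkg:gem/rails@7.1.0" }
    ]
  }
JSON

queue = JSON.parse(sbom).fetch("components", []).dup
purls = []
until queue.empty?
  component = queue.shift
  # Nested components are enqueued rather than recursed into.
  queue.concat(component["components"]) if component["components"]
  purls << component["purl"] if component["purl"]
end

p purls
# => ["pkg:npm/left-pad@1.3.0", "pkg:gem/rails@7.1.0", "pkg:npm/lodash@4.17.21"]
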
data/lib/bibliothecary/multi_parsers/dependencies_csv.rb
DELETED

@@ -1,155 +0,0 @@
-# frozen_string_literal: true
-
-require "csv"
-
-module Bibliothecary
-  module MultiParsers
-    module DependenciesCSV
-      include Bibliothecary::Analyser
-      include Bibliothecary::Analyser::TryCache
-
-      def self.mapping
-        {
-          match_filename("dependencies.csv") => {
-            kind: "lockfile",
-            ungroupable: true,
-            parser: :parse_dependencies_csv,
-          },
-        }
-      end
-
-      # Processing a CSV file isn't as exact as using a real manifest file,
-      # but you can get pretty close as long as the data you're importing
-      # is simple.
-      class CSVFile
-        # Header structures are:
-        #
-        # <field to fill in for dependency> => {
-        #   match: [<regexp of incoming column name to match in priority order, highest priority first>...],
-        #   [default]: <optional default value for this field>
-        # }
-        HEADERS = {
-          "platform" => {
-            match: [
-              /^platform$/i,
-            ],
-          },
-          "name" => {
-            match: [
-              /^name$/i,
-            ],
-          },
-          # Manifests have versions that can have operators.
-          # However, since Bibliothecary only currently supports analyzing a
-          # single file as a single thing (either manifest or lockfile)
-          # we can't return manifest-y data. Only take the lockfile requirement
-          # when processing dependencies.csv for now.
-          "requirement" => {
-            match: [
-              /^(lockfile |)requirement$/i,
-              /^version$/i,
-            ],
-          },
-          "type" => {
-            default: "runtime",
-            match: [
-              /^(lockfile |)type$/i,
-              /^(manifest |)type$/i,
-            ],
-          },
-        }.freeze
-
-        attr_reader :result
-
-        def initialize(file_contents)
-          @file_contents = file_contents
-
-          @result = nil
-
-          # A Hash of "our field name" => ["header in CSV file", "lower priority header in CSV file"]
-          @header_mappings = {}
-        end
-
-        def parse!
-          table = parse_and_validate_csv_file
-
-          @result = table.map.with_index do |row, idx|
-            HEADERS.each_with_object({}) do |(header, info), obj|
-              # find the first non-empty field in the row for this header, or nil if not found
-              row_data = row[@header_mappings[header]]
-
-              # some column have default data to fall back on
-              if row_data
-                obj[header.to_sym] = row_data
-              elsif info.key?(:default)
-                # if the default is nil, don't even add the key to the hash
-                obj[header.to_sym] = info[:default] if info[:default]
-              else
-                # use 1-based index just like the 'csv' std lib, and count the headers as first row.
-                raise "Missing required field '#{header}' on line #{idx + 2}."
-              end
-            end
-          end
-        end
-
-        private
-
-        def parse_and_validate_csv_file
-          table = CSV.parse(@file_contents, headers: true)
-
-          header_examination_results = map_table_headers_to_local_lookups(table, HEADERS)
-          unless header_examination_results[:missing].empty?
-            raise "Missing required headers #{header_examination_results[:missing].join(', ')} in CSV. Check to make sure header names are all lowercase."
-          end
-
-          @header_mappings = header_examination_results[:found]
-
-          table
-        end
-
-        def map_table_headers_to_local_lookups(table, local_lookups)
-          result = local_lookups.each_with_object({ found: {}, missing: [] }) do |(header, info), obj|
-            results = table.headers.each_with_object([]) do |table_header, matches|
-              info[:match].each_with_index do |match_regexp, index|
-                matches << [table_header, index] if table_header[match_regexp]
-              end
-            end
-
-            if results.empty?
-              # if a header has a default value it's optional
-              obj[:missing] << header unless info.key?(:default)
-            else
-              # select the highest priority header possible
-              obj[:found][header] ||= nil
-              obj[:found][header] = ([obj[:found][header]] + results).compact.min_by(&:last)
-            end
-          end
-
-          # strip off the priorities. only one mapping should remain.
-          result[:found].transform_values!(&:first)
-
-          result
-        end
-      end
-
-      def parse_dependencies_csv(file_contents, options: {})
-        csv_file = try_cache(options, options[:filename]) do
-          raw_csv_file = CSVFile.new(file_contents)
-          raw_csv_file.parse!
-          raw_csv_file
-        end
-
-        dependencies = csv_file
-          .result
-          .find_all { |dependency| dependency[:platform] == platform_name.to_s }
-          .map do |dep_kvs|
-            Dependency.new(
-              **dep_kvs, source: options.fetch(:filename, nil)
-            )
-          end
-
-        ParserResult.new(dependencies: dependencies)
-      end
-    end
-  end
-end
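
For reference, a hand-written example (not from the gem; the file contents are made up) of a dependencies.csv the removed HEADERS rules would have accepted: "platform" and "name" columns are required, a "version" column satisfies the requirement field, and a missing "type" column falls back to the "runtime" default.

require "csv"

csv = <<~CSV
  platform,name,version
  npm,left-pad,1.3.0
  rubygems,rails,7.1.0
CSV

rows = CSV.parse(csv, headers: true).map do |row|
  { platform: row["platform"], name: row["name"],
    requirement: row["version"], type: "runtime" }
end

p rows.first
# => {:platform=>"npm", :name=>"left-pad", :requirement=>"1.3.0", :type=>"runtime"}
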
data/lib/bibliothecary/multi_parsers/json_runtime.rb
DELETED

@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-module Bibliothecary
-  module MultiParsers
-    # Provide JSON Runtime Manifest parsing
-    module JSONRuntime
-      def parse_json_runtime_manifest(file_contents, options: {})
-        dependencies = JSON.parse(file_contents).fetch("dependencies", []).map do |name, requirement|
-          Dependency.new(
-            platform: platform_name,
-            name: name,
-            requirement: requirement,
-            type: "runtime",
-            source: options.fetch(:filename, nil)
-          )
-        end
-
-        ParserResult.new(dependencies: dependencies)
-      end
-    end
-  end
-end
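
For reference, the name => requirement shape the removed parse_json_runtime_manifest iterated over, shown with made-up data (a sketch, not gem code):

require "json"

manifest = '{ "dependencies": { "express": "^4.18.0", "debug": "~2.6.9" } }'

deps = JSON.parse(manifest).fetch("dependencies", []).map do |name, requirement|
  { name: name, requirement: requirement, type: "runtime" }
end

p deps.first
# => {:name=>"express", :requirement=>"^4.18.0", :type=>"runtime"}
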
data/lib/bibliothecary/multi_parsers/spdx.rb
DELETED

@@ -1,149 +0,0 @@
-# frozen_string_literal: true
-
-# packageurl-ruby uses pattern-matching (https://docs.ruby-lang.org/en/2.7.0/NEWS.html#label-Pattern+matching)
-# which warns a whole bunch in Ruby 2.7 as being an experimental feature, but has
-# been accepted in Ruby 3.0 (https://rubyreferences.github.io/rubychanges/3.0.html#pattern-matching).
-Warning[:experimental] = false
-require "package_url"
-Warning[:experimental] = true
-
-module Bibliothecary
-  module MultiParsers
-    module Spdx
-      include Bibliothecary::Analyser
-      include Bibliothecary::Analyser::TryCache
-
-      # e.g. 'SomeText:' (allowing for leading whitespace)
-      WELLFORMED_LINE_REGEXP = /^\s*[a-zA-Z]+:/
-
-      # e.g. 'PackageName: (allowing for excessive whitespace)
-      PACKAGE_NAME_REGEXP = /^\s*PackageName:\s*(.*)/
-
-      # e.g. 'PackageVersion:' (allowing for excessive whitespace)
-      PACKAGE_VERSION_REGEXP = /^\s*PackageVersion:\s*(.*)/
-
-      # e.g. "ExternalRef: PACKAGE-MANAGER purl (allowing for excessive whitespace)
-      PURL_REGEXP = /^\s*ExternalRef:\s*PACKAGE[-|_]MANAGER\s*purl\s*(.*)/
-
-      NoEntries = Class.new(StandardError)
-      MalformedFile = Class.new(StandardError)
-
-      def self.mapping
-        {
-          match_extension(".spdx") => {
-            kind: "lockfile",
-            parser: :parse_spdx_tag_value,
-            ungroupable: true,
-          },
-          match_extension(".spdx.json") => {
-            kind: "lockfile",
-            parser: :parse_spdx_json,
-            ungroupable: true,
-          },
-        }
-      end
-
-      def parse_spdx_tag_value(file_contents, options: {})
-        entries = try_cache(options, options[:filename]) do
-          parse_spdx_tag_value_file_contents(file_contents, options.fetch(:filename, nil))
-        end
-
-        raise NoEntries if entries.empty?
-
-        Bibliothecary::ParserResult.new(dependencies: entries[platform_name.to_sym] || [])
-      end
-
-      def parse_spdx_tag_value_file_contents(file_contents, source = nil)
-        entries = {}
-        spdx_name = spdx_version = platform = purl_name = purl_version = nil
-
-        file_contents.each_line do |line|
-          stripped_line = line.strip
-          next if skip_tag_value_line?(stripped_line)
-
-          raise MalformedFile unless stripped_line.match?(WELLFORMED_LINE_REGEXP)
-
-          if (match = stripped_line.match(PACKAGE_NAME_REGEXP))
-            # Per the spec:
-            # > A new package Information section is denoted by the package name (7.1) field.
-            add_entry(entries: entries, platform: platform, purl_name: purl_name,
-                      spdx_name: spdx_name, purl_version: purl_version, spdx_version: spdx_version,
-                      source: source)
-
-            # reset for this new package
-            spdx_name = spdx_version = platform = purl_name = purl_version = nil
-
-            # capture the new package's name
-            spdx_name = match[1]
-          elsif (match = stripped_line.match(PACKAGE_VERSION_REGEXP))
-            spdx_version = match[1]
-          elsif (match = stripped_line.match(PURL_REGEXP))
-            purl = PackageURL.parse(match[1])
-            platform ||= PurlUtil::PURL_TYPE_MAPPING[purl.type]
-            purl_name ||= PurlUtil.full_name(purl)
-            purl_version ||= purl.version
-          end
-        end
-
-        add_entry(entries: entries, platform: platform, purl_name: purl_name,
-                  spdx_name: spdx_name, purl_version: purl_version, spdx_version: spdx_version,
-                  source: source)
-
-        entries
-      end
-
-      def skip_tag_value_line?(stripped_line)
-        # Ignore blank lines and comments
-        stripped_line.empty? || stripped_line.start_with?("#")
-      end
-
-      def parse_spdx_json(file_contents, options: {})
-        entries = try_cache(options, options[:filename]) do
-          parse_spdx_json_file_contents(file_contents, options.fetch(:filename, nil))
-        end
-
-        raise NoEntries if entries.empty?
-
-        Bibliothecary::ParserResult.new(dependencies: entries[platform_name.to_sym] || [])
-      end
-
-      def parse_spdx_json_file_contents(file_contents, source = nil)
-        entries = {}
-        manifest = JSON.parse(file_contents)
-
-        manifest["packages"]&.each do |package|
-          spdx_name = package["name"]
-          spdx_version = package["versionInfo"]
-
-          first_purl_string = package["externalRefs"]&.find { |ref| ref["referenceType"] == "purl" }&.dig("referenceLocator")
-          purl = first_purl_string && PackageURL.parse(first_purl_string)
-          platform = PurlUtil::PURL_TYPE_MAPPING[purl&.type]
-          purl_name = PurlUtil.full_name(purl)
-          purl_version = purl&.version
-
-          add_entry(entries: entries, platform: platform, purl_name: purl_name,
-                    spdx_name: spdx_name, purl_version: purl_version, spdx_version: spdx_version,
-                    source: source)
-        end
-
-        entries
-      end
-
-      def add_entry(entries:, platform:, purl_name:, spdx_name:, purl_version:, spdx_version:, source: nil)
-        package_name = purl_name || spdx_name
-        package_version = purl_version || spdx_version
-
-        return unless platform && package_name && package_version
-
-        entries[platform.to_sym] ||= []
-        entries[platform.to_sym] << Dependency.new(
-          platform: platform.to_s,
-          name: package_name,
-          requirement: package_version,
-          type: "lockfile",
-          source: source
-        )
-      end
-    end
-  end
-end
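
For reference, a hand-written tag:value fragment (made up, not from the gem's fixtures) and the same three regular expressions the removed parser used to pick out the package name, version, and purl:

fragment = <<~SPDX
  PackageName: left-pad
  PackageVersion: 1.3.0
  ExternalRef: PACKAGE-MANAGER purl pkg:npm/left-pad@1.3.0
SPDX

name    = fragment[/^\s*PackageName:\s*(.*)/, 1]
version = fragment[/^\s*PackageVersion:\s*(.*)/, 1]
purl    = fragment[/^\s*ExternalRef:\s*PACKAGE[-|_]MANAGER\s*purl\s*(.*)/, 1]

p [name, version, purl]
# => ["left-pad", "1.3.0", "pkg:npm/left-pad@1.3.0"]
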
data/lib/bibliothecary/purl_util.rb
DELETED

@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-module Bibliothecary
-  class PurlUtil
-    # If a purl type (key) exists, it will be used in a manifest for
-    # the key's value. If not, it's ignored.
-    #
-    # https://github.com/package-url/purl-spec/blob/master/PURL-TYPES.rst
-    PURL_TYPE_MAPPING = {
-      "golang" => :go,
-      "maven" => :maven,
-      "npm" => :npm,
-      "cargo" => :cargo,
-      "composer" => :packagist,
-      "conda" => :conda,
-      "cran" => :cran,
-      "gem" => :rubygems,
-      "nuget" => :nuget,
-      "pypi" => :pypi,
-    }.freeze
-
-    # @param purl [PackageURL]
-    # @return [String] The properly namespaced package name
-    def self.full_name(purl)
-      return nil if purl.nil?
-
-      parts = [purl.namespace, purl.name].compact
-
-      case purl.type
-      when "maven"
-        parts.join(":")
-      else
-        parts.join("/")
-      end
-    end
-  end
-end
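
For reference, the namespace/name joining rule from the removed PurlUtil.full_name, restated as a standalone sketch (the helper name and literal arguments here are illustrative, and packageurl-ruby is deliberately not required):

def full_name(type, namespace, name)
  parts = [namespace, name].compact
  # Maven coordinates join group and artifact with ":", everything else with "/".
  type == "maven" ? parts.join(":") : parts.join("/")
end

p full_name("maven", "org.apache.commons", "commons-lang3") # => "org.apache.commons:commons-lang3"
p full_name("npm", "@babel", "core")                        # => "@babel/core"
p full_name("gem", nil, "rails")                            # => "rails"
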
data/lib/bibliothecary/runner/multi_manifest_filter.rb
DELETED

@@ -1,92 +0,0 @@
-# frozen_string_literal: true
-
-module Bibliothecary
-  class Runner
-    class MultiManifestFilter
-      # Wrap up a file analysis for easier validity testing
-      class FileAnalysis
-        def initialize(file_analysis)
-          @file_analysis = file_analysis
-        end
-
-        # Determine if we should skip this file analysis when processing
-        # @return [Boolean] True if we should skip processing
-        def skip?
-          !@file_analysis ||
-            !@file_analysis[:dependencies] ||
-            @file_analysis[:dependencies].empty?
-        end
-      end
-
-      def initialize(path:, related_files_info_entries:, runner:)
-        @path = path
-        @related_files_info_entries = related_files_info_entries
-        @runner = runner
-      end
-
-      # Standalone multi manifest files should *always* be treated as lockfiles,
-      # since there's no human-written manifest file to go with them.
-      def files_to_check
-        @files_to_check ||= @related_files_info_entries.each_with_object({}) do |files_info, all|
-          files_info.lockfiles.each do |file|
-            all[file] ||= 0
-            all[file] += 1
-          end
-        end
-      end
-
-      def results
-        partition_file_entries!
-
-        (no_lockfile_results + single_file_results + multiple_file_results).uniq
-      end
-
-      def no_lockfile_results
-        @no_lockfile_results ||= @related_files_info_entries.find_all { |rfi| rfi.lockfiles.empty? }
-      end
-
-      def single_file_results
-        @single_file_results ||= @single_file_entries.map do |file|
-          @related_files_info_entries.find { |rfi| rfi.lockfiles.include?(file) }
-        end
-      end
-
-      def multiple_file_results
-        return @multiple_file_results if @multiple_file_results
-
-        @multiple_file_results = []
-
-        each_analysis_and_rfis do |analysis, rfis_for_file|
-          rfis_for_file.each do |rfi|
-            file_analysis = FileAnalysis.new(
-              analysis.find { |a| a[:platform] == rfi.platform }
-            )
-
-            next if file_analysis.skip?
-
-            @multiple_file_results << rfi
-          end
-        end
-
-        @multiple_file_results
-      end
-
-      def each_analysis_and_rfis
-        @multiple_file_entries.each do |file|
-          contents = Bibliothecary.utf8_string(File.read(File.join(@path, file)))
-          analysis = @runner.analyse_file(file, contents)
-          rfis_for_file = @related_files_info_entries.find_all { |rfi| rfi.lockfiles.include?(file) }
-
-          yield analysis, rfis_for_file
-        end
-      end
-
-      def partition_file_entries!
-        @single_file_entries, @multiple_file_entries = files_to_check.partition { |_file, count| count == 1 }
-
-        @single_file_entries = @single_file_entries.map(&:first)
-        @multiple_file_entries = @multiple_file_entries.map(&:first)
-      end
-    end
-  end
-end
data/lib/sdl_parser.rb
DELETED

@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require "sdl4r"
-
-class SdlParser
-  attr_reader :contents, :type
-
-  def initialize(type, contents, platform, source = nil)
-    @contents = contents
-    @type = type || "runtime"
-    @platform = platform
-    @source = source
-  end
-
-  def dependencies
-    parse.children("dependency").inject([]) do |deps, dep|
-      deps.push(Bibliothecary::Dependency.new(
-        platform: @platform,
-        name: dep.value,
-        requirement: dep.attribute("version") || ">= 0",
-        type: type,
-        source: @source
-      ))
-    end.uniq
-  end
-
-  def parse
-    SDL4R.read(contents)
-  end
-end
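
For reference, a dub.sdl-style fragment of the kind the removed SdlParser handled (the package names and requirements are made up; this sketch assumes the sdl4r gem is installed, as the removed class required):

require "sdl4r"

sdl = <<~SDL
  dependency "vibe-d" version="~>0.9.5"
  dependency "mir"
SDL

SDL4R.read(sdl).children("dependency").each do |dep|
  # dep.value is the package name; a missing version attribute fell back to ">= 0".
  puts "#{dep.value} #{dep.attribute("version") || ">= 0"}"
end
# vibe-d ~>0.9.5
# mir >= 0
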