puppet_forge 1.0.6 → 2.0.0
This diff compares the content of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- data/CHANGELOG.md +23 -0
- data/MAINTAINERS +13 -0
- data/README.md +48 -6
- data/lib/puppet_forge.rb +4 -0
- data/lib/puppet_forge/connection.rb +81 -0
- data/lib/puppet_forge/connection/connection_failure.rb +26 -0
- data/lib/puppet_forge/error.rb +34 -0
- data/lib/{her → puppet_forge}/lazy_accessors.rb +20 -27
- data/lib/{her → puppet_forge}/lazy_relations.rb +28 -9
- data/lib/puppet_forge/middleware/symbolify_json.rb +72 -0
- data/lib/puppet_forge/tar.rb +10 -0
- data/lib/puppet_forge/tar/mini.rb +81 -0
- data/lib/puppet_forge/unpacker.rb +68 -0
- data/lib/puppet_forge/v3.rb +11 -0
- data/lib/puppet_forge/v3/base.rb +106 -73
- data/lib/puppet_forge/v3/base/paginated_collection.rb +23 -14
- data/lib/puppet_forge/v3/metadata.rb +197 -0
- data/lib/puppet_forge/v3/module.rb +2 -1
- data/lib/puppet_forge/v3/release.rb +33 -8
- data/lib/puppet_forge/v3/user.rb +2 -0
- data/lib/puppet_forge/version.rb +1 -1
- data/puppet_forge.gemspec +6 -3
- data/spec/fixtures/v3/modules/puppetlabs-apache.json +21 -1
- data/spec/fixtures/v3/releases/puppetlabs-apache-0.0.1.json +4 -1
- data/spec/integration/forge/v3/module_spec.rb +79 -0
- data/spec/integration/forge/v3/release_spec.rb +75 -0
- data/spec/integration/forge/v3/user_spec.rb +70 -0
- data/spec/spec_helper.rb +15 -8
- data/spec/unit/forge/connection/connection_failure_spec.rb +30 -0
- data/spec/unit/forge/connection_spec.rb +53 -0
- data/spec/unit/{her → forge}/lazy_accessors_spec.rb +20 -13
- data/spec/unit/{her → forge}/lazy_relations_spec.rb +60 -46
- data/spec/unit/forge/middleware/symbolify_json_spec.rb +63 -0
- data/spec/unit/forge/tar/mini_spec.rb +85 -0
- data/spec/unit/forge/tar_spec.rb +9 -0
- data/spec/unit/forge/unpacker_spec.rb +58 -0
- data/spec/unit/forge/v3/base/paginated_collection_spec.rb +68 -46
- data/spec/unit/forge/v3/base_spec.rb +1 -1
- data/spec/unit/forge/v3/metadata_spec.rb +300 -0
- data/spec/unit/forge/v3/module_spec.rb +14 -36
- data/spec/unit/forge/v3/release_spec.rb +9 -30
- data/spec/unit/forge/v3/user_spec.rb +7 -7
- metadata +127 -41
- checksums.yaml +0 -7
- data/lib/puppet_forge/middleware/json_for_her.rb +0 -37
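The file list above shows the main thrust of 2.0.0: the Her-based pieces (the lib/her helpers and the json_for_her middleware) are gone, replaced by PuppetForge's own connection handling (connection.rb, connection_failure.rb, error.rb) and relocated lazy accessors/relations, alongside new tarball handling (tar/mini.rb, unpacker.rb) and module metadata parsing (v3/metadata.rb). A minimal client sketch for the 2.x surface follows; the `PuppetForge.user_agent` accessor and `PuppetForge::Module.find` call are assumptions based on the gem's 2.x documentation, not on anything expanded in this diff.

```ruby
# Sketch only: PuppetForge.user_agent and PuppetForge::Module.find are assumed
# from the 2.x documentation rather than shown in this diff.
require 'puppet_forge'

PuppetForge.user_agent = 'MyTool/1.0.0' # identify your application to the Forge API

mod = PuppetForge::Module.find('puppetlabs-apache')
puts mod.current_release.version        # attribute names follow the Forge v3 API response
```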
data/spec/unit/forge/middleware/symbolify_json_spec.rb

```diff
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe PuppetForge::Middleware::SymbolifyJson do
+  let(:basic_array) { [1, "two", 3] }
+  let(:basic_hash) { { "id" => 1, "data" => "x" } }
+  let(:symbolified_hash) { { :id => 1, :data => "x" } }
+  let(:internal_hash) { { :id => 2, :data => basic_hash } }
+
+  let(:hash_with_array) { { "id" => 3, "data" => basic_array } }
+  let(:array_with_hash) { [1, "two", basic_hash] }
+
+  let(:complex_array) { [array_with_hash, hash_with_array] }
+  let(:complex_hash) { { "id" => 4, "data" => [complex_array, basic_array], "more_data" => hash_with_array } }
+  let(:complex_request) { { "id" => 5, "data" => complex_hash } }
+
+  let(:middleware) { described_class.new() }
+
+  context "#process_array" do
+    it "doesn't change an array with no array or hash inside" do
+      processed_array = middleware.process_array(basic_array)
+      expect(processed_array).to eql( [1, "two", 3] )
+    end
+
+    it "changes all keys of a hash inside the array" do
+      processed_array = middleware.process_array(array_with_hash)
+      expect(processed_array).to eql( [ 1, "two", { :id => 1, :data => "x" } ] )
+    end
+  end
+
+  context "#process_hash" do
+    it "changes all keys that respond to :to_sym into Symbols and doesn't change values." do
+      processed_hash = middleware.process_hash(basic_hash)
+      expect(processed_hash).to eql( { :id => 1, :data => "x" } )
+    end
+
+    it "doesn't change keys that don't respond to :to_sym" do
+      processed_hash = middleware.process_hash(basic_hash.merge({ 1 => 2 }))
+      expect(processed_hash).to eql( { :id => 1, :data => "x", 1 => 2 } )
+    end
+
+    it "can process a hash that is already symbolified" do
+      processed_hash = middleware.process_hash(symbolified_hash)
+      expect(processed_hash).to eql( { :id => 1, :data => "x" })
+    end
+
+    it "can process a hash with a hash inside of it" do
+      processed_hash = middleware.process_hash(internal_hash)
+      expect(processed_hash).to eql( {:id => 2, :data => { :id => 1, :data => "x" } })
+    end
+
+    it "can process a hash with an array inside of it" do
+      processed_hash = middleware.process_hash(hash_with_array)
+      expect(processed_hash).to eql( { :id => 3, :data => [1, "two", 3] } )
+    end
+
+    it "can handle extensively nested arrays and hashes" do
+      processed_hash = middleware.process_hash(complex_request)
+      expect(processed_hash).to eql( { :id => 5, :data => { :id => 4 , :data=>[ [ [1, "two", { :id => 1, :data => "x" } ], { :id=>3, :data => [1, "two", 3] } ], [1, "two", 3] ], :more_data => { :id => 3, :data => [1, "two", 3] } } } )
+    end
+  end
+
+end
+
```
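The middleware under test lives in data/lib/puppet_forge/middleware/symbolify_json.rb (+72), which is not expanded in this diff. As a rough sketch of the behaviour the spec exercises — recursively symbolizing response-body keys while leaving values and non-symbolizable keys alone — a Faraday response middleware might look like the following. Only `process_array` and `process_hash` appear in the spec; the `#call`/`on_complete` wiring and the `process_data` helper are assumptions for illustration.

```ruby
# Minimal sketch, not the gem's actual source.
require 'faraday'

module PuppetForge
  module Middleware
    class SymbolifyJson < Faraday::Middleware
      # Assumed Faraday hook: symbolify the parsed response body on completion.
      def call(env)
        @app.call(env).on_complete do |response_env|
          response_env[:body] = process_data(response_env[:body])
        end
      end

      # Recurse into each element of an array.
      def process_array(array)
        array.map { |item| process_data(item) }
      end

      # Symbolize every key that responds to :to_sym; leave other keys and
      # scalar values untouched, recursing into nested hashes/arrays.
      def process_hash(hash)
        hash.each_with_object({}) do |(key, value), result|
          key = key.to_sym if key.respond_to?(:to_sym)
          result[key] = process_data(value)
        end
      end

      private

      # Helper introduced for this sketch only.
      def process_data(data)
        case data
        when Hash  then process_hash(data)
        when Array then process_array(data)
        else data
        end
      end
    end
  end
end
```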
data/spec/unit/forge/tar/mini_spec.rb

```diff
@@ -0,0 +1,85 @@
+require 'spec_helper'
+
+describe PuppetForge::Tar::Mini do
+  let(:entry_class) do
+    Class.new do
+      attr_accessor :typeflag, :name
+      def initialize(name, typeflag)
+        @name = name
+        @typeflag = typeflag
+      end
+    end
+  end
+  let(:sourcefile) { '/the/module.tar.gz' }
+  let(:destdir) { File.expand_path '/the/dest/dir' }
+  let(:sourcedir) { '/the/src/dir' }
+  let(:destfile) { '/the/dest/file.tar.gz' }
+  let(:minitar) { described_class.new }
+  let(:tarfile_contents) { [entry_class.new('file', '0'), \
+                            entry_class.new('symlink', '2'), \
+                            entry_class.new('invalid', 'F')] }
+
+  it "unpacks a tar file" do
+    unpacks_the_entry(:file_start, 'thefile')
+
+    minitar.unpack(sourcefile, destdir)
+  end
+
+  it "does not allow an absolute path" do
+    unpacks_the_entry(:file_start, '/thefile')
+
+    expect {
+      minitar.unpack(sourcefile, destdir)
+    }.to raise_error(PuppetForge::InvalidPathInPackageError,
+                     "Attempt to install file into \"/thefile\" under \"#{destdir}\"")
+  end
+
+  it "does not allow a file to be written outside the destination directory" do
+    unpacks_the_entry(:file_start, '../../thefile')
+
+    expect {
+      minitar.unpack(sourcefile, destdir)
+    }.to raise_error(PuppetForge::InvalidPathInPackageError,
+                     "Attempt to install file into \"#{File.expand_path('/the/thefile')}\" under \"#{destdir}\"")
+  end
+
+  it "does not allow a directory to be written outside the destination directory" do
+    unpacks_the_entry(:dir, '../../thedir')
+
+    expect {
+      minitar.unpack(sourcefile, destdir)
+    }.to raise_error(PuppetForge::InvalidPathInPackageError,
+                     "Attempt to install file into \"#{File.expand_path('/the/thedir')}\" under \"#{destdir}\"")
+  end
+
+  it "packs a tar file" do
+    writer = double('GzipWriter')
+
+    expect(Zlib::GzipWriter).to receive(:open).with(destfile).and_yield(writer)
+    expect(Archive::Tar::Minitar).to receive(:pack).with(sourcedir, writer)
+
+    minitar.pack(sourcedir, destfile)
+  end
+
+  it "returns filenames in a tar separated into correct categories" do
+    reader = double('GzipReader')
+
+    expect(Zlib::GzipReader).to receive(:open).with(sourcefile).and_yield(reader)
+    expect(Archive::Tar::Minitar).to receive(:open).with(reader).and_return(tarfile_contents)
+    expect(Archive::Tar::Minitar).to receive(:unpack).with(reader, destdir, ['file']).and_yield(:file_start, 'thefile', nil)
+
+    file_lists = minitar.unpack(sourcefile, destdir)
+
+    expect(file_lists[:valid]).to eq(['file'])
+    expect(file_lists[:invalid]).to eq(['invalid'])
+    expect(file_lists[:symlinks]).to eq(['symlink'])
+  end
+
+  def unpacks_the_entry(type, name)
+    reader = double('GzipReader')
+
+    expect(Zlib::GzipReader).to receive(:open).with(sourcefile).and_yield(reader)
+    expect(minitar).to receive(:validate_files).with(reader).and_return({:valid => [name]})
+    expect(Archive::Tar::Minitar).to receive(:unpack).with(reader, destdir, [name]).and_yield(type, name, nil)
+  end
+end
```
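A usage sketch based only on the calls exercised in the spec above: `unpack` returns a hash grouping entries into `:valid`, `:invalid`, and `:symlinks`, entries that would escape the destination raise `PuppetForge::InvalidPathInPackageError`, and `pack` writes a gzipped tarball. The paths here are placeholders.

```ruby
require 'puppet_forge'

tar = PuppetForge::Tar::Mini.new

begin
  # Extract a gzipped module tarball into a destination directory.
  file_lists = tar.unpack('/tmp/mymodule.tar.gz', '/tmp/unpacked')
  warn "Skipped symlinks: #{file_lists[:symlinks].join(', ')}" unless file_lists[:symlinks].empty?
rescue PuppetForge::InvalidPathInPackageError => e
  # Raised when an entry would be written outside the destination directory.
  warn e.message
end

# Repack a directory into a gzipped tarball.
tar.pack('/tmp/unpacked', '/tmp/repacked.tar.gz')
```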
data/spec/unit/forge/unpacker_spec.rb

```diff
@@ -0,0 +1,58 @@
+require 'tmpdir'
+require 'spec_helper'
+
+describe PuppetForge::Unpacker do
+
+  let(:source) { Dir.mktmpdir("source") }
+  let(:target) { Dir.mktmpdir("unpacker") }
+  let(:module_name) { 'myusername-mytarball' }
+  let(:filename) { Dir.mktmpdir("module") + "/module.tar.gz" }
+  let(:working_dir) { Dir.mktmpdir("working_dir") }
+  let(:trash_dir) { Dir.mktmpdir("trash_dir") }
+
+  it "attempts to untar file to temporary location" do
+
+    minitar = double('PuppetForge::Tar::Mini')
+
+    expect(minitar).to receive(:unpack).with(filename, anything()) do |src, dest|
+      FileUtils.mkdir(File.join(dest, 'extractedmodule'))
+      File.open(File.join(dest, 'extractedmodule', 'metadata.json'), 'w+') do |file|
+        file.puts JSON.generate('name' => module_name, 'version' => '1.0.0')
+      end
+      true
+    end
+
+    expect(PuppetForge::Tar).to receive(:instance).and_return(minitar)
+    PuppetForge::Unpacker.unpack(filename, target, trash_dir)
+    expect(File).to be_directory(target)
+  end
+
+  it "returns the appropriate categories of the contents of the tar file from the tar implementation" do
+
+    minitar = double('PuppetForge::Tar::Mini')
+
+    expect(minitar).to receive(:unpack).with(filename, anything()) do |src, dest|
+      FileUtils.mkdir(File.join(dest, 'extractedmodule'))
+      File.open(File.join(dest, 'extractedmodule', 'metadata.json'), 'w+') do |file|
+        file.puts JSON.generate('name' => module_name, 'version' => '1.0.0')
+      end
+      { :valid => [File.join('extractedmodule', 'metadata.json')], :invalid => [], :symlinks => [] }
+    end
+
+    expect(PuppetForge::Tar).to receive(:instance).and_return(minitar)
+    file_lists = PuppetForge::Unpacker.unpack(filename, target, trash_dir)
+    expect(file_lists).to eq({:valid=>["extractedmodule/metadata.json"], :invalid=>[], :symlinks=>[]})
+    expect(File).to be_directory(target)
+  end
+
+  it "attempts to set the ownership of a target dir to a source dir's owner" do
+
+    source_path = Pathname.new(source)
+    target_path = Pathname.new(target)
+
+    expect(FileUtils).to receive(:chown_R).with(source_path.stat.uid, source_path.stat.gid, target_path)
+
+    PuppetForge::Unpacker.harmonize_ownership(source_path, target_path)
+  end
+
+end
```
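A usage sketch for the unpacker API, limited to the two class methods the spec above exercises; directory paths are placeholders.

```ruby
require 'tmpdir'
require 'pathname'
require 'puppet_forge'

tarball   = '/tmp/myuser-mymodule-1.0.0.tar.gz'
target    = Dir.mktmpdir('module-install')
trash_dir = Dir.mktmpdir('module-trash')

# Extracts the tarball into `target`; per the spec, the return value reports
# :valid, :invalid, and :symlinks entries rather than installing them blindly.
file_lists = PuppetForge::Unpacker.unpack(tarball, target, trash_dir)
warn "Skipped symlinks: #{file_lists[:symlinks].join(', ')}" unless file_lists[:symlinks].empty?

# Optionally make the extracted files match the ownership of an existing directory.
PuppetForge::Unpacker.harmonize_ownership(Pathname.new('/etc/puppetlabs/code/modules'),
                                           Pathname.new(target))
```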
data/spec/unit/forge/v3/base/paginated_collection_spec.rb

```diff
@@ -2,53 +2,65 @@ require 'spec_helper'
 
 describe PuppetForge::V3::Base::PaginatedCollection do
   let(:klass) do
-    [old lines 5-42 are truncated in the source diff view]
-    PuppetForge::V3::Base::PaginatedCollection.new(self, data[url], meta[url], {})
-  end
+    allow(PuppetForge::V3::Base).to receive(:get_collection) do |url|
+      data = {
+        '/v3/collection' => [ { :data => :A }, { :data => :B }, { :data => :C } ],
+        '/v3/collection?page=2' => [ { :data => :D }, { :data => :E }, { :data => :F } ],
+        '/v3/collection?page=3' => [ { :data => :G }, { :data => :H } ],
+      }
+
+      meta = {
+        '/v3/collection' => {
+          :limit => 3,
+          :offset => 0,
+          :first => '/v3/collection',
+          :previous => nil,
+          :current => '/v3/collection',
+          :next => '/v3/collection?page=2',
+          :total => 8,
+        },
+        '/v3/collection?page=2' => {
+          :limit => 3,
+          :offset => 0,
+          :first => '/v3/collection',
+          :previous => '/v3/collection',
+          :current => '/v3/collection?page=2',
+          :next => '/v3/collection?page=3',
+          :total => 8,
+        },
+        '/v3/collection?page=3' => {
+          :limit => 3,
+          :offset => 0,
+          :first => '/v3/collection',
+          :previous => '/v3/collection?page=2',
+          :current => '/v3/collection?page=3',
+          :next => nil,
+          :total => 8,
+        },
+      }
+
+      PuppetForge::V3::Base::PaginatedCollection.new(PuppetForge::V3::Base, data[url], meta[url], {})
     end
+
+    PuppetForge::V3::Base
   end
 
   subject { klass.get_collection('/v3/collection') }
 
+  def collect_data(paginated)
+    paginated.to_a.collect do |x|
+      x.data
+    end
+  end
+
+  it '#all returns self for backwards compatibility.' do
+    paginated = subject.all
+
+    expect(paginated).to eq(subject)
+  end
+
   it 'maps to a single page of the collection' do
-    expect(subject
+    expect(collect_data(subject)).to eql([ :A, :B, :C ])
   end
 
   it 'knows the size of the entire collection' do
@@ -61,12 +73,12 @@ describe PuppetForge::V3::Base::PaginatedCollection do
 
   it 'enables page navigation' do
     expect(subject.next).to_not be_empty
-    expect(subject.next
-    expect(subject.next.previous
+    expect(collect_data(subject.next)).to_not eql(collect_data(subject))
+    expect(collect_data(subject.next.previous)).to eql(collect_data(subject))
   end
 
   it 'exposes the pagination metadata' do
-    expect(subject.
+    expect(subject.limit).to be subject.size
   end
 
   it 'exposes previous_url and next_url' do
@@ -77,12 +89,22 @@ describe PuppetForge::V3::Base::PaginatedCollection do
   describe '#unpaginated' do
     it 'provides an iterator over the entire collection' do
       expected = [ :A, :B, :C, :D, :E, :F, :G, :H ]
-
+      actual = subject.unpaginated.to_a.collect do |x|
+        expect(x).to be_a(klass)
+        x.data
+      end
+
+      expect(actual).to eql(expected)
     end
 
     it "provides a full iterator regardless of which page it's started on" do
      expected = [ :A, :B, :C, :D, :E, :F, :G, :H ]
-
+
+      actual = subject.next.next.unpaginated.to_a.collect do |x|
+        expect(x).to be_a(klass)
+        x.data
+      end
+      expect(actual).to eql(expected)
    end
  end
 end
```
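For orientation, a short sketch of how a paginated collection is navigated. The navigation methods (`total`, `limit`, `next`, `previous`, `unpaginated`) come straight from the spec above; the `where` call used to obtain a first page is an assumed entry point on the V3 models and may differ in the actual gem.

```ruby
require 'puppet_forge'

first_page = PuppetForge::V3::Module.where(:owner => 'puppetlabs') # assumed query API

first_page.total             # number of results across every page
first_page.limit             # page size reported by the API
second_page = first_page.next
second_page.previous         # navigates back to the first page

# Enumerate every element across all pages without manual paging.
first_page.unpaginated.each do |mod|
  p mod
end
```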
data/spec/unit/forge/v3/base_spec.rb

```diff
@@ -13,7 +13,7 @@ describe PuppetForge::V3::Base do
 
     collection = PuppetForge::V3::Base.new_collection(response_data)
 
-    expect(collection.limit).to eq(
+    expect(collection.limit).to eq(20)
     expect(collection.offset).to eq(0)
     expect(collection.total).to eq(0)
   end
```
data/spec/unit/forge/v3/metadata_spec.rb

```diff
@@ -0,0 +1,300 @@
+require 'spec_helper'
+
+describe PuppetForge::Metadata do
+  let(:data) { {} }
+  let(:metadata) { PuppetForge::Metadata.new }
+
+  describe 'property lookups' do
+    subject { metadata }
+
+    %w[ name version author summary license source project_page issues_url
+        dependencies dashed_name release_name description ].each do |prop|
+      describe "##{prop}" do
+        it "responds to the property" do
+          subject.send(prop)
+        end
+      end
+    end
+  end
+
+  describe "#update" do
+    subject { metadata.update(data) }
+
+    context "with a valid name" do
+      let(:data) { { 'name' => 'billgates-mymodule' } }
+
+      it "extracts the author name from the name field" do
+        expect(subject.to_hash['author']).to eq('billgates')
+      end
+
+      it "extracts a module name from the name field" do
+        expect(subject.module_name).to eq('mymodule')
+      end
+
+      context "and existing author" do
+        before { metadata.update('author' => 'foo') }
+
+        it "avoids overwriting the existing author" do
+          expect(subject.to_hash['author']).to eq('foo')
+        end
+      end
+    end
+
+    context "with a valid name and author" do
+      let(:data) { { 'name' => 'billgates-mymodule', 'author' => 'foo' } }
+
+      it "use the author name from the author field" do
+        expect(subject.to_hash['author']).to eq('foo')
+      end
+
+      context "and preexisting author" do
+        before { metadata.update('author' => 'bar') }
+
+        it "avoids overwriting the existing author" do
+          expect(subject.to_hash['author']).to eq('foo')
+        end
+      end
+    end
+
+    context "with an invalid name" do
+      context "(short module name)" do
+        let(:data) { { 'name' => 'mymodule' } }
+
+        it "raises an exception" do
+          expect { subject }.to raise_error(ArgumentError, "Invalid 'name' field in metadata.json: the field must be a namespaced module name")
+        end
+      end
+
+      context "(missing namespace)" do
+        let(:data) { { 'name' => '/mymodule' } }
+
+        it "raises an exception" do
+          expect { subject }.to raise_error(ArgumentError, "Invalid 'name' field in metadata.json: the field must be a namespaced module name")
+        end
+      end
+
+      context "(missing module name)" do
+        let(:data) { { 'name' => 'namespace/' } }
+
+        it "raises an exception" do
+          expect { subject }.to raise_error(ArgumentError, "Invalid 'name' field in metadata.json: the field must be a namespaced module name")
+        end
+      end
+
+      context "(invalid namespace)" do
+        let(:data) { { 'name' => "dolla'bill$-mymodule" } }
+
+        it "raises an exception" do
+          expect { subject }.to raise_error(ArgumentError, "Invalid 'name' field in metadata.json: the namespace contains non-alphanumeric characters")
+        end
+      end
+
+      context "(non-alphanumeric module name)" do
+        let(:data) { { 'name' => "dollabils-fivedolla'" } }
+
+        it "raises an exception" do
+          expect { subject }.to raise_error(ArgumentError, "Invalid 'name' field in metadata.json: the module name contains non-alphanumeric (or underscore) characters")
+        end
+      end
+
+      context "(module name starts with a number)" do
+        let(:data) { { 'name' => "dollabills-5dollars" } }
+
+        it "raises an exception" do
+          expect { subject }.to raise_error(ArgumentError, "Invalid 'name' field in metadata.json: the module name must begin with a letter")
+        end
+      end
+    end
+
+
+    context "with an invalid version" do
+      let(:data) { { 'version' => '3.0' } }
+
+      it "raises an exception" do
+        expect { subject }.to raise_error(ArgumentError, "Invalid 'version' field in metadata.json: version string cannot be parsed as a valid Semantic Version")
+      end
+    end
+
+    context "with a valid source" do
+      context "which is a GitHub URL" do
+        context "with a scheme" do
+          before { metadata.update('source' => 'https://github.com/billgates/amazingness') }
+
+          it "predicts a default project_page" do
+            expect(subject.to_hash['project_page']).to eq('https://github.com/billgates/amazingness')
+          end
+
+          it "predicts a default issues_url" do
+            expect(subject.to_hash['issues_url']).to eq('https://github.com/billgates/amazingness/issues')
+          end
+        end
+
+        context "without a scheme" do
+          before { metadata.update('source' => 'github.com/billgates/amazingness') }
+
+          it "predicts a default project_page" do
+            expect(subject.to_hash['project_page']).to eq('https://github.com/billgates/amazingness')
+          end
+
+          it "predicts a default issues_url" do
+            expect(subject.to_hash['issues_url']).to eq('https://github.com/billgates/amazingness/issues')
+          end
+        end
+      end
+
+      context "which is not a GitHub URL" do
+        before { metadata.update('source' => 'https://notgithub.com/billgates/amazingness') }
+
+        it "does not predict a default project_page" do
+          expect(subject.to_hash['project_page']).to be nil
+        end
+
+        it "does not predict a default issues_url" do
+          expect(subject.to_hash['issues_url']).to be nil
+        end
+      end
+
+      context "which is not a URL" do
+        before { metadata.update('source' => 'my brain') }
+
+        it "does not predict a default project_page" do
+          expect(subject.to_hash['project_page']).to be nil
+        end
+
+        it "does not predict a default issues_url" do
+          expect(subject.to_hash['issues_url']).to be nil
+        end
+      end
+
+    end
+
+    context "with a valid dependency", :pending => "dependency resolution is not yet in scope" do
+      let(:data) { {'dependencies' => [{'name' => 'puppetlabs-goodmodule'}] }}
+
+      it "adds the dependency" do
+        expect(subject.dependencies.size).to eq(1)
+      end
+    end
+
+    context "with a invalid dependency name" do
+      let(:data) { {'dependencies' => [{'name' => 'puppetlabsbadmodule'}] }}
+
+      it "raises an exception" do
+        expect { subject }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with a valid dependency version range", :pending => "dependency resolution is not yet in scope" do
+      let(:data) { {'dependencies' => [{'name' => 'puppetlabs-badmodule', 'version_requirement' => '>= 2.0.0'}] }}
+
+      it "adds the dependency" do
+        expect(subject.dependencies.size).to eq(1)
+      end
+    end
+
+    context "with a invalid version range" do
+      let(:data) { {'dependencies' => [{'name' => 'puppetlabsbadmodule', 'version_requirement' => '>= banana'}] }}
+
+      it "raises an exception" do
+        expect { subject }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with duplicate dependencies", :pending => "dependency resolution is not yet in scope" do
+      let(:data) { {'dependencies' => [{'name' => 'puppetlabs-dupmodule', 'version_requirement' => '1.0.0'},
+                                       {'name' => 'puppetlabs-dupmodule', 'version_requirement' => '0.0.1'}] }
+      }
+
+      it "raises an exception" do
+        expect { subject }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "adding a duplicate dependency", :pending => "dependency resolution is not yet in scope" do
+      let(:data) { {'dependencies' => [{'name' => 'puppetlabs-origmodule', 'version_requirement' => '1.0.0'}] }}
+
+      it "with a different version raises an exception" do
+        metadata.add_dependency('puppetlabs-origmodule', '>= 0.0.1')
+        expect { subject }.to raise_error(ArgumentError)
+      end
+
+      it "with the same version does not add another dependency" do
+        metadata.add_dependency('puppetlabs-origmodule', '1.0.0')
+        expect(subject.dependencies.size).to eq(1)
+      end
+    end
+  end
+
+  describe '#dashed_name' do
+    it 'returns nil in the absence of a module name' do
+      expect(metadata.update('version' => '1.0.0').release_name).to be_nil
+    end
+
+    it 'returns a hyphenated string containing namespace and module name' do
+      data = metadata.update('name' => 'foo-bar')
+      expect(data.dashed_name).to eq('foo-bar')
+    end
+
+    it 'properly handles slash-separated names' do
+      data = metadata.update('name' => 'foo/bar')
+      expect(data.dashed_name).to eq('foo-bar')
+    end
+
+    it 'is unaffected by author name' do
+      data = metadata.update('name' => 'foo/bar', 'author' => 'me')
+      expect(data.dashed_name).to eq('foo-bar')
+    end
+  end
+
+  describe '#release_name' do
+    it 'returns nil in the absence of a module name' do
+      expect(metadata.update('version' => '1.0.0').release_name).to be_nil
+    end
+
+    it 'returns nil in the absence of a version' do
+      expect(metadata.update('name' => 'foo/bar').release_name).to be_nil
+    end
+
+    it 'returns a hyphenated string containing module name and version' do
+      data = metadata.update('name' => 'foo/bar', 'version' => '1.0.0')
+      expect(data.release_name).to eq('foo-bar-1.0.0')
+    end
+
+    it 'is unaffected by author name' do
+      data = metadata.update('name' => 'foo/bar', 'version' => '1.0.0', 'author' => 'me')
+      expect(data.release_name).to eq('foo-bar-1.0.0')
+    end
+  end
+
+  describe "#to_hash" do
+    subject { metadata.to_hash }
+
+    it "contains the default set of keys" do
+      expect(subject.keys.sort).to eq(%w[ name version author summary license source issues_url project_page dependencies ].sort)
+    end
+
+    describe "['license']" do
+      it "defaults to Apache 2" do
+        expect(subject['license']).to eq("Apache-2.0")
+      end
+    end
+
+    describe "['dependencies']" do
+      it "defaults to an empty set" do
+        expect(subject['dependencies']).to eq(Set.new)
+      end
+    end
+
+    context "when updated with non-default data" do
+      subject { metadata.update('license' => 'MIT', 'non-standard' => 'yup').to_hash }
+
+      it "overrides the defaults" do
+        expect(subject['license']).to eq('MIT')
+      end
+
+      it 'contains unanticipated values' do
+        expect(subject['non-standard']).to eq('yup')
+      end
+    end
+  end
+end
```
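A usage sketch for the metadata API, using only behaviour demonstrated by the spec above; the metadata values themselves are illustrative placeholders.

```ruby
require 'puppet_forge'

metadata = PuppetForge::Metadata.new
metadata.update(
  'name'    => 'examplecorp-mymodule',
  'version' => '1.2.3',
  'source'  => 'https://github.com/examplecorp/mymodule'
)

metadata.module_name              # => "mymodule"
metadata.dashed_name              # => "examplecorp-mymodule"
metadata.release_name             # => "examplecorp-mymodule-1.2.3"
metadata.to_hash['project_page']  # => "https://github.com/examplecorp/mymodule"
metadata.to_hash['issues_url']    # => "https://github.com/examplecorp/mymodule/issues"
metadata.to_hash['license']       # => "Apache-2.0" (the default)

# Invalid input raises ArgumentError, e.g. an un-namespaced name:
# metadata.update('name' => 'mymodule')
```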