briard 2.4.2 → 2.6.0

Files changed (81)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/codeql-analysis.yml +72 -0
  3. data/.github/workflows/rubocop.yml +50 -0
  4. data/.rubocop.yml +144 -620
  5. data/.rubocop_todo.yml +76 -0
  6. data/CHANGELOG.md +18 -0
  7. data/Gemfile +2 -0
  8. data/Gemfile.lock +40 -6
  9. data/Rakefile +1 -1
  10. data/{bolognese.gemspec → briard.gemspec} +46 -39
  11. data/lib/briard/array.rb +2 -2
  12. data/lib/briard/author_utils.rb +79 -71
  13. data/lib/briard/cli.rb +12 -13
  14. data/lib/briard/crossref_utils.rb +73 -61
  15. data/lib/briard/datacite_utils.rb +132 -106
  16. data/lib/briard/doi_utils.rb +10 -10
  17. data/lib/briard/metadata.rb +96 -106
  18. data/lib/briard/metadata_utils.rb +87 -78
  19. data/lib/briard/readers/bibtex_reader.rb +65 -65
  20. data/lib/briard/readers/cff_reader.rb +88 -70
  21. data/lib/briard/readers/citeproc_reader.rb +90 -84
  22. data/lib/briard/readers/codemeta_reader.rb +68 -50
  23. data/lib/briard/readers/crosscite_reader.rb +2 -2
  24. data/lib/briard/readers/crossref_reader.rb +249 -210
  25. data/lib/briard/readers/datacite_json_reader.rb +3 -3
  26. data/lib/briard/readers/datacite_reader.rb +225 -189
  27. data/lib/briard/readers/npm_reader.rb +49 -42
  28. data/lib/briard/readers/ris_reader.rb +82 -80
  29. data/lib/briard/readers/schema_org_reader.rb +182 -159
  30. data/lib/briard/string.rb +1 -1
  31. data/lib/briard/utils.rb +4 -4
  32. data/lib/briard/version.rb +3 -1
  33. data/lib/briard/whitelist_scrubber.rb +11 -4
  34. data/lib/briard/writers/bibtex_writer.rb +14 -8
  35. data/lib/briard/writers/cff_writer.rb +33 -26
  36. data/lib/briard/writers/codemeta_writer.rb +19 -15
  37. data/lib/briard/writers/csv_writer.rb +6 -4
  38. data/lib/briard/writers/datacite_json_writer.rb +8 -2
  39. data/lib/briard/writers/jats_writer.rb +33 -28
  40. data/lib/briard/writers/rdf_xml_writer.rb +1 -1
  41. data/lib/briard/writers/ris_writer.rb +30 -18
  42. data/lib/briard/writers/turtle_writer.rb +1 -1
  43. data/lib/briard.rb +6 -6
  44. data/rubocop.sarif +0 -0
  45. data/spec/array_spec.rb +5 -5
  46. data/spec/author_utils_spec.rb +151 -132
  47. data/spec/datacite_utils_spec.rb +135 -83
  48. data/spec/doi_utils_spec.rb +168 -164
  49. data/spec/find_from_format_spec.rb +69 -69
  50. data/spec/fixtures/vcr_cassettes/Briard_Metadata/sanitize/onlies_keep_specific_tags.yml +65 -0
  51. data/spec/fixtures/vcr_cassettes/Briard_Metadata/sanitize/removes_a_tags.yml +65 -0
  52. data/spec/metadata_spec.rb +91 -90
  53. data/spec/readers/bibtex_reader_spec.rb +43 -38
  54. data/spec/readers/cff_reader_spec.rb +165 -153
  55. data/spec/readers/citeproc_reader_spec.rb +45 -40
  56. data/spec/readers/codemeta_reader_spec.rb +128 -115
  57. data/spec/readers/crosscite_reader_spec.rb +34 -24
  58. data/spec/readers/crossref_reader_spec.rb +1098 -939
  59. data/spec/readers/datacite_json_reader_spec.rb +53 -40
  60. data/spec/readers/datacite_reader_spec.rb +1541 -1337
  61. data/spec/readers/npm_reader_spec.rb +48 -43
  62. data/spec/readers/ris_reader_spec.rb +53 -47
  63. data/spec/readers/schema_org_reader_spec.rb +329 -267
  64. data/spec/spec_helper.rb +6 -5
  65. data/spec/utils_spec.rb +371 -347
  66. data/spec/writers/bibtex_writer_spec.rb +143 -143
  67. data/spec/writers/cff_writer_spec.rb +96 -90
  68. data/spec/writers/citation_writer_spec.rb +34 -33
  69. data/spec/writers/citeproc_writer_spec.rb +226 -224
  70. data/spec/writers/codemeta_writer_spec.rb +18 -16
  71. data/spec/writers/crosscite_writer_spec.rb +91 -73
  72. data/spec/writers/crossref_writer_spec.rb +99 -91
  73. data/spec/writers/csv_writer_spec.rb +70 -70
  74. data/spec/writers/datacite_json_writer_spec.rb +78 -68
  75. data/spec/writers/datacite_writer_spec.rb +417 -322
  76. data/spec/writers/jats_writer_spec.rb +177 -161
  77. data/spec/writers/rdf_xml_writer_spec.rb +68 -63
  78. data/spec/writers/ris_writer_spec.rb +162 -162
  79. data/spec/writers/turtle_writer_spec.rb +47 -47
  80. metadata +242 -166
  81. data/.github/workflows/release.yml +0 -47
data/lib/briard/writers/cff_writer.rb CHANGED
@@ -7,48 +7,55 @@ module Briard
  return nil unless valid? || show_errors

  # only use CFF for software
- return nil unless %w(Software Collection).include?(types["resourceTypeGeneral"])
+ return nil unless %w[Software Collection].include?(types['resourceTypeGeneral'])
+
+ title = parse_attributes(titles, content: 'title', first: true)

- title = parse_attributes(titles, content: "title", first: true)
-
  hsh = {
- "cff-version" => "1.2.0",
- "message" => "If you use #{title} in your work, please cite it using the following metadata",
- "doi" => normalize_doi(doi),
- "repository-code" => url,
- "title" => parse_attributes(titles, content: "title", first: true),
- "authors" => write_cff_creators(creators),
- "abstract" => parse_attributes(descriptions, content: "description", first: true),
- "version" => version_info,
- "keywords" => subjects.present? ? Array.wrap(subjects).map { |k| parse_attributes(k, content: "subject", first: true) } : nil,
- "date-released" => get_date(dates, "Issued") || publication_year,
- "license" => Array.wrap(rights_list).map { |l| l["rightsIdentifier"] }.compact.unwrap,
- "references" => write_references(related_identifiers)
+ 'cff-version' => '1.2.0',
+ 'message' => "If you use #{title} in your work, please cite it using the following metadata",
+ 'doi' => normalize_doi(doi),
+ 'repository-code' => url,
+ 'title' => parse_attributes(titles, content: 'title', first: true),
+ 'authors' => write_cff_creators(creators),
+ 'abstract' => parse_attributes(descriptions, content: 'description', first: true),
+ 'version' => version_info,
+ 'keywords' => if subjects.present?
+ Array.wrap(subjects).map do |k|
+ parse_attributes(k, content: 'subject', first: true)
+ end
+ end,
+ 'date-released' => get_date(dates, 'Issued') || publication_year,
+ 'license' => Array.wrap(rights_list).map { |l| l['rightsIdentifier'] }.compact.unwrap,
+ 'references' => write_references(related_identifiers)
  }.compact
  hsh.to_yaml
  end

  def write_cff_creators(creators)
  Array.wrap(creators).map do |a|
- if a["givenName"].present? || a["nameIdentifiers"].present?
- { "given-names" => a["givenName"],
- "family-names" => a["familyName"],
- "orcid" => parse_attributes(a["nameIdentifiers"], content: "nameIdentifier", first: true),
- "affiliation" => parse_attributes(a["affiliation"], content: "name", first: true) }.compact
+ if a['givenName'].present? || a['nameIdentifiers'].present?
+ { 'given-names' => a['givenName'],
+ 'family-names' => a['familyName'],
+ 'orcid' => parse_attributes(a['nameIdentifiers'], content: 'nameIdentifier',
+ first: true),
+ 'affiliation' => parse_attributes(a['affiliation'], content: 'name',
+ first: true) }.compact
  else
- { "name" => a["name"] }
+ { 'name' => a['name'] }
  end
  end
  end

  def write_references(related_identifiers)
  return nil if related_identifiers.blank?
-
- { "identifiers" =>
+
+ { 'identifiers' =>
  Array.wrap(related_identifiers).map do |r|
- {
- "type" => r["relatedIdentifierType"].downcase,
- "value" => r["relatedIdentifierType"] == "DOI" ? doi_from_url(r["relatedIdentifier"]) : r["relatedIdentifier"] }
+ {
+ 'type' => r['relatedIdentifierType'].downcase,
+ 'value' => r['relatedIdentifierType'] == 'DOI' ? doi_from_url(r['relatedIdentifier']) : r['relatedIdentifier']
+ }
  end }
  end
  end
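
The hunk above is stylistic (RuboCop single quotes, %w[] literals, a multi-line keywords block); the YAML that `cff` emits is unchanged. As a rough sketch with hypothetical metadata values (not taken from the gem's fixtures), the compacted hash serializes like this:

require 'yaml'

# Hypothetical values standing in for doi, url, titles, etc.
hsh = {
  'cff-version'     => '1.2.0',
  'message'         => 'If you use Example Tool in your work, please cite it using the following metadata',
  'doi'             => 'https://doi.org/10.5281/zenodo.1234567',
  'repository-code' => 'https://github.com/example/example-tool',
  'title'           => 'Example Tool',
  'version'         => '1.0.1',
  'keywords'        => ['metadata', 'software citation'],
  'date-released'   => '2022-04-25',
  'license'         => 'MIT',
  'references'      => nil # nil entries are dropped by .compact, as in the writer
}.compact

puts hsh.to_yaml
# ---
# cff-version: 1.2.0
# message: If you use Example Tool in your work, please cite it using the following metadata
# doi: https://doi.org/10.5281/zenodo.1234567
# ...
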
data/lib/briard/writers/codemeta_writer.rb CHANGED
@@ -5,22 +5,26 @@ module Briard
  module CodemetaWriter
  def codemeta
  return nil unless valid? || show_errors
-
+
  hsh = {
- "@context" => id.present? ? "https://raw.githubusercontent.com/codemeta/codemeta/master/codemeta.jsonld" : nil,
- "@type" => types.present? ? types["schemaOrg"] : nil,
- "@id" => normalize_doi(doi),
- "identifier" => to_schema_org_identifiers(identifiers),
- "codeRepository" => url,
- "name" => parse_attributes(titles, content: "title", first: true),
- "authors" => creators,
- "description" => parse_attributes(descriptions, content: "description", first: true),
- "version" => version_info,
- "tags" => subjects.present? ? Array.wrap(subjects).map { |k| parse_attributes(k, content: "subject", first: true) } : nil,
- "datePublished" => get_date(dates, "Issued") || publication_year,
- "dateModified" => get_date(dates, "Updated"),
- "publisher" => publisher,
- "license" => Array.wrap(rights_list).map { |l| l["rightsUri"] }.compact.unwrap,
+ '@context' => id.present? ? 'https://raw.githubusercontent.com/codemeta/codemeta/master/codemeta.jsonld' : nil,
+ '@type' => types.present? ? types['schemaOrg'] : nil,
+ '@id' => normalize_doi(doi),
+ 'identifier' => to_schema_org_identifiers(identifiers),
+ 'codeRepository' => url,
+ 'name' => parse_attributes(titles, content: 'title', first: true),
+ 'authors' => creators,
+ 'description' => parse_attributes(descriptions, content: 'description', first: true),
+ 'version' => version_info,
+ 'tags' => if subjects.present?
+ Array.wrap(subjects).map do |k|
+ parse_attributes(k, content: 'subject', first: true)
+ end
+ end,
+ 'datePublished' => get_date(dates, 'Issued') || publication_year,
+ 'dateModified' => get_date(dates, 'Updated'),
+ 'publisher' => publisher,
+ 'license' => Array.wrap(rights_list).map { |l| l['rightsUri'] }.compact.unwrap
  }.compact
  JSON.pretty_generate hsh.presence
  end
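
As with the CFF writer, the change here is formatting only; `codemeta` still builds a plain Hash, drops nil values with `.compact`, and pretty-prints it. A minimal sketch with hypothetical values:

require 'json'

hsh = {
  '@context' => 'https://raw.githubusercontent.com/codemeta/codemeta/master/codemeta.jsonld',
  '@type'    => 'SoftwareSourceCode',
  '@id'      => 'https://doi.org/10.5281/zenodo.1234567', # hypothetical DOI
  'name'     => 'Example Tool',
  'version'  => '1.0.1',
  'tags'     => nil # removed by .compact before serialization
}.compact

puts JSON.pretty_generate(hsh)
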
data/lib/briard/writers/csv_writer.rb CHANGED
@@ -1,7 +1,9 @@
+ # frozen_string_literal: true
+
  module Briard
  module Writers
  module CsvWriter
- require "csv"
+ require 'csv'

  def csv
  return nil unless valid?
@@ -11,9 +13,9 @@ module Briard
  url: url,
  registered: get_iso8601_date(date_registered),
  state: state,
- resource_type_general: types["resourceTypeGeneral"],
- resource_type: types["resourceType"],
- title: parse_attributes(titles, content: "title", first: true),
+ resource_type_general: types['resourceTypeGeneral'],
+ resource_type: types['resourceType'],
+ title: parse_attributes(titles, content: 'title', first: true),
  author: authors_as_string(creators),
  publisher: publisher,
  publication_year: publication_year
data/lib/briard/writers/datacite_json_writer.rb CHANGED
@@ -1,10 +1,16 @@
+ # frozen_string_literal: true
+
  module Briard
  # frozen_string_literal: true
-
+
  module Writers
  module DataciteJsonWriter
  def datacite_json
- JSON.pretty_generate crosscite_hsh.transform_keys! { |key| key.camelcase(uppercase_first_letter = :lower) } if crosscite_hsh.present?
+ if crosscite_hsh.present?
+ JSON.pretty_generate crosscite_hsh.transform_keys! { |key|
+ key.camelcase(uppercase_first_letter = :lower)
+ }
+ end
  end
  end
  end
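
The one-liner is only reflowed here, but the key transformation it performs is worth spelling out: the crosscite hash uses snake_case keys, while DataCite JSON expects lowerCamelCase. A sketch of that step, assuming ActiveSupport's String#camelcase (which the gem already loads) and hypothetical keys:

require 'active_support/core_ext/string/inflections'

crosscite_hsh = { 'publication_year' => 2022, 'rights_list' => [] } # hypothetical keys
camel = crosscite_hsh.transform_keys { |key| key.camelcase(:lower) }
# => {"publicationYear"=>2022, "rightsList"=>[]}
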
data/lib/briard/writers/jats_writer.rb CHANGED
@@ -4,8 +4,8 @@ module Briard
  module Writers
  module JatsWriter
  def jats
- @jats ||= Nokogiri::XML::Builder.new(:encoding => 'UTF-8') do |xml|
- xml.send("element-citation", publication_type) do
+ @jats ||= Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
+ xml.send(:'element-citation', publication_type) do
  insert_citation(xml)
  end
  end.to_xml
@@ -18,29 +18,29 @@ module Briard
  insert_source(xml)
  insert_publisher_name(xml) if publisher.present? && !is_data?
  insert_publication_date(xml)
- insert_volume(xml) if container.to_h["volume"].present?
- insert_issue(xml) if container.to_h["issue"].present?
- insert_fpage(xml) if container.to_h["firstPage"].present?
- insert_lpage(xml) if container.to_h["lastPage"].present?
+ insert_volume(xml) if container.to_h['volume'].present?
+ insert_issue(xml) if container.to_h['issue'].present?
+ insert_fpage(xml) if container.to_h['firstPage'].present?
+ insert_lpage(xml) if container.to_h['lastPage'].present?
  insert_version(xml) if version_info.present?
  insert_pub_id(xml)
  end

  def is_article?
- publication_type.fetch('publication-type', nil) == "journal"
+ publication_type.fetch('publication-type', nil) == 'journal'
  end

  def is_data?
- publication_type.fetch('publication-type', nil) == "data"
+ publication_type.fetch('publication-type', nil) == 'data'
  end

  def is_chapter?
- publication_type.fetch('publication-type', nil) == "chapter"
+ publication_type.fetch('publication-type', nil) == 'chapter'
  end

  def insert_authors(xml)
  if creators.present?
- xml.send("person-group", "person-group-type" => "author") do
+ xml.send(:'person-group', 'person-group-type' => 'author') do
  Array.wrap(creators).each do |au|
  xml.name do
  insert_contributor(xml, au)
@@ -52,7 +52,7 @@ module Briard

  def insert_editors(xml)
  if contributors.present?
- xml.send("person-group", "person-group-type" => "editor") do
+ xml.send(:'person-group', 'person-group-type' => 'editor') do
  Array.wrap(contributors).each do |con|
  xml.name do
  insert_contributor(xml, con)
@@ -63,15 +63,18 @@ module Briard
  end

  def insert_contributor(xml, person)
- xml.surname(person["familyName"]) if person["familyName"].present?
- xml.send("given-names", person["givenName"]) if person["givenName"].present?
+ xml.surname(person['familyName']) if person['familyName'].present?
+ xml.send(:'given-names', person['givenName']) if person['givenName'].present?
  end

  def insert_citation_title(xml)
  case publication_type.fetch('publication-type', nil)
- when "data" then xml.send("data-title", parse_attributes(titles, content: "title", first: true))
- when "journal" then xml.send("article-title", parse_attributes(titles, content: "title", first: true))
- when "chapter" then xml.send("chapter-title", parse_attributes(titles, content: "title", first: true))
+ when 'data' then xml.send(:'data-title',
+ parse_attributes(titles, content: 'title', first: true))
+ when 'journal' then xml.send(:'article-title',
+ parse_attributes(titles, content: 'title', first: true))
+ when 'chapter' then xml.send(:'chapter-title',
+ parse_attributes(titles, content: 'title', first: true))
  end
  end

@@ -79,38 +82,39 @@ module Briard
  if is_chapter?
  xml.source(publisher)
  elsif is_article? || is_data?
- xml.source(container && container["title"] || publisher)
+ xml.source((container && container['title']) || publisher)
  else
- xml.source(parse_attributes(titles, content: "title", first: true))
+ xml.source(parse_attributes(titles, content: 'title', first: true))
  end
  end

  def insert_publisher_name(xml)
- xml.send("publisher-name", publisher)
+ xml.send(:'publisher-name', publisher)
  end

  def insert_publication_date(xml)
- year, month, day = get_date_parts(get_date(dates, "Issued")).to_h.fetch("date-parts", []).first
+ year, month, day = get_date_parts(get_date(dates, 'Issued')).to_h.fetch('date-parts',
+ []).first

- xml.year(year, "iso-8601-date" => get_date(dates, "Issued"))
+ xml.year(year, 'iso-8601-date' => get_date(dates, 'Issued'))
  xml.month(month.to_s.rjust(2, '0')) if month.present?
  xml.day(day.to_s.rjust(2, '0')) if day.present?
  end

  def insert_volume(xml)
- xml.volume(container["volume"])
+ xml.volume(container['volume'])
  end

  def insert_issue(xml)
- xml.issue(container["issue"])
+ xml.issue(container['issue'])
  end

  def insert_fpage(xml)
- xml.fpage(container["firstPage"])
+ xml.fpage(container['firstPage'])
  end

  def insert_lpage(xml)
- xml.lpage(container["lastPage"])
+ xml.lpage(container['lastPage'])
  end

  def insert_version(xml)
@@ -119,15 +123,16 @@ module Briard

  def insert_pub_id(xml)
  return nil unless doi.present?
- xml.send("pub-id", doi, "pub-id-type" => "doi")
+
+ xml.send(:'pub-id', doi, 'pub-id-type' => 'doi')
  end

  def date
- get_date(dates, "Issued") ? get_date_parts(get_date(dates, "Issued")) : get_date_parts(publication_year)
+ get_date_parts(get_date(dates, 'Issued') || publication_year)
  end

  def publication_type
- { 'publication-type' => Briard::Utils::CR_TO_JATS_TRANSLATIONS[types["resourceType"]] || Briard::Utils::SO_TO_JATS_TRANSLATIONS[types["schemaOrg"]] }.compact
+ { 'publication-type' => Briard::Utils::CR_TO_JATS_TRANSLATIONS[types['resourceType']] || Briard::Utils::SO_TO_JATS_TRANSLATIONS[types['schemaOrg']] }.compact
  end
  end
  end
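
Most of the churn in this file swaps string arguments to xml.send for symbols; the reason send is used at all is that JATS element names contain hyphens, so they cannot be called as ordinary Nokogiri builder methods. A minimal, self-contained sketch with a hypothetical title and DOI:

require 'nokogiri'

xml = Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |x|
  # Hyphenated element names must go through send; a trailing hash becomes attributes.
  x.send(:'element-citation', 'publication-type' => 'journal') do
    x.send(:'article-title', 'An Example Article')
    x.send(:'pub-id', '10.1234/example', 'pub-id-type' => 'doi')
  end
end.to_xml

puts xml
# <?xml version="1.0" encoding="UTF-8"?>
# <element-citation publication-type="journal">
#   <article-title>An Example Article</article-title>
#   <pub-id pub-id-type="doi">10.1234/example</pub-id>
# </element-citation>
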
data/lib/briard/writers/rdf_xml_writer.rb CHANGED
@@ -4,7 +4,7 @@ module Briard
  module Writers
  module RdfXmlWriter
  def rdf_xml
- graph.dump(:rdfxml, prefixes: { schema: "http://schema.org/" })
+ graph.dump(:rdfxml, prefixes: { schema: 'http://schema.org/' })
  end
  end
  end
data/lib/briard/writers/ris_writer.rb CHANGED
@@ -5,24 +5,36 @@ module Briard
  module RisWriter
  def ris
  {
- "TY" => types["ris"],
- "T1" => parse_attributes(titles, content: "title", first: true),
- "T2" => container && container["title"],
- "AU" => to_ris(creators),
- "DO" => doi,
- "UR" => url,
- "AB" => parse_attributes(descriptions, content: "description", first: true),
- "KW" => Array.wrap(subjects).map { |k| parse_attributes(k, content: "subject", first: true) }.presence,
- "PY" => publication_year,
- "PB" => publisher,
- "LA" => language,
- "VL" => container.to_h["volume"],
- "IS" => container.to_h["issue"],
- "SP" => container.to_h["firstPage"],
- "EP" => container.to_h["lastPage"],
- "SN" => Array.wrap(related_identifiers).find { |ri| ri["relationType"] == "IsPartOf" }.to_h.fetch("relatedIdentifier", nil),
- "ER" => ""
- }.compact.map { |k, v| v.is_a?(Array) ? v.map { |vi| "#{k} - #{vi}" }.join("\r\n") : "#{k} - #{v}" }.join("\r\n")
+ 'TY' => types['ris'],
+ 'T1' => parse_attributes(titles, content: 'title', first: true),
+ 'T2' => container && container['title'],
+ 'AU' => to_ris(creators),
+ 'DO' => doi,
+ 'UR' => url,
+ 'AB' => parse_attributes(descriptions, content: 'description', first: true),
+ 'KW' => Array.wrap(subjects).map do |k|
+ parse_attributes(k, content: 'subject', first: true)
+ end.presence,
+ 'PY' => publication_year,
+ 'PB' => publisher,
+ 'LA' => language,
+ 'VL' => container.to_h['volume'],
+ 'IS' => container.to_h['issue'],
+ 'SP' => container.to_h['firstPage'],
+ 'EP' => container.to_h['lastPage'],
+ 'SN' => Array.wrap(related_identifiers).find do |ri|
+ ri['relationType'] == 'IsPartOf'
+ end.to_h.fetch('relatedIdentifier', nil),
+ 'ER' => ''
+ }.compact.map do |k, v|
+ if v.is_a?(Array)
+ v.map do |vi|
+ "#{k} - #{vi}"
+ end.join("\r\n")
+ else
+ "#{k} - #{v}"
+ end
+ end.join("\r\n")
  end
  end
  end
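
The RIS field hash itself is unchanged; only the trailing serialization chain was expanded into do...end blocks. What that chain does, sketched on a hypothetical record: array values such as AU and KW expand to one tag line per element, everything else becomes a single "TAG - value" line, and the lines are joined with CRLF as the RIS format expects.

fields = {
  'TY' => 'JOUR',
  'T1' => 'An Example Article',
  'AU' => ['Miller, Ann', 'Smith, Bo'],
  'PY' => 2022,
  'VL' => nil, # dropped by .compact
  'ER' => ''
}

ris = fields.compact.map do |k, v|
  if v.is_a?(Array)
    v.map { |vi| "#{k} - #{vi}" }.join("\r\n")
  else
    "#{k} - #{v}"
  end
end.join("\r\n")

puts ris
# TY - JOUR
# T1 - An Example Article
# AU - Miller, Ann
# AU - Smith, Bo
# PY - 2022
# ER -
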
data/lib/briard/writers/turtle_writer.rb CHANGED
@@ -4,7 +4,7 @@ module Briard
  module Writers
  module TurtleWriter
  def turtle
- graph.dump(:ttl, prefixes: { schema: "http://schema.org/" })
+ graph.dump(:ttl, prefixes: { schema: 'http://schema.org/' })
  end
  end
  end
data/lib/briard.rb CHANGED
@@ -22,11 +22,11 @@ require 'csl/styles'
  require 'edtf'
  require 'base32/url'

- require "briard/version"
- require "briard/metadata"
- require "briard/cli"
- require "briard/string"
- require "briard/array"
- require "briard/whitelist_scrubber"
+ require 'briard/version'
+ require 'briard/metadata'
+ require 'briard/cli'
+ require 'briard/string'
+ require 'briard/array'
+ require 'briard/whitelist_scrubber'

  ENV['USER_AGENT'] ||= "Mozilla/5.0 (compatible; Maremma/#{Maremma::VERSION}; mailto:info@front-matter.io)"
data/rubocop.sarif ADDED
File without changes
data/spec/array_spec.rb CHANGED
@@ -3,20 +3,20 @@
  require 'spec_helper'

  describe Array do
- describe "unwrap" do
- it "to array" do
+ describe 'unwrap' do
+ it 'to array' do
  arr = [1, 2, 3]
  expect(arr.unwrap).to eq(arr)
  end

- it "to integer" do
+ it 'to integer' do
  arr = [1]
  expect(arr.unwrap).to eq(1)
  end

- it "to nil" do
+ it 'to nil' do
  arr = []
- expect(arr.unwrap).to be_nil
+ expect(arr.unwrap.nil?).to be(true)
  end
  end
  end