json_schema 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README.md +36 -0
- data/lib/json_pointer.rb +7 -0
- data/lib/json_pointer/evaluator.rb +75 -0
- data/lib/json_reference.rb +39 -0
- data/lib/json_schema.rb +27 -0
- data/lib/json_schema/parser.rb +296 -0
- data/lib/json_schema/reference_expander.rb +156 -0
- data/lib/json_schema/schema.rb +155 -0
- data/lib/json_schema/schema_error.rb +25 -0
- data/lib/json_schema/validator.rb +405 -0
- data/test/data_scaffold.rb +238 -0
- data/test/json_pointer/evaluator_test.rb +60 -0
- data/test/json_schema/parser_test.rb +230 -0
- data/test/json_schema/reference_expander_test.rb +149 -0
- data/test/json_schema/validator_test.rb +606 -0
- data/test/json_schema_test.rb +67 -0
- data/test/test_helper.rb +4 -0
- metadata +64 -0
@@ -0,0 +1,156 @@
|
|
1
|
+
require "json_schema/parser"
|
2
|
+
require "set"
|
3
|
+
|
4
|
+
module JsonSchema
  # Expands JSON References ("$ref") inside a parsed schema tree in place.
  #
  # Works as a fixpoint iteration: each traversal pass tries to dereference
  # every reference it encounters; passes repeat until either no unresolved
  # references remain or a pass makes no progress (which indicates an
  # unresolvable — possibly circular — reference).
  class ReferenceExpander
    # Array of SchemaError accumulated by the last call to #expand.
    attr_accessor :errors

    # Expands all references reachable from +schema+.
    # Returns true on success; false if any reference could not be resolved
    # (details in #errors). Mutates the schema tree in place via #copy_from.
    def expand(schema)
      @errors = []
      @schema = schema
      # URI -> schema lookup table, filled in during traversal
      @store = {}
      # string forms of references we could not (yet) resolve
      @unresolved_refs = Set.new
      last_num_unresolved_refs = 0

      loop do
        traverse_schema(schema)

        # nothing left unresolved, we're done!
        if @unresolved_refs.count == 0
          break
        end

        # a new traversal pass still hasn't managed to resolve any more
        # references; we're out of luck
        if @unresolved_refs.count == last_num_unresolved_refs
          refs = @unresolved_refs.to_a.join(", ")
          message = %{Couldn't resolve references (possible circular dependency): #{refs}.}
          @errors << SchemaError.new(schema, message)
          break
        end

        last_num_unresolved_refs = @unresolved_refs.count
      end

      @errors.count == 0
    end

    # Like #expand, but raises the aggregated error message on failure.
    # NOTE(review): SchemaError.aggregate returns a String, so this raises a
    # RuntimeError carrying that message rather than a SchemaError instance.
    def expand!(schema)
      if !expand(schema)
        raise SchemaError.aggregate(@errors)
      end
      true
    end

    private

    # Dispatches a schema node that holds a reference to the appropriate
    # resolution strategy based on the shape of the reference's URI.
    def dereference(schema)
      ref = schema.reference
      uri = ref.uri

      if uri && uri.host
        # remote (network) resolution is unsupported; record an error
        scheme = uri.scheme || "http"
        message = %{Reference resolution over #{scheme} is not currently supported.}
        @errors << SchemaError.new(schema, message)
      # absolute
      elsif uri && uri.path[0] == "/"
        resolve(schema, uri.path, ref)
      # relative
      elsif uri
        # build an absolute path using the URI of the current schema
        schema_uri = schema.uri.chomp("/")
        resolve(schema, schema_uri + "/" + uri.path, ref)
      # just a JSON Pointer -- resolve against schema root
      else
        evaluate(schema, @schema, ref)
      end
    end

    # Evaluates +ref+'s JSON Pointer against +schema_context+'s raw data,
    # parses the result into a new schema, and splices it into +schema+
    # in place (preserving the original parent link).
    def evaluate(schema, schema_context, ref)
      data = JsonPointer.evaluate(schema_context.data, ref.pointer)

      # couldn't resolve pointer within known schema; that's an error
      if data.nil?
        message = %{Couldn't resolve pointer "#{ref.pointer}".}
        @errors << SchemaError.new(schema_context, message)
        return
      end

      # this counts as a resolution
      @unresolved_refs.delete(ref.to_s)

      # parse a new schema and use the same parent node
      new_schema = Parser.new.parse(data, schema.parent)

      # mark a new unresolved reference if the schema we got back is also a
      # reference (it will be picked up on the next traversal pass)
      if new_schema.reference
        @unresolved_refs.add(new_schema.reference.to_s)
      end

      # copy new schema into existing one while preserving parent
      parent = schema.parent
      schema.copy_from(new_schema)
      schema.parent = parent

      new_schema
    end

    # Looks up +uri+ in the store built during traversal and evaluates the
    # reference against it. If the URI isn't known yet, records the reference
    # as unresolved so a later pass can retry it.
    def resolve(schema, uri, ref)
      if schema_context = @store[uri]
        evaluate(schema, schema_context, ref)
      else
        # couldn't resolve, return original reference
        @unresolved_refs.add(ref.to_s)
        schema
      end
    end

    # Yields every direct subschema of +schema+: combinator branches,
    # definitions, link targets, properties, items, and schema-valued
    # dependencies.
    def schema_children(schema)
      Enumerator.new do |yielder|
        schema.all_of.each { |s| yielder << s }
        schema.any_of.each { |s| yielder << s }
        schema.one_of.each { |s| yielder << s }
        schema.definitions.each { |_, s| yielder << s }
        schema.links.map { |l| l.schema }.compact.each { |s| yielder << s }
        schema.pattern_properties.each { |_, s| yielder << s }
        schema.properties.each { |_, s| yielder << s }

        if schema.not
          yielder << schema.not
        end

        # can either be a single schema (list validation) or multiple (tuple
        # validation)
        if schema.items
          if schema.items.is_a?(Array)
            schema.items.each { |s| yielder << s }
          else
            yielder << schema.items
          end
        end

        # dependencies can either be simple or "schema"; only replace the
        # latter
        schema.dependencies.values.
          select { |s| s.is_a?(Schema) }.
          each { |s| yielder << s }
      end
    end

    # Depth-first walk: registers each schema's URI in the store, then
    # dereferences and recurses into every child.
    def traverse_schema(schema)
      # Children without an ID keep the same URI as their parents. So since we
      # traverse trees from top to bottom, just keep the first reference.
      if !@store.key?(schema.uri)
        @store[schema.uri] = schema
      end

      schema_children(schema).each do |subschema|
        if subschema.reference
          dereference(subschema)
        end
        traverse_schema(subschema)
      end
    end
  end
end
@@ -0,0 +1,155 @@
|
|
1
|
+
module JsonSchema
  # In-memory representation of a parsed JSON Schema node.
  #
  # Attributes are declared with the `attr_copyable` macro so that reference
  # expansion can copy a resolved schema's state into an existing node (see
  # #copy_from) without losing the node's place in the tree.
  class Schema
    # NOTE(review): @@copyable is a class variable shared across the whole
    # inheritance tree; acceptable here because Schema is not subclassed, but
    # subclassing would share (and mutate) this same list.
    @@copyable = []

    # identical to attr_accessible, but allows us to copy in values from a
    # target schema to help preserve our hierarchy during reference expansion
    def self.attr_copyable(attr)
      attr_accessor(attr)
      @@copyable << "@#{attr}".to_sym
    end

    # Defines a reader that falls back to +default+ while the instance
    # variable is nil. The default is interpolated as a literal into the
    # method body, so mutable defaults ([] / {}) are a FRESH object on every
    # call — do not replace this with define_method capturing one shared
    # default object.
    def self.attr_reader_default(attr, default)
      class_eval("def #{attr} ; !@#{attr}.nil? ? @#{attr} : #{default} ; end")
    end

    # Rather than a normal schema, the node may be a JSON Reference. In this
    # case, no other attributes will be filled in except for #parent.
    attr_copyable :reference

    # the schema keeps a reference to the data it was initialized from for JSON
    # Pointer resolution
    attr_copyable :data

    # parent and children schemas
    attr_copyable :parent

    # the normalized URI of this schema
    attr_copyable :uri

    # basic descriptors
    attr_copyable :id
    attr_copyable :title
    attr_copyable :description
    attr_copyable :default

    # validation: any
    attr_copyable :all_of
    attr_copyable :any_of
    attr_copyable :definitions
    attr_copyable :enum
    attr_copyable :one_of
    attr_copyable :not
    attr_copyable :type

    # validation: array
    attr_copyable :additional_items
    attr_copyable :items
    attr_copyable :max_items
    attr_copyable :min_items
    attr_copyable :unique_items

    # validation: number/integer
    attr_copyable :max
    attr_copyable :max_exclusive
    attr_copyable :min
    attr_copyable :min_exclusive
    attr_copyable :multiple_of

    # validation: object
    attr_copyable :additional_properties
    attr_copyable :dependencies
    attr_copyable :max_properties
    attr_copyable :min_properties
    attr_copyable :pattern_properties
    attr_copyable :properties
    attr_copyable :required

    # validation: string
    attr_copyable :format
    attr_copyable :max_length
    attr_copyable :min_length
    attr_copyable :pattern

    # hyperschema
    attr_copyable :links
    attr_copyable :media
    attr_copyable :path_start
    attr_copyable :read_only

    # Give these properties reader defaults for particular behavior so that we
    # can preserve the `nil` nature of their instance variables. Knowing that
    # these were `nil` when we read them allows us to properly reflect the
    # parsed schema back to JSON.
    attr_reader_default :additional_items, true
    attr_reader_default :additional_properties, true
    attr_reader_default :all_of, []
    attr_reader_default :any_of, []
    attr_reader_default :definitions, {}
    attr_reader_default :dependencies, {}
    attr_reader_default :links, []
    attr_reader_default :one_of, []
    attr_reader_default :max_exclusive, false
    attr_reader_default :min_exclusive, false
    attr_reader_default :pattern_properties, {}
    attr_reader_default :properties, {}
    attr_reader_default :type, []

    # allow booleans to be accessed with question mark
    alias :additional_items? :additional_items
    alias :additional_properties? :additional_properties
    alias :max_exclusive? :max_exclusive
    alias :min_exclusive? :min_exclusive
    alias :read_only? :read_only
    alias :unique_items? :unique_items

    # Copies every registered attribute from +schema+ into this instance.
    # Used by reference expansion; the caller is responsible for restoring
    # #parent afterwards, since @parent is itself copyable.
    def copy_from(schema)
      @@copyable.each do |copyable|
        instance_variable_set(copyable, schema.instance_variable_get(copyable))
      end
    end

    # Expands all "$ref" nodes in place.
    # Returns [true, nil] on success or [false, errors] on failure.
    def expand_references
      expander = ReferenceExpander.new
      if expander.expand(self)
        [true, nil]
      else
        [false, expander.errors]
      end
    end

    # Like #expand_references but raises on failure.
    def expand_references!
      ReferenceExpander.new.expand!(self)
      true
    end

    # Validates +data+ against this schema.
    # Returns [valid, errors] where errors is an array of SchemaError.
    def validate(data)
      validator = Validator.new(self)
      valid = validator.validate(data)
      [valid, validator.errors]
    end

    # Like #validate but raises an aggregated message on failure.
    def validate!(data)
      Validator.new(self).validate!(data)
    end

    # Link subobject for a hyperschema.
    class Link
      attr_accessor :parent

      # schema attributes
      attr_accessor :description
      attr_accessor :href
      attr_accessor :method
      attr_accessor :rel
      attr_accessor :schema
      attr_accessor :title
    end

    # Media type subobject for a hyperschema.
    class Media
      attr_accessor :binary_encoding
      attr_accessor :type
    end
  end
end
@@ -0,0 +1,25 @@
|
|
1
|
+
module JsonSchema
  # A single validation or parsing problem, tied to the schema node where it
  # was detected.
  class SchemaError
    attr_accessor :message
    attr_accessor :schema

    # Folds a list of errors into one human-readable string, prefixing each
    # message with the URI of the schema it belongs to (when available).
    #
    # May want to eventually use a JSON Pointer instead to help user narrow
    # down the location of the error. It's slightly tricky to ascend the
    # schema hierarchy to raise build one though, so I'm punting on that
    # for now.
    def self.aggregate(errors)
      parts = errors.map do |error|
        if error.schema
          %{At "#{error.schema.uri}": #{error.message}}
        else
          error.message
        end
      end
      parts.join(" ")
    end

    # @param schema  the schema node the error was raised against (may be nil)
    # @param message human-readable description of the problem
    def initialize(schema, message)
      @schema  = schema
      @message = message
    end
  end
end
@@ -0,0 +1,405 @@
|
|
1
|
+
require "uri"
|
2
|
+
|
3
|
+
module JsonSchema
  # Validates a piece of data against a parsed schema, accumulating
  # SchemaError instances into #errors.
  #
  # Fixes over the previous revision:
  # * Every private validator now takes its error sink as `errors` — several
  #   took a parameter named `error` and then appended to `errors`, which
  #   resolved to the `attr_accessor :errors` reader (i.e. @errors). That
  #   leaked errors from deliberately-discarded branches (anyOf/oneOf/not,
  #   which validate against throwaway arrays) into the top-level error list,
  #   failing otherwise-valid data.
  # * #validate_dependencies now returns a validity boolean (it previously
  #   returned the result of Hash#each, which is always truthy) and triggers
  #   on key *presence* rather than value truthiness, per the JSON Schema
  #   dependencies semantics.
  class Validator
    # Maps JSON Schema type names to the Ruby classes that can represent them.
    TYPE_MAP = {
      "array" => Array,
      "boolean" => [FalseClass, TrueClass],
      "integer" => Integer,
      "number" => [Integer, Float],
      "null" => NilClass,
      "object" => Hash,
      "string" => String,
    }

    # Errors accumulated by the most recent call to #validate.
    attr_accessor :errors

    def initialize(schema)
      @schema = schema
    end

    # Validates +data+; returns true when valid. On failure, #errors holds
    # the accumulated SchemaError list.
    def validate(data)
      @errors = []
      validate_data(@schema, data, @errors)
      @errors.size == 0
    end

    # Like #validate, but raises the aggregated error message when invalid.
    def validate!(data)
      if !validate(data)
        raise SchemaError.aggregate(@errors)
      end
    end

    private

    # works around &&'s "lazy" behavior: both operands are evaluated before
    # the conjunction, so every validation runs and reports its errors
    def strict_and(valid_old, valid_new)
      valid_old && valid_new
    end

    # Runs every applicable validation for +data+ against +schema+,
    # appending failures to +errors+. Returns overall validity.
    def validate_data(schema, data, errors)
      valid = true

      # validation: any
      valid = strict_and valid, validate_all_of(schema, data, errors)
      valid = strict_and valid, validate_any_of(schema, data, errors)
      valid = strict_and valid, validate_enum(schema, data, errors)
      valid = strict_and valid, validate_one_of(schema, data, errors)
      valid = strict_and valid, validate_not(schema, data, errors)
      valid = strict_and valid, validate_type(schema, data, errors)

      # validation: array
      if data.is_a?(Array)
        valid = strict_and valid, validate_items(schema, data, errors)
        valid = strict_and valid, validate_max_items(schema, data, errors)
        valid = strict_and valid, validate_min_items(schema, data, errors)
        valid = strict_and valid, validate_unique_items(schema, data, errors)
      end

      # validation: integer/number
      if data.is_a?(Float) || data.is_a?(Integer)
        valid = strict_and valid, validate_max(schema, data, errors)
        valid = strict_and valid, validate_min(schema, data, errors)
        valid = strict_and valid, validate_multiple_of(schema, data, errors)
      end

      # validation: object
      if data.is_a?(Hash)
        valid = strict_and valid, validate_additional_properties(schema, data, errors)
        valid = strict_and valid, validate_dependencies(schema, data, errors)
        valid = strict_and valid, validate_max_properties(schema, data, errors)
        valid = strict_and valid, validate_min_properties(schema, data, errors)
        valid = strict_and valid, validate_pattern_properties(schema, data, errors)
        valid = strict_and valid, validate_properties(schema, data, errors)
        valid = strict_and valid, validate_required(schema, data, errors, schema.required)
      end

      # validation: string
      if data.is_a?(String)
        valid = strict_and valid, validate_format(schema, data, errors)
        valid = strict_and valid, validate_max_length(schema, data, errors)
        valid = strict_and valid, validate_min_length(schema, data, errors)
        valid = strict_and valid, validate_pattern(schema, data, errors)
      end

      valid
    end

    # "additionalProperties": false forbids keys not named in "properties".
    def validate_additional_properties(schema, data, errors)
      return true if schema.additional_properties?
      if (extra = data.keys - schema.properties.keys).empty?
        true
      else
        message = %{Extra keys in object: #{extra.sort.join(", ")}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "allOf": data must validate against every subschema.
    def validate_all_of(schema, data, errors)
      return true if schema.all_of.empty?
      valid = schema.all_of.all? do |subschema|
        validate_data(subschema, data, errors)
      end
      message = %{Data did not match all subschemas of "allOf" condition.}
      errors << SchemaError.new(schema, message) if !valid
      valid
    end

    # "anyOf": data must validate against at least one subschema. Branch
    # errors are deliberately discarded (branches are allowed to fail).
    def validate_any_of(schema, data, errors)
      return true if schema.any_of.empty?
      valid = schema.any_of.any? do |subschema|
        validate_data(subschema, data, [])
      end
      message = %{Data did not match any subschema of "anyOf" condition.}
      errors << SchemaError.new(schema, message) if !valid
      valid
    end

    # "dependencies": when a trigger key is present, either the schema form
    # must validate or the array form's keys must also be present.
    def validate_dependencies(schema, data, errors)
      return true if schema.dependencies.empty?
      valid = true
      schema.dependencies.each do |key, obj|
        # if the key is not present, the dependency is fulfilled by
        # definition; presence is what matters, not value truthiness
        next unless data.key?(key)
        if obj.is_a?(Schema)
          valid = strict_and valid, validate_data(obj, data, errors)
        else
          # if not a schema, value is an array of required fields
          valid = strict_and valid, validate_required(schema, data, errors, obj)
        end
      end
      valid
    end

    # "format": checks the string against a named well-known format.
    # NOTE(review): unknown format names fall through to nil and are treated
    # as failures; the spec says unknown formats should be ignored — left
    # as-is to preserve existing behavior.
    def validate_format(schema, data, errors)
      return true unless schema.format
      valid = case schema.format
      when "date-time"
        data =~ DATE_TIME_PATTERN
      when "email"
        data =~ EMAIL_PATTERN
      when "hostname"
        data =~ HOSTNAME_PATTERN
      when "ipv4"
        data =~ IPV4_PATTERN
      when "ipv6"
        data =~ IPV6_PATTERN
      when "uri"
        # NOTE(review): URI.regexp has been deprecated for a long time;
        # kept for behavior compatibility
        data =~ URI.regexp
      when "uuid"
        data =~ UUID_PATTERN
      end
      if valid
        true
      else
        message = %{Expected data to match "#{schema.format}" format, value was: #{data}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "enum": data must equal one of the listed values.
    def validate_enum(schema, data, errors)
      return true unless schema.enum
      if schema.enum.include?(data)
        true
      else
        message = %{Expected data to be a member of enum #{schema.enum}, value was: #{data}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "items": tuple validation (array of schemas, positionally matched) or
    # list validation (single schema applied to every element).
    def validate_items(schema, data, errors)
      return true unless schema.items
      if schema.items.is_a?(Array)
        if data.size < schema.items.count
          message = %{Expected array to have at least #{schema.items.count} item(s), had #{data.size} item(s).}
          errors << SchemaError.new(schema, message)
          false
        elsif data.size > schema.items.count && !schema.additional_items?
          message = %{Expected array to have no more than #{schema.items.count} item(s), had #{data.size} item(s).}
          errors << SchemaError.new(schema, message)
          false
        else
          valid = true
          schema.items.each_with_index do |subschema, i|
            valid = strict_and valid,
              validate_data(subschema, data[i], errors)
          end
          valid
        end
      else
        valid = true
        data.each do |value|
          valid = strict_and valid, validate_data(schema.items, value, errors)
        end
        valid
      end
    end

    # "maximum" / "exclusiveMaximum"
    def validate_max(schema, data, errors)
      return true unless schema.max
      if schema.max_exclusive? && data < schema.max
        true
      elsif !schema.max_exclusive? && data <= schema.max
        true
      else
        message = %{Expected data to be smaller than maximum #{schema.max} (exclusive: #{schema.max_exclusive?}), value was: #{data}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "maxItems"
    def validate_max_items(schema, data, errors)
      return true unless schema.max_items
      if data.size <= schema.max_items
        true
      else
        message = %{Expected array to have no more than #{schema.max_items} item(s), had #{data.size} item(s).}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "maxLength"
    def validate_max_length(schema, data, errors)
      return true unless schema.max_length
      if data.length <= schema.max_length
        true
      else
        message = %{Expected string to have a maximum length of #{schema.max_length}, was #{data.length} character(s) long.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "maxProperties"
    def validate_max_properties(schema, data, errors)
      return true unless schema.max_properties
      if data.keys.size <= schema.max_properties
        true
      else
        message = %{Expected object to have a maximum of #{schema.max_properties} property/ies; it had #{data.keys.size}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "minimum" / "exclusiveMinimum"
    def validate_min(schema, data, errors)
      return true unless schema.min
      if schema.min_exclusive? && data > schema.min
        true
      elsif !schema.min_exclusive? && data >= schema.min
        true
      else
        message = %{Expected data to be larger than minimum #{schema.min} (exclusive: #{schema.min_exclusive?}), value was: #{data}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "minItems"
    def validate_min_items(schema, data, errors)
      return true unless schema.min_items
      if data.size >= schema.min_items
        true
      else
        message = %{Expected array to have at least #{schema.min_items} item(s), had #{data.size} item(s).}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "minLength"
    def validate_min_length(schema, data, errors)
      return true unless schema.min_length
      if data.length >= schema.min_length
        true
      else
        message = %{Expected string to have a minimum length of #{schema.min_length}, was #{data.length} character(s) long.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "minProperties"
    def validate_min_properties(schema, data, errors)
      return true unless schema.min_properties
      if data.keys.size >= schema.min_properties
        true
      else
        message = %{Expected object to have a minimum of #{schema.min_properties} property/ies; it had #{data.keys.size}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "multipleOf"
    def validate_multiple_of(schema, data, errors)
      return true unless schema.multiple_of
      if data % schema.multiple_of == 0
        true
      else
        message = %{Expected data to be a multiple of #{schema.multiple_of}, value was: #{data}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "oneOf": data must validate against exactly one subschema. Branch
    # errors are discarded (failing branches are expected).
    def validate_one_of(schema, data, errors)
      return true if schema.one_of.empty?
      num_valid = schema.one_of.count do |subschema|
        validate_data(subschema, data, [])
      end
      message = %{Data did not match exactly one subschema of "oneOf" condition.}
      errors << SchemaError.new(schema, message) if num_valid != 1
      num_valid == 1
    end

    # "not": data must NOT validate against the subschema.
    def validate_not(schema, data, errors)
      return true unless schema.not
      # don't bother accumulating these errors, they'll all be worded
      # incorrectly for the inverse condition
      valid = !validate_data(schema.not, data, [])
      message = %{Data matched subschema of "not" condition.}
      errors << SchemaError.new(schema, message) if !valid
      valid
    end

    # "pattern": string must match the (already-compiled) regexp.
    def validate_pattern(schema, data, errors)
      return true unless schema.pattern
      if data =~ schema.pattern
        true
      else
        message = %{Expected string to match pattern "#{schema.pattern.inspect}", value was: #{data}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "patternProperties": every key matching a pattern must have a value
    # validating against that pattern's subschema.
    def validate_pattern_properties(schema, data, errors)
      return true if schema.pattern_properties.empty?
      valid = true
      schema.pattern_properties.each do |pattern, subschema|
        data.each do |key, value|
          if key =~ pattern
            valid = strict_and valid, validate_data(subschema, value, errors)
          end
        end
      end
      valid
    end

    # "properties": present keys must validate against their subschemas.
    # NOTE(review): `if value = data[key]` skips validation for present keys
    # with falsy values (false/nil); preserved as-is.
    def validate_properties(schema, data, errors)
      return true if schema.properties.empty?
      valid = true
      schema.properties.each do |key, subschema|
        if value = data[key]
          valid = strict_and valid, validate_data(subschema, value, errors)
        end
      end
      valid
    end

    # "required": all listed keys must be present in the object.
    def validate_required(schema, data, errors, required)
      return true if !required || required.empty?
      if (missing = required - data.keys).empty?
        true
      else
        message = %{Missing required keys in object: #{missing.sort.join(", ")}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "type": data's Ruby class must map to one of the allowed type names.
    def validate_type(schema, data, errors)
      return true if schema.type.empty?
      valid_types = schema.type.map { |t| TYPE_MAP[t] }.flatten.compact
      if valid_types.any? { |t| data.is_a?(t) }
        true
      else
        message = %{Expected data to be of type "#{schema.type.join("/")}"; value was: #{data.inspect}.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    # "uniqueItems"
    def validate_unique_items(schema, data, errors)
      return true unless schema.unique_items?
      if data.size == data.uniq.size
        true
      else
        message = %{Expected array items to be unique, but duplicate items were found.}
        errors << SchemaError.new(schema, message)
        false
      end
    end

    EMAIL_PATTERN = /^[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}$/i

    HOSTNAME_PATTERN = /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/

    DATE_TIME_PATTERN = /^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-2][0-9]:[0-5][0-9]:[0-5][0-9](Z|[\-+][0-9]{2}:[0-5][0-9])$/

    # from: http://stackoverflow.com/a/17871737
    IPV4_PATTERN = /^((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])$/

    # from: http://stackoverflow.com/a/17871737
    IPV6_PATTERN = /^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:)$/

    UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/
  end
end