lenex-parser 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rubocop.yml +21 -0
- data/.yardopts +2 -0
- data/LICENSE +21 -0
- data/README.md +796 -0
- data/Rakefile +43 -0
- data/bin/console +8 -0
- data/bin/setup +5 -0
- data/lenex-parser.gemspec +35 -0
- data/lib/lenex/document/serializer.rb +191 -0
- data/lib/lenex/document.rb +163 -0
- data/lib/lenex/parser/objects/age_date.rb +53 -0
- data/lib/lenex/parser/objects/age_group.rb +86 -0
- data/lib/lenex/parser/objects/athlete.rb +93 -0
- data/lib/lenex/parser/objects/bank.rb +56 -0
- data/lib/lenex/parser/objects/club.rb +101 -0
- data/lib/lenex/parser/objects/constructor.rb +51 -0
- data/lib/lenex/parser/objects/contact.rb +55 -0
- data/lib/lenex/parser/objects/entry.rb +70 -0
- data/lib/lenex/parser/objects/entry_schedule.rb +40 -0
- data/lib/lenex/parser/objects/event.rb +114 -0
- data/lib/lenex/parser/objects/facility.rb +58 -0
- data/lib/lenex/parser/objects/fee.rb +54 -0
- data/lib/lenex/parser/objects/fee_schedule.rb +26 -0
- data/lib/lenex/parser/objects/handicap.rb +86 -0
- data/lib/lenex/parser/objects/heat.rb +58 -0
- data/lib/lenex/parser/objects/host_club.rb +34 -0
- data/lib/lenex/parser/objects/judge.rb +55 -0
- data/lib/lenex/parser/objects/lenex.rb +72 -0
- data/lib/lenex/parser/objects/meet.rb +175 -0
- data/lib/lenex/parser/objects/meet_info.rb +60 -0
- data/lib/lenex/parser/objects/official.rb +70 -0
- data/lib/lenex/parser/objects/organizer.rb +34 -0
- data/lib/lenex/parser/objects/point_table.rb +54 -0
- data/lib/lenex/parser/objects/pool.rb +44 -0
- data/lib/lenex/parser/objects/qualify.rb +55 -0
- data/lib/lenex/parser/objects/ranking.rb +54 -0
- data/lib/lenex/parser/objects/record.rb +107 -0
- data/lib/lenex/parser/objects/record_athlete.rb +92 -0
- data/lib/lenex/parser/objects/record_list.rb +106 -0
- data/lib/lenex/parser/objects/record_relay.rb +62 -0
- data/lib/lenex/parser/objects/record_relay_position.rb +62 -0
- data/lib/lenex/parser/objects/relay.rb +93 -0
- data/lib/lenex/parser/objects/relay_entry.rb +81 -0
- data/lib/lenex/parser/objects/relay_position.rb +74 -0
- data/lib/lenex/parser/objects/relay_result.rb +85 -0
- data/lib/lenex/parser/objects/result.rb +76 -0
- data/lib/lenex/parser/objects/session.rb +107 -0
- data/lib/lenex/parser/objects/split.rb +53 -0
- data/lib/lenex/parser/objects/swim_style.rb +58 -0
- data/lib/lenex/parser/objects/time_standard.rb +55 -0
- data/lib/lenex/parser/objects/time_standard_list.rb +98 -0
- data/lib/lenex/parser/objects/time_standard_ref.rb +63 -0
- data/lib/lenex/parser/objects.rb +52 -0
- data/lib/lenex/parser/sax/document_handler.rb +184 -0
- data/lib/lenex/parser/version.rb +8 -0
- data/lib/lenex/parser/zip_source.rb +111 -0
- data/lib/lenex/parser.rb +184 -0
- data/lib/lenex-parser.rb +16 -0
- metadata +132 -0
data/Rakefile
ADDED
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'bundler/gem_tasks'
+require 'rake/testtask'
+require 'rubocop/rake_task'
+require 'yard'
+require 'yard/rake/yardoc_task'
+
+configure_test_task = lambda do |t|
+  t.libs << 'test'
+  t.pattern = 'test/**/*_test.rb'
+end
+
+Rake::TestTask.new(:test, &configure_test_task)
+
+namespace :test do
+  desc 'Run tests with coverage'
+  task :coverage do
+    ENV['COVERAGE'] = 'true'
+    Rake::Task[:test].reenable
+    Rake::Task[:test].invoke
+  ensure
+    Rake::Task[:test].reenable
+    ENV.delete('COVERAGE')
+  end
+end
+
+RuboCop::RakeTask.new(:rubocop)
+
+YARD::Rake::YardocTask.new(:docs)
+
+desc 'Run full CI pipeline'
+task :ci do
+  Rake::Task['test:coverage'].invoke
+  Rake::Task[:rubocop].invoke
+  Rake::Task[:docs].invoke
+ensure
+  Rake::Task['test:coverage'].reenable
+  Rake::Task[:rubocop].reenable
+  Rake::Task[:docs].reenable
+end
+
+task default: :ci
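The :ci task above chains coverage, RuboCop, and YARD, and reenables each task afterwards so the pipeline can be rerun within one process. A minimal sketch of driving the same pipeline from Ruby rather than the rake CLI (an assumption for illustration; the usual entry point would simply be `bundle exec rake ci`, and it requires the gem's development dependencies rake, rubocop, and yard):

    require 'rake'

    Rake.application.init            # set up a Rake application instance
    Rake.application.load_rakefile   # load the Rakefile from the current directory
    Rake::Task[:ci].invoke           # runs test:coverage, rubocop, and docs, then reenables them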
data/bin/console
ADDED
data/bin/setup
ADDED
data/lenex-parser.gemspec
ADDED
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require_relative 'lib/lenex/parser/version'
+
+Gem::Specification.new do |spec|
+  spec.name = 'lenex-parser'
+  spec.version = Lenex::Parser::VERSION
+  spec.authors = ['bigcurl']
+  spec.email = ['maintheme@gmail.com']
+
+  spec.summary = 'Streaming SAX parser for Lenex 3 swim files'
+  spec.description = 'Streams Lenex v3 swim data without building a DOM.'
+  spec.homepage = 'https://github.com/bigcurl/lenex-parser'
+  spec.license = 'MIT'
+
+  spec.metadata['homepage_uri'] = spec.homepage
+  spec.metadata['source_code_uri'] = spec.homepage
+  spec.metadata['changelog_uri'] = spec.homepage
+  spec.metadata['rubygems_mfa_required'] = 'true'
+
+  spec.required_ruby_version = '>= 3.1'
+
+  spec.files = Dir.glob('lib/**/*') +
+               Dir.glob('bin/*') +
+               %w[README.md LICENSE lenex-parser.gemspec Rakefile .rubocop.yml .yardopts]
+  spec.files.uniq!
+  spec.files.reject! { |path| File.directory?(path) }
+
+  spec.bindir = 'bin'
+  spec.executables = Dir.children('bin').grep_v(/\A\./)
+  spec.require_paths = ['lib']
+
+  spec.add_runtime_dependency "nokogiri", "~> 1.14"
+  spec.add_runtime_dependency "rubyzip", "~> 2.3"
+end
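Per the gemspec above, the gem targets Ruby 3.1+ and depends on nokogiri and rubyzip at runtime. A minimal consumption sketch; the require path is an assumption based on data/lib/lenex-parser.rb in the file list:

    # Gemfile
    source 'https://rubygems.org'
    gem 'lenex-parser', '~> 3.0'

    # application code
    require 'lenex-parser'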
data/lib/lenex/document/serializer.rb
ADDED
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+
+require 'nokogiri'
+
+module Lenex
+  class Document
+    # Serialises a {Lenex::Document} into Lenex XML.
+    class Serializer
+      ROOT_ELEMENT = 'LENEX'
+
+      def initialize(document)
+        @document = document
+      end
+
+      # Generates a Lenex XML document from the provided {Lenex::Document}.
+      #
+      # @return [String] UTF-8 encoded Lenex XML
+      def to_xml
+        lenex = document.build_lenex
+
+        Nokogiri::XML::Builder.new(encoding: 'UTF-8') do |xml|
+          write_lenex(xml, lenex)
+        end.to_xml
+      end
+
+      def self.singular_element(container_name)
+        return container_name unless container_name.end_with?('S')
+        return container_name[0..-2] if container_name.end_with?('SS')
+
+        container_name.sub(/IES\z/, 'Y').sub(/S\z/, '')
+      end
+
+      private
+
+      attr_reader :document
+
+      def write_lenex(xml, lenex)
+        attributes = { 'version' => lenex.version }
+        revision = lenex.revision
+        attributes['revision'] = revision if present?(revision)
+
+        xml.send(ROOT_ELEMENT, attributes) do
+          NodeSerializer.write(xml, 'CONSTRUCTOR', lenex.constructor)
+          write_collection(xml, 'MEETS', lenex.meets)
+          write_collection(xml, 'RECORDLISTS', lenex.record_lists)
+          write_collection(xml, 'TIMESTANDARDLISTS', lenex.time_standard_lists)
+        end
+      end
+
+      def write_collection(xml, container_name, collection)
+        return if collection.empty?
+
+        item_name = self.class.singular_element(container_name)
+        xml.send(container_name) do
+          collection.each do |item|
+            NodeSerializer.write(xml, item_name, item)
+          end
+        end
+      end
+
+      def present?(value)
+        !(value.nil? || value.to_s.strip.empty?)
+      end
+
+      # Serialises Lenex object model instances using their attribute maps.
+      class NodeSerializer
+        def self.write(xml, element_name, object)
+          return if object.nil?
+
+          raise ArgumentError, "Cannot serialise #{object.class} with #{name}" if object.is_a?(Hash)
+
+          new(xml, element_name, object).write
+        end
+
+        def self.attribute_map_for(klass)
+          attribute_cache[klass] ||= build_attribute_map(klass)
+        end
+
+        def self.attribute_cache
+          @attribute_cache ||= {}
+        end
+        private_class_method :attribute_cache
+
+        def self.build_attribute_map(klass)
+          return {} unless klass.const_defined?(:ATTRIBUTES, false)
+
+          attributes = klass.const_get(:ATTRIBUTES)
+          attributes.each_with_object({}) do |(attribute_name, definition), mapping|
+            key = definition.is_a?(Hash) ? definition.fetch(:key) : definition
+            mapping[key.to_sym] = attribute_name
+          end
+        end
+        private_class_method :build_attribute_map
+
+        def initialize(xml, element_name, object)
+          @xml = xml
+          @element_name = element_name
+          @object = object
+        end
+
+        def write
+          attributes = collect_attributes
+          children = collect_children
+
+          return emit_empty_element(attributes) if children.empty?
+
+          emit_nested_element(attributes, children)
+        end
+
+        def write_child(name, value)
+          return write_array_child(name, value) if value.is_a?(Array)
+
+          self.class.write(xml, element_name_for(name), value)
+        end
+
+        private
+
+        attr_reader :xml, :element_name, :object
+
+        def collect_attributes
+          attribute_map = self.class.attribute_map_for(object.class)
+
+          attribute_map.each_with_object({}) do |(key, xml_name), collected|
+            value = fetch_attribute_value(key)
+            next if value.nil?
+
+            collected[xml_name] = value.to_s
+          end
+        end
+
+        def fetch_attribute_value(key)
+          object.public_send(key)
+        rescue NoMethodError
+          nil
+        end
+
+        def collect_children
+          object.instance_variables.each_with_object({}) do |ivar, collected|
+            next if attribute_instance_variables.include?(ivar.to_s)
+
+            value = object.instance_variable_get(ivar)
+            next if skip_child_value?(value)
+
+            collected[ivar_name(ivar)] = value
+          end
+        end
+
+        def attribute_instance_variables
+          self.class.attribute_map_for(object.class).keys.map { |key| "@#{key}" }
+        end
+
+        def ivar_name(ivar)
+          ivar.to_s.delete_prefix('@').to_sym
+        end
+
+        def element_name_for(name)
+          name.to_s.delete('_').upcase
+        end
+
+        def emit_empty_element(attributes)
+          xml.send(element_name, attributes)
+        end
+
+        def emit_nested_element(attributes, children)
+          xml.send(element_name, attributes) do
+            children.each do |name, value|
+              write_child(name, value)
+            end
+          end
+        end
+
+        def write_array_child(name, values)
+          return if values.empty?
+
+          container_name = element_name_for(name)
+          item_name = Serializer.singular_element(container_name)
+
+          xml.send(container_name) do
+            values.each do |value|
+              self.class.write(xml, item_name, value)
+            end
+          end
+        end
+
+        def skip_child_value?(value)
+          value.nil? || (value.respond_to?(:empty?) && value.empty?)
+        end
+      end
+    end
+  end
+end
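NodeSerializer maps each object's ATTRIBUTES constant to XML attributes and treats the remaining instance variables as child elements, deriving item names from container names via Serializer.singular_element. A small sketch of that naming rule, taken directly from the method above (the require is an assumption that the gem's top-level entry point is loaded):

    require 'lenex-parser'

    Lenex::Document::Serializer.singular_element('MEETS')        # => "MEET"
    Lenex::Document::Serializer.singular_element('ENTRIES')      # => "ENTRY"  (IES -> Y rule)
    Lenex::Document::Serializer.singular_element('RECORDLISTS')  # => "RECORDLIST"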
data/lib/lenex/document.rb
ADDED
@@ -0,0 +1,163 @@
+# frozen_string_literal: true
+
+require_relative 'document/serializer'
+
+module Lenex
+  # Document models the <LENEX> root element of a Lenex file.
+  # It exposes the constructor metadata and the collections that hang off the
+  # root node so the parser can populate the object graph incrementally.
+  class Document
+    # Container for constructor metadata coming from the <LENEX> element.
+    #
+    # The metadata is stored as a symbol-keyed hash so callers can merge values
+    # that originate from XML attributes or nested elements.
+    class ConstructorMetadata
+      # @return [Hash{Symbol => Object}] symbol-keyed constructor attributes
+      attr_reader :attributes
+
+      # @param attributes [Hash{Symbol,String => Object}] initial metadata values
+      def initialize(attributes = {})
+        @attributes = {}
+        merge!(attributes)
+      end
+
+      # Returns the value stored for the provided key.
+      #
+      # @param key [Symbol, String]
+      # @return [Object, nil]
+      def [](key)
+        @attributes[key.to_sym]
+      end
+
+      # Stores a value for the provided key.
+      #
+      # @param key [Symbol, String]
+      # @param value [Object]
+      # @return [Object] the assigned value
+      def []=(key, value)
+        @attributes[key.to_sym] = value
+      end
+
+      # Merges the provided hash into the stored attributes.
+      #
+      # Keys are normalized to symbols to provide consistent access semantics.
+      #
+      # @param new_attributes [Hash{Symbol,String => Object}]
+      # @return [ConstructorMetadata] self
+      def merge!(new_attributes)
+        new_attributes.each do |key, value|
+          @attributes[key.to_sym] = value
+        end
+        self
+      end
+
+      # @return [Hash{Symbol => Object}] a shallow copy of the stored attributes
+      def to_h
+        @attributes.dup
+      end
+    end
+
+    # @!attribute [r] constructor_metadata
+    #   @return [ConstructorMetadata] constructor metadata captured from the LENEX root
+    # @!attribute [r] meets
+    #   @return [Array<Object>] collection of parsed meets associated with the document
+    # @!attribute [r] record_lists
+    #   @return [Array<Object>] record lists extracted from the document
+    # @!attribute [r] time_standard_lists
+    #   @return [Array<Object>] time standard lists associated with the document
+    attr_reader :constructor_metadata, :meets, :record_lists, :time_standard_lists
+    attr_accessor :version, :revision
+    attr_writer :constructor
+
+    # @param constructor [ConstructorMetadata]
+    # @param collections [Hash{Symbol => Array<Object>}] pre-populated associations keyed by
+    #   :meets, :record_lists, and :time_standard_lists
+    def initialize(constructor: ConstructorMetadata.new,
+                   collections: {},
+                   version: nil,
+                   revision: nil)
+      @constructor_metadata = constructor
+      @constructor = nil
+      @meets = Array(collections.fetch(:meets, []))
+      @record_lists = Array(collections.fetch(:record_lists, []))
+      @time_standard_lists = Array(collections.fetch(:time_standard_lists, []))
+      @version = version
+      @revision = revision
+    end
+
+    # @return [ConstructorMetadata, Lenex::Parser::Objects::Constructor]
+    def constructor
+      @constructor || @constructor_metadata
+    end
+
+    # Adds a meet to the document.
+    #
+    # @param meet [Object]
+    # @return [Object] the provided meet
+    def add_meet(meet)
+      @meets << meet
+      meet
+    end
+
+    # Adds a record list to the document.
+    #
+    # @param record_list [Object]
+    # @return [Object] the provided record list
+    def add_record_list(record_list)
+      @record_lists << record_list
+      record_list
+    end
+
+    # Adds a time standard list to the document.
+    #
+    # @param time_standard_list [Object]
+    # @return [Object] the provided time standard list
+    def add_time_standard_list(time_standard_list)
+      @time_standard_lists << time_standard_list
+      time_standard_list
+    end
+
+    # Builds a Lenex object from the accumulated SAX state.
+    #
+    # @return [Lenex::Parser::Objects::Lenex]
+    def build_lenex
+      ensure_constructor_present!
+
+      Lenex::Parser::Objects::Lenex.new(
+        version: resolved_version,
+        revision: @revision,
+        constructor: @constructor,
+        collections: collections_payload
+      )
+    end
+
+    # Serialises the document into Lenex XML.
+    #
+    # @return [String]
+    def to_xml
+      Serializer.new(self).to_xml
+    end
+
+    private
+
+    def ensure_constructor_present!
+      return if @constructor
+
+      raise Lenex::Parser::ParseError, 'CONSTRUCTOR element is required'
+    end
+
+    def resolved_version
+      return @version unless @version.nil? || @version.strip.empty?
+
+      raise Lenex::Parser::ParseError, 'LENEX version attribute is required'
+    end
+
+    def collections_payload
+      {
+        meets: @meets,
+        record_lists: @record_lists,
+        time_standard_lists: @time_standard_lists
+      }
+    end
+  end
+end
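Document is the mutable accumulator the SAX handler fills in; build_lenex then refuses to serialise until both a CONSTRUCTOR and a version are present. A hedged sketch of that behaviour using only the methods shown above (Lenex::Parser::ParseError lives in data/lib/lenex/parser.rb, and the top-level require path is an assumption):

    require 'lenex-parser'

    doc = Lenex::Document.new(version: '3.0')
    # doc.add_meet(meet)  # meets/record lists are normally appended by the SAX handler

    begin
      doc.to_xml          # delegates to Serializer, which calls build_lenex
    rescue Lenex::Parser::ParseError => e
      e.message           # => "CONSTRUCTOR element is required" until doc.constructor= is assigned
    end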
data/lib/lenex/parser/objects/age_date.rb
ADDED
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module Lenex
+  module Parser
+    module Objects
+      # Value object representing an AGEDATE element.
+      class AgeDate
+        ATTRIBUTES = {
+          'type' => { key: :type, required: true },
+          'value' => { key: :value, required: false }
+        }.freeze
+
+        ATTRIBUTE_KEYS = ATTRIBUTES.values.map { |definition| definition[:key] }.freeze
+        private_constant :ATTRIBUTE_KEYS
+
+        ATTRIBUTE_KEYS.each { |attribute| attr_reader attribute }
+
+        def initialize(**attributes)
+          ATTRIBUTES.each_value do |definition|
+            key = definition[:key]
+            instance_variable_set(:"@#{key}", attributes[key])
+          end
+        end
+
+        def self.from_xml(element)
+          raise ::Lenex::Parser::ParseError, 'AGEDATE element is required' unless element
+
+          attributes = extract_attributes(element)
+
+          new(**attributes)
+        end
+
+        def self.extract_attributes(element)
+          ATTRIBUTES.each_with_object({}) do |(attribute_name, definition), collected|
+            value = element.attribute(attribute_name)&.value
+            ensure_required_attribute!(attribute_name, definition, value)
+            collected[definition[:key]] = value if value
+          end
+        end
+        private_class_method :extract_attributes
+
+        def self.ensure_required_attribute!(attribute_name, definition, value)
+          return unless definition[:required]
+          return unless value.nil? || value.strip.empty?
+
+          message = "AGEDATE #{attribute_name} attribute is required"
+          raise ::Lenex::Parser::ParseError, message
+        end
+        private_class_method :ensure_required_attribute!
+      end
+    end
+  end
+end
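The value objects under objects/ all follow this pattern: from_xml validates required attributes against ATTRIBUTES and then builds an instance via keyword arguments. A hedged illustration with a hand-built Nokogiri element (the type value "AGE" and the date are illustrative only; permitted values come from the Lenex 3 specification, and the require path is an assumption):

    require 'nokogiri'
    require 'lenex-parser'

    element = Nokogiri::XML('<AGEDATE type="AGE" value="2024-06-01"/>').root
    age_date = Lenex::Parser::Objects::AgeDate.from_xml(element)
    age_date.type   # => "AGE"
    age_date.value  # => "2024-06-01"

    # A missing required attribute is rejected:
    Lenex::Parser::Objects::AgeDate.from_xml(Nokogiri::XML('<AGEDATE/>').root)
    # raises Lenex::Parser::ParseError, "AGEDATE type attribute is required"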
data/lib/lenex/parser/objects/age_group.rb
ADDED
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+module Lenex
+  module Parser
+    module Objects
+      # Value object representing an AGEGROUP element.
+      class AgeGroup
+        ATTRIBUTES = {
+          'agegroupid' => { key: :age_group_id, required: true },
+          'agemax' => { key: :age_max, required: true },
+          'agemin' => { key: :age_min, required: true },
+          'calculate' => { key: :calculate, required: false },
+          'gender' => { key: :gender, required: false },
+          'handicap' => { key: :handicap, required: false },
+          'levelmax' => { key: :level_max, required: false },
+          'levelmin' => { key: :level_min, required: false },
+          'levels' => { key: :levels, required: false },
+          'name' => { key: :name, required: false }
+        }.freeze
+
+        ATTRIBUTE_KEYS = ATTRIBUTES.values.map { |definition| definition[:key] }.freeze
+        private_constant :ATTRIBUTE_KEYS
+
+        ATTRIBUTE_KEYS.each { |attribute| attr_reader attribute }
+        attr_reader :rankings
+
+        def initialize(rankings: [], **attributes)
+          ATTRIBUTES.each_value do |definition|
+            key = definition[:key]
+            instance_variable_set(:"@#{key}", attributes[key])
+          end
+          @rankings = Array(rankings)
+        end
+
+        def self.from_xml(element)
+          raise ::Lenex::Parser::ParseError, 'AGEGROUP element is required' unless element
+
+          attributes = extract_attributes(element)
+
+          rankings = extract_rankings(element.at_xpath('RANKINGS'))
+
+          new(**attributes, rankings:)
+        end
+
+        def self.extract_attributes(element)
+          ATTRIBUTES.each_with_object({}) do |(attribute_name, definition), collected|
+            value = element.attribute(attribute_name)&.value
+            ensure_required_attribute!(element, attribute_name, definition, value)
+            collected[definition[:key]] = value if value
+          end
+        end
+        private_class_method :extract_attributes
+
+        def self.ensure_required_attribute!(element, attribute_name, definition, value)
+          return unless definition[:required]
+          return if optional_age_group_id_without_reference?(element, attribute_name)
+          return unless value.nil? || value.strip.empty?
+
+          message = "AGEGROUP #{attribute_name} attribute is required"
+          raise ::Lenex::Parser::ParseError, message
+        end
+        private_class_method :ensure_required_attribute!
+
+        def self.optional_age_group_id_without_reference?(element, attribute_name)
+          return false unless attribute_name == 'agegroupid'
+
+          parent = element.respond_to?(:parent) ? element.parent : nil
+          ALLOWED_PARENTS_WITHOUT_ID.include?(parent&.name)
+        end
+        private_class_method :optional_age_group_id_without_reference?
+
+        ALLOWED_PARENTS_WITHOUT_ID = %w[TIMESTANDARDLIST RECORDLIST].freeze
+        private_constant :ALLOWED_PARENTS_WITHOUT_ID
+
+        def self.extract_rankings(collection_element)
+          return [] unless collection_element
+
+          collection_element.xpath('RANKING').map do |ranking_element|
+            Ranking.from_xml(ranking_element)
+          end
+        end
+        private_class_method :extract_rankings
+      end
+    end
+  end
+end
data/lib/lenex/parser/objects/athlete.rb
ADDED
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module Lenex
+  module Parser
+    module Objects
+      # Value object representing an ATHLETE element.
+      class Athlete
+        ATTRIBUTES = {
+          'athleteid' => { key: :athlete_id, required: true },
+          'birthdate' => { key: :birthdate, required: true },
+          'firstname' => { key: :first_name, required: true },
+          'firstname.en' => { key: :first_name_en, required: false },
+          'gender' => { key: :gender, required: true },
+          'lastname' => { key: :last_name, required: true },
+          'lastname.en' => { key: :last_name_en, required: false },
+          'level' => { key: :level, required: false },
+          'license' => { key: :license, required: false },
+          'license_dbs' => { key: :license_dbs, required: false },
+          'license_dsv' => { key: :license_dsv, required: false },
+          'license_ipc' => { key: :license_ipc, required: false },
+          'nameprefix' => { key: :name_prefix, required: false },
+          'nation' => { key: :nation, required: false },
+          'passport' => { key: :passport, required: false },
+          'status' => { key: :status, required: false },
+          'swrid' => { key: :swrid, required: false }
+        }.freeze
+
+        ATTRIBUTE_KEYS = ATTRIBUTES.values.map { |definition| definition[:key] }.freeze
+        private_constant :ATTRIBUTE_KEYS
+
+        ATTRIBUTE_KEYS.each { |attribute| attr_reader attribute }
+        attr_reader :handicap, :entries, :results
+
+        def initialize(handicap: nil, entries: [], results: [], **attributes)
+          ATTRIBUTES.each_value do |definition|
+            key = definition[:key]
+            instance_variable_set(:"@#{key}", attributes[key])
+          end
+          @handicap = handicap
+          @entries = Array(entries)
+          @results = Array(results)
+        end
+
+        def self.from_xml(element)
+          raise ::Lenex::Parser::ParseError, 'ATHLETE element is required' unless element
+
+          attributes = extract_attributes(element)
+          handicap = Handicap.from_xml(element.at_xpath('HANDICAP'))
+          entries = extract_entries(element.at_xpath('ENTRIES'))
+          results = extract_results(element.at_xpath('RESULTS'))
+
+          new(**attributes, handicap:, entries:, results:)
+        end
+
+        def self.extract_attributes(element)
+          ATTRIBUTES.each_with_object({}) do |(attribute_name, definition), collected|
+            value = element.attribute(attribute_name)&.value
+            ensure_required_attribute!(attribute_name, definition, value)
+            collected[definition[:key]] = value if value
+          end
+        end
+        private_class_method :extract_attributes
+
+        def self.ensure_required_attribute!(attribute_name, definition, value)
+          return unless definition[:required]
+          return unless value.nil? || value.strip.empty?
+
+          message = "ATHLETE #{attribute_name} attribute is required"
+          raise ::Lenex::Parser::ParseError, message
+        end
+        private_class_method :ensure_required_attribute!
+
+        def self.extract_entries(collection_element)
+          return [] unless collection_element
+
+          collection_element.xpath('ENTRY').map do |entry_element|
+            Entry.from_xml(entry_element)
+          end
+        end
+        private_class_method :extract_entries
+
+        def self.extract_results(collection_element)
+          return [] unless collection_element
+
+          collection_element.xpath('RESULT').map do |result_element|
+            Result.from_xml(result_element)
+          end
+        end
+        private_class_method :extract_results
+      end
+    end
+  end
+end
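Athlete follows the same from_xml pattern but additionally builds its HANDICAP, ENTRIES, and RESULTS children through the sibling Handicap, Entry, and Result classes listed at the top of this diff. Constructing the value object directly, which skips the required-attribute checks performed by from_xml, gives a fully self-contained sketch (the names are illustrative data, and the require path is an assumption):

    require 'lenex-parser'

    athlete = Lenex::Parser::Objects::Athlete.new(
      athlete_id: '101',
      first_name: 'Erika',
      last_name: 'Mustermann',
      gender: 'F',
      birthdate: '2008-03-14'
    )

    athlete.last_name  # => "Mustermann"
    athlete.entries    # => [] (ENTRIES and RESULTS default to empty arrays)
    athlete.handicap   # => nil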