rail_feeds 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +23 -0
- data/.rspec +3 -0
- data/.rubocop.yml +31 -0
- data/.travis.yml +26 -0
- data/CHANGELOG.md +3 -0
- data/Gemfile +6 -0
- data/Guardfile +25 -0
- data/LICENSE.md +32 -0
- data/README.md +77 -0
- data/Rakefile +3 -0
- data/doc/guides/Logging.md +13 -0
- data/doc/guides/Network Rail/CORPUS.md +34 -0
- data/doc/guides/Network Rail/SMART.md +39 -0
- data/doc/guides/Network Rail/Schedule.md +138 -0
- data/file +0 -0
- data/lib/rail_feeds/credentials.rb +45 -0
- data/lib/rail_feeds/logging.rb +51 -0
- data/lib/rail_feeds/network_rail/corpus.rb +77 -0
- data/lib/rail_feeds/network_rail/credentials.rb +22 -0
- data/lib/rail_feeds/network_rail/http_client.rb +57 -0
- data/lib/rail_feeds/network_rail/schedule/association.rb +208 -0
- data/lib/rail_feeds/network_rail/schedule/data.rb +215 -0
- data/lib/rail_feeds/network_rail/schedule/days.rb +95 -0
- data/lib/rail_feeds/network_rail/schedule/fetcher.rb +193 -0
- data/lib/rail_feeds/network_rail/schedule/header/cif.rb +102 -0
- data/lib/rail_feeds/network_rail/schedule/header/json.rb +79 -0
- data/lib/rail_feeds/network_rail/schedule/header.rb +22 -0
- data/lib/rail_feeds/network_rail/schedule/parser/cif.rb +141 -0
- data/lib/rail_feeds/network_rail/schedule/parser/json.rb +87 -0
- data/lib/rail_feeds/network_rail/schedule/parser.rb +108 -0
- data/lib/rail_feeds/network_rail/schedule/stp_indicator.rb +72 -0
- data/lib/rail_feeds/network_rail/schedule/tiploc.rb +100 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/change_en_route.rb +158 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location/intermediate.rb +119 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location/origin.rb +91 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location/terminating.rb +72 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location.rb +76 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule.rb +392 -0
- data/lib/rail_feeds/network_rail/schedule.rb +33 -0
- data/lib/rail_feeds/network_rail/smart.rb +186 -0
- data/lib/rail_feeds/network_rail/stomp_client.rb +77 -0
- data/lib/rail_feeds/network_rail.rb +16 -0
- data/lib/rail_feeds/version.rb +14 -0
- data/lib/rail_feeds.rb +10 -0
- data/rail_feeds.gemspec +32 -0
- data/spec/fixtures/network_rail/schedule/data/full.yaml +60 -0
- data/spec/fixtures/network_rail/schedule/data/starting.yaml +131 -0
- data/spec/fixtures/network_rail/schedule/data/update-gap.yaml +10 -0
- data/spec/fixtures/network_rail/schedule/data/update-next.yaml +13 -0
- data/spec/fixtures/network_rail/schedule/data/update-old.yaml +10 -0
- data/spec/fixtures/network_rail/schedule/data/update.yaml +112 -0
- data/spec/fixtures/network_rail/schedule/parser/train_create.json +1 -0
- data/spec/fixtures/network_rail/schedule/parser/train_delete.json +1 -0
- data/spec/fixtures/network_rail/schedule/train_schedule/json-data.yaml +67 -0
- data/spec/rail_feeds/credentials_spec.rb +46 -0
- data/spec/rail_feeds/logging_spec.rb +81 -0
- data/spec/rail_feeds/network_rail/corpus_spec.rb +92 -0
- data/spec/rail_feeds/network_rail/credentials_spec.rb +22 -0
- data/spec/rail_feeds/network_rail/http_client_spec.rb +88 -0
- data/spec/rail_feeds/network_rail/schedule/association_spec.rb +205 -0
- data/spec/rail_feeds/network_rail/schedule/data_spec.rb +219 -0
- data/spec/rail_feeds/network_rail/schedule/days_shared.rb +99 -0
- data/spec/rail_feeds/network_rail/schedule/days_spec.rb +4 -0
- data/spec/rail_feeds/network_rail/schedule/fetcher_spec.rb +228 -0
- data/spec/rail_feeds/network_rail/schedule/header/cif_spec.rb +72 -0
- data/spec/rail_feeds/network_rail/schedule/header/json_spec.rb +51 -0
- data/spec/rail_feeds/network_rail/schedule/header_spec.rb +19 -0
- data/spec/rail_feeds/network_rail/schedule/parser/cif_spec.rb +197 -0
- data/spec/rail_feeds/network_rail/schedule/parser/json_spec.rb +172 -0
- data/spec/rail_feeds/network_rail/schedule/parser_spec.rb +34 -0
- data/spec/rail_feeds/network_rail/schedule/stp_indicator_shared.rb +49 -0
- data/spec/rail_feeds/network_rail/schedule/stp_indicator_spec.rb +4 -0
- data/spec/rail_feeds/network_rail/schedule/tiploc_spec.rb +77 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/change_en_route_spec.rb +121 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location/intermediate_spec.rb +95 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location/origin_spec.rb +87 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location/terminating_spec.rb +81 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location_spec.rb +35 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule_spec.rb +284 -0
- data/spec/rail_feeds/network_rail/schedule_spec.rb +41 -0
- data/spec/rail_feeds/network_rail/smart_spec.rb +194 -0
- data/spec/rail_feeds/network_rail/stomp_client_spec.rb +151 -0
- data/spec/rail_feeds/network_rail_spec.rb +7 -0
- data/spec/rail_feeds_spec.rb +11 -0
- data/spec/spec_helper.rb +47 -0
- metadata +282 -0
|
# frozen_string_literal: true

require 'net/http'

module RailFeeds
  module NetworkRail
    module Schedule
      # A class for fetching the schedule data files.
      class Fetcher
        include Logging

        # Initialize a new schedule fetcher.
        # @param [RailFeeds::NetworkRail::Credentials] credentials
        #   The credentials for connecting to the feed.
        # @param [Logger, nil] logger
        #   The logger for outputting events, if nil the global logger will be used.
        def initialize(credentials: Credentials, logger: nil)
          @credentials = credentials
          self.logger = logger unless logger.nil?
        end

        # Download the full schedule.
        # @param [:json, :cif] format
        #   The format to download the schedule in.
        # @param [String] file
        #   The path to the file to save the .json.gz / .cif.gz download in.
        def download_all_full(format, file)
          download 'ALL', 'full', format, file
        end

        # Download the daily update to the full schedule.
        # @param [String, #to_s] day
        #   The day to get the update schedule for ("mon", "tue", "wed", ...).
        # @param [:json, :cif] format
        #   The format to fetch the schedule in.
        # @param [String] file
        #   The path to the file to save the .json.gz / .cif.gz download in.
        def download_all_update(day, format, file)
          download 'ALL', day, format, file
        end

        # Download the freight schedule.
        # @param [String] file
        #   The path to the file to save the .json.gz download in.
        def download_freight_full(file)
          download 'FREIGHT', 'full', :json, file
        end

        # Download the daily update to the freight schedule.
        # @param [String, #to_s] day
        #   The day to get the update schedule for ("mon", "tue", "wed", ...).
        # @param [String] file
        #   The path to the file to save the .json.gz download in.
        def download_freight_update(day, file)
          download 'FREIGHT', day, :json, file
        end

        # Download the schedule for a TOC.
        # @param [String, #to_s, nil] toc
        #   The TOC to get the schedule for.
        # @param [String] file
        #   The path to the file to save the .json.gz download in.
        def download_toc_full(toc, file)
          download toc, 'full', :json, file
        end

        # Download the daily update for a TOC.
        # @param [String, #to_s, nil] toc
        #   The TOC to get the schedule for.
        # @param [String, #to_s] day
        #   The day to get the update schedule for ("mon", "tue", "wed", ...).
        # @param [String] file
        #   The path to the file to save the .json.gz download in.
        def download_toc_update(toc, day, file)
          download toc, day, :json, file
        end

        # Fetch the full schedule.
        # @param [:json, :cif] format
        #   The format to fetch the schedule in.
        # @yield [file] Once the block has run the temp file will be deleted.
        # @yieldparam [Zlib::GzipReader] file The unzippable content of the file.
        def fetch_all_full(format, &block)
          fetch 'ALL', 'full', format, &block
        end

        # Fetch the daily update to the full schedule.
        # @param [String, #to_s] day
        #   The day to get the update schedule for ("mon", "tue", "wed", ...).
        # @param [:json, :cif] format
        #   The format to fetch the schedule in.
        # @yield [file] Once the block has run the temp file will be deleted.
        # @yieldparam [Zlib::GzipReader] file The unzippable content of the file.
        def fetch_all_update(day, format, &block)
          fetch 'ALL', day, format, &block
        end

        # Fetch the freight schedule.
        # @yield [file] Once the block has run the temp file will be deleted.
        # @yieldparam [Zlib::GzipReader] file The unzippable content of the file.
        def fetch_freight_full(&block)
          fetch 'FREIGHT', 'full', :json, &block
        end

        # Fetch the daily update to the freight schedule.
        # @param [String, #to_s] day
        #   The day to get the update schedule for ("mon", "tue", "wed", ...).
        # @yield [file] Once the block has run the temp file will be deleted.
        # @yieldparam [Zlib::GzipReader] file The unzippable content of the file.
        def fetch_freight_update(day, &block)
          fetch 'FREIGHT', day, :json, &block
        end

        # Fetch the schedule for a TOC.
        # @param [String, #to_s, nil] toc
        #   The TOC to get the schedule for.
        # @yield [file] Once the block has run the temp file will be deleted.
        # @yieldparam [Zlib::GzipReader] file The unzippable content of the file.
        def fetch_toc_full(toc, &block)
          fetch toc, 'full', :json, &block
        end

        # Fetch the daily update for a TOC.
        # @param [String, #to_s, nil] toc
        #   The TOC to get the schedule for.
        # @param [String, #to_s] day
        #   The day to get the update schedule for ("mon", "tue", "wed", ...).
        # @yield [file] Once the block has run the temp file will be deleted.
        # @yieldparam [Zlib::GzipReader] file The unzippable content of the file.
        def fetch_toc_update(toc, day, &block)
          fetch toc, day, :json, &block
        end

        private

        # Fetch a schedule, yielding its unzipped content to the block.
        def fetch(toc, day, format, &block)
          path = path_for toc, day, format
          client = HTTPClient.new(credentials: @credentials, logger: logger)
          client.fetch_unzipped(path, &block)
        end

        # Download a schedule to file.
        def download(toc, day, format, file)
          path = path_for toc, day, format
          client = HTTPClient.new(credentials: @credentials, logger: logger)
          client.download(path, file)
        end

        # Get the request path for a schedule.
        # @raise [ArgumentError] if format is not :json or :cif,
        #   if :cif is requested for anything but the 'ALL' schedule,
        #   or if day is not 'full' / a three letter day abbreviation.
        def path_for(toc, day, format)
          toc = "#{toc}_TOC" unless %w[ALL FREIGHT].include?(toc)

          if format.eql?(:cif)
            # Network Rail only provide CIF downloads for the whole network.
            unless toc.eql?('ALL')
              raise ArgumentError, 'CIF format is only available for the all schedule'
            end
          elsif !format.eql?(:json)
            raise ArgumentError, 'format must be either :json or :cif'
          end

          # Normalise day so any #to_s value (e.g. a Symbol) is accepted,
          # matching the documented [String, #to_s] parameter type.
          day = day.to_s
          if day.eql?('full')
            day = 'toc-full'
            type = "CIF_#{toc}_FULL_DAILY"
          else
            unless %w[mon tue wed thu fri sat sun].include?(day)
              raise ArgumentError, 'day is invalid'
            end
            day = "toc-update-#{day}"
            type = "CIF_#{toc}_UPDATE_DAILY"
          end

          path = "ntrod/CifFileAuthenticate?type=#{type}&day=#{day}"
          format.eql?(:cif) ? "#{path}.CIF.gz" : path
        end
      end
    end
  end
end
|
# frozen_string_literal: true

module RailFeeds
  module NetworkRail
    module Schedule
      module Header
        # A class to hold the information from the header row of a CIF file.
        class CIF
          # @!attribute [rw] file_identity
          # @!attribute [rw] extracted_at
          #   @return [Time] When the BTD extract happened.
          # @!attribute [rw] current_file_reference
          #   @return [String] Unique reference for the current file.
          # @!attribute [rw] previous_file_reference
          #   @return [String, nil] Unique reference for the previous file
          #   (the one to apply the update to).
          # @!attribute [rw] update_indicator
          #   @return [String] 'F' for a full extract, 'U' for an update extract.
          # @!attribute [rw] version
          #   @return [String] The version of the software that generated the CIF file.
          # @!attribute [rw] start_date
          #   @return [Date]
          # @!attribute [rw] end_date
          #   @return [Date]

          attr_accessor :file_identity, :extracted_at,
                        :current_file_reference, :previous_file_reference,
                        :update_indicator, :version, :start_date, :end_date

          # Set each passed attribute on the new header.
          def initialize(**attributes)
            attributes.each { |key, value| send("#{key}=", value) }
          end

          # Initialize a new header from a CIF file 'HD' line.
          # @raise [ArgumentError] if the line is not an HD record.
          def self.from_cif(line)
            raise ArgumentError, "Invalid line:\n#{line}" unless line[0..1].eql?('HD')

            fields = {
              file_identity: line[2..21].strip,
              extracted_at: Time.strptime(line[22..31] + 'UTC', '%d%m%y%H%M%Z'),
              current_file_reference: line[32..38].strip,
              previous_file_reference: line[39..45].strip,
              update_indicator: line[46].strip,
              version: line[47].strip,
              start_date: Date.strptime(line[48..53], '%d%m%y'),
              end_date: Date.strptime(line[54..59], '%d%m%y')
            }
            new(**fields)
          end

          # Test if this is a header for an update file.
          def update?
            update_indicator.eql?('U')
          end

          # Test if this is a header for a full file.
          def full?
            update_indicator.eql?('F')
          end

          # Headers are equal when their current file references match.
          def ==(other)
            hash == other&.hash
          end

          # NOTE(review): returns the file reference String (not an Integer
          # as Object#hash would) - it backs the #== comparison above.
          def hash
            current_file_reference&.dup
          end

          # Render the header back into an 80 character CIF 'HD' record.
          def to_cif
            fields = [
              'HD',
              format('%-20.20s', file_identity),
              # rubocop:disable Style/FormatStringToken
              format('%-10.10s', extracted_at&.strftime('%d%m%y%H%M')),
              # rubocop:enable Style/FormatStringToken
              format('%-7.7s', current_file_reference),
              format('%-7.7s', previous_file_reference),
              format('%-1.1s', update_indicator),
              format('%-1.1s', version),
              # rubocop:disable Style/FormatStringToken
              format('%-6.6s', start_date&.strftime('%d%m%y')),
              format('%-6.6s', end_date&.strftime('%d%m%y'))
              # rubocop:enable Style/FormatStringToken
            ]
            "#{format('%-80.80s', fields.join)}\n"
          end

          # Human readable summary of the header.
          def to_s
            "File #{file_identity.inspect} (version #{version}) " \
              "at #{extracted_at.strftime('%Y-%m-%d %H:%M')}. " \
              "#{full? ? 'A full' : 'An update'} extract " \
              "for #{start_date} to #{end_date}."
          end
        end
      end
    end
  end
end
|
# frozen_string_literal: true

module RailFeeds
  module NetworkRail
    module Schedule
      module Header
        # A class to hold the information from the header row of a JSON file.
        class JSON
          # @!attribute [rw] extracted_at
          #   @return [Time] When the BTD extract happened.
          # @!attribute [rw] sequence
          #   @return [Integer] Where this file appears in the sequence of extracts.
          #   (Appears to be days since 2012-06-13)
          # @!attribute [r] start_date
          #   @return [Date] Inferred from sequence

          attr_accessor :extracted_at, :sequence

          # The epoch which sequence numbers appear to count days from.
          START_DATE = Date.new 2012, 6, 13
          private_constant :START_DATE

          # Set each passed attribute on the new header.
          def initialize(**attributes)
            attributes.each do |attribute, value|
              send "#{attribute}=", value
            end
          end

          # Initialize a new header from a JSON file line.
          def self.from_json(line)
            data = ::JSON.parse(line)['JsonTimetableV1']
            metadata = data['Metadata']

            new(
              extracted_at: Time.strptime(data['timestamp'].to_s, '%s').utc,
              sequence: metadata['sequence']
            )
          end

          # The date this extract (probably) covers from, inferred from sequence.
          def start_date
            START_DATE + sequence.to_i
          end

          # Order headers by their sequence number.
          def <=>(other)
            hash <=> other&.hash
          end

          # NOTE(review): returns the sequence number (not an Integer digest
          # as Object#hash would) - it backs the #<=> comparison above.
          def hash
            sequence&.dup
          end

          # Render the header back into a JSON timetable line.
          # Accepts (and ignores) optional arguments so the JSON generator can
          # pass its state when this object is nested in another structure.
          # rubocop:disable Metrics/MethodLength
          def to_json(*_args)
            {
              'JsonTimetableV1' => {
                'classification' => 'public',
                'timestamp' => extracted_at.to_i,
                'owner' => 'Network Rail',
                'Sender' => {
                  'organisation' => '',
                  'application' => 'NTROD',
                  'component' => 'SCHEDULE'
                },
                'Metadata' => {
                  'type' => 'full',
                  'sequence' => sequence
                }
              }
            }.to_json
          end
          # rubocop:enable Metrics/MethodLength

          # Human readable summary of the header.
          def to_s
            "Sequence #{sequence}, probably from #{start_date}."
          end
        end
      end
    end
  end
end
|
# frozen_string_literal: true

require_relative 'header/cif'
require_relative 'header/json'

module RailFeeds
  module NetworkRail
    module Schedule
      # Namespace for the schedule header classes, with factory helpers for
      # building the appropriate header from a raw file line.
      module Header # :nodoc:
        class << self
          # Initialize a new header from a CIF file line.
          def from_cif(line)
            CIF.from_cif line
          end

          # Initialize a new header from a JSON file line.
          def from_json(line)
            JSON.from_json line
          end
        end
      end
    end
  end
end
|
# frozen_string_literal: true

module RailFeeds
  module NetworkRail
    module Schedule
      class Parser
        # A class for parsing schedule data read from CIF schedule file(s).
        class CIF < Parser
          # The record type mnemonics this parser knows how to handle.
          UNDERSTOOD_ROWS = %w[
            HD TI TA TD AAN AAD AAR BSN BSD BSR BX LO LI LT CR ZZ
          ].freeze

          # Parse the data on a single CIF line, dispatching to the handler
          # for its record type. Comment lines (leading '/') go to the
          # comment handler; anything else is logged as an error.
          # @param [String] line
          def parse_line(line)
            record_type = UNDERSTOOD_ROWS.find { |type| line.start_with?(type) }

            if record_type
              send "parse_#{record_type.downcase}_line", line.chomp
              nil
            elsif line[0].eql?('/')
              parse_comment_line line.chomp
              nil
            else
              logger.error "Can't understand line: #{line.chomp.inspect}"
            end
          end

          private

          # Header record - announce the start of parsing.
          def parse_hd_line(line)
            header = Header.from_cif(line)
            logger.info "Starting Parse. #{header}"
            @on_header&.call self, header
          end

          # TIPLOC Insert record
          def parse_ti_line(line)
            @on_tiploc_create&.call self, Tiploc.from_cif(line)
          end

          # TIPLOC Amend record
          def parse_ta_line(line)
            tiploc = Tiploc.from_cif(line)
            old_id = tiploc.tiploc
            # NOTE(review): this re-reads columns 2..8, which looks like the
            # same field Tiploc.from_cif parsed - confirm whether the new
            # TIPLOC code should come from elsewhere in the TA record.
            tiploc.tiploc = line[2..8].strip
            @on_tiploc_update&.call self, old_id, tiploc
          end

          # TIPLOC Delete record
          def parse_td_line(line)
            @on_tiploc_delete&.call self, Tiploc.from_cif(line).tiploc
          end

          # Association New record
          def parse_aan_line(line)
            @on_association_create&.call self, Association.from_cif(line)
          end

          # Association Revise record
          def parse_aar_line(line)
            @on_association_update&.call self, Association.from_cif(line)
          end

          # Association Delete record
          def parse_aad_line(line)
            @on_association_delete&.call self, Association.from_cif(line)
          end

          # Train schedule record - basic schedule - new
          def parse_bsn_line(line)
            start_new_train line, :create
          end

          # Train schedule record - basic schedule - revise
          def parse_bsr_line(line)
            start_new_train line, :update
          end

          # Train schedule record - basic schedule - delete
          def parse_bsd_line(line)
            finish_current_train
            train = TrainSchedule.new
            train.update_from_cif line
            @on_train_schedule_delete&.call self, train
          end

          # Train schedule record - basic schedule extra details
          def parse_bx_line(line)
            @current_train.update_from_cif line
          end

          # Train schedule record - origin location
          alias parse_lo_line parse_bx_line
          # Train schedule record - intermediate location
          alias parse_li_line parse_bx_line
          # Train schedule record - change en route
          alias parse_cr_line parse_bx_line
          # Train schedule record - terminating location
          alias parse_lt_line parse_bx_line

          # Start accumulating a new train schedule (finishing any current
          # one first), remembering which callback to fire when it's done.
          def start_new_train(line, action)
            finish_current_train
            @current_train = TrainSchedule.new
            @current_train.update_from_cif line
            @current_train_action = action
          end

          # Fire the create/update callback for the train currently being
          # accumulated (if any), then clear it.
          def finish_current_train
            return if @current_train.nil?

            case @current_train_action
            when :create
              @on_train_schedule_create&.call self, @current_train
            when :update
              @on_train_schedule_update&.call self, @current_train
            end

            @current_train = nil
          end

          # Trailer record - the file is complete.
          def parse_zz_line(_line)
            finish_current_train
            @file_ended = true
            @on_trailer&.call self
          end

          # Comment line (the leading '/' is stripped).
          def parse_comment_line(line)
            @on_comment&.call self, line[1..-1]
          end
        end
      end
    end
  end
end
|
# frozen_string_literal: true

require 'json'

module RailFeeds
  module NetworkRail
    module Schedule
      class Parser
        # A class for parsing schedule data read from JSON schedule file(s).
        class JSON < Parser
          # Dispatch a single line of the file to the relevant handler,
          # identified by the object key the line starts with.
          # Unrecognised lines are logged as errors.
          # @param [String] line
          def parse_line(line)
            return parse_tiploc_line(line) if line.start_with?('{"TiplocV1":')
            return parse_association_line(line) if line.start_with?('{"JsonAssociationV1":')
            return parse_schedule_line(line) if line.start_with?('{"JsonScheduleV1":')
            return parse_header_line(line) if line.start_with?('{"JsonTimetableV1":')
            return parse_trailer_line(line) if line.start_with?('{"EOF":')

            logger.error "Can't understand line: #{line.chomp}"
          end

          private

          # Header line - announce the start of parsing.
          def parse_header_line(line)
            header = Header.from_json(line)
            logger.info "Starting Parse. #{header}"
            @on_header&.call self, header
          end

          # Trailer line - the file is complete.
          def parse_trailer_line(_line)
            @file_ended = true
            @on_trailer&.call self
          end

          # Tiploc line - dispatch by transaction type.
          def parse_tiploc_line(line)
            data = ::JSON.parse(line)['TiplocV1']

            case data['transaction_type'].downcase
            when 'create'
              @on_tiploc_create&.call self, Tiploc.from_json(line)
            when 'delete'
              @on_tiploc_delete&.call self, data['tiploc_code']
            else
              logger.error 'Don\'t know how to ' \
                           "#{data['transaction_type'].inspect} a Tiploc: #{line.chomp}"
            end
          end

          # Association line - dispatch by transaction type.
          def parse_association_line(line)
            data = ::JSON.parse(line)['JsonAssociationV1']

            case data['transaction_type'].downcase
            when 'create'
              @on_association_create&.call self, Association.from_json(line)
            when 'delete'
              @on_association_delete&.call self, Association.from_json(line)
            else
              logger.error 'Don\'t know how to ' \
                           "#{data['transaction_type'].inspect} an Association: " \
                           "#{line.chomp}"
            end
          end

          # Train schedule line - dispatch by transaction type.
          def parse_schedule_line(line)
            data = ::JSON.parse(line)['JsonScheduleV1']

            case data['transaction_type'].downcase
            when 'create'
              @on_train_schedule_create&.call self, TrainSchedule.from_json(line)
            when 'delete'
              @on_train_schedule_delete&.call self, TrainSchedule.from_json(line)
            else
              logger.error 'Don\'t know how to ' \
                           "#{data['transaction_type'].inspect} a Train Schedule: " \
                           "#{line.chomp}"
            end
          end
        end
      end
    end
  end
end