rail_feeds 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +23 -0
- data/.rspec +3 -0
- data/.rubocop.yml +31 -0
- data/.travis.yml +26 -0
- data/CHANGELOG.md +3 -0
- data/Gemfile +6 -0
- data/Guardfile +25 -0
- data/LICENSE.md +32 -0
- data/README.md +77 -0
- data/Rakefile +3 -0
- data/doc/guides/Logging.md +13 -0
- data/doc/guides/Network Rail/CORPUS.md +34 -0
- data/doc/guides/Network Rail/SMART.md +39 -0
- data/doc/guides/Network Rail/Schedule.md +138 -0
- data/file +0 -0
- data/lib/rail_feeds/credentials.rb +45 -0
- data/lib/rail_feeds/logging.rb +51 -0
- data/lib/rail_feeds/network_rail/corpus.rb +77 -0
- data/lib/rail_feeds/network_rail/credentials.rb +22 -0
- data/lib/rail_feeds/network_rail/http_client.rb +57 -0
- data/lib/rail_feeds/network_rail/schedule/association.rb +208 -0
- data/lib/rail_feeds/network_rail/schedule/data.rb +215 -0
- data/lib/rail_feeds/network_rail/schedule/days.rb +95 -0
- data/lib/rail_feeds/network_rail/schedule/fetcher.rb +193 -0
- data/lib/rail_feeds/network_rail/schedule/header/cif.rb +102 -0
- data/lib/rail_feeds/network_rail/schedule/header/json.rb +79 -0
- data/lib/rail_feeds/network_rail/schedule/header.rb +22 -0
- data/lib/rail_feeds/network_rail/schedule/parser/cif.rb +141 -0
- data/lib/rail_feeds/network_rail/schedule/parser/json.rb +87 -0
- data/lib/rail_feeds/network_rail/schedule/parser.rb +108 -0
- data/lib/rail_feeds/network_rail/schedule/stp_indicator.rb +72 -0
- data/lib/rail_feeds/network_rail/schedule/tiploc.rb +100 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/change_en_route.rb +158 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location/intermediate.rb +119 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location/origin.rb +91 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location/terminating.rb +72 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule/location.rb +76 -0
- data/lib/rail_feeds/network_rail/schedule/train_schedule.rb +392 -0
- data/lib/rail_feeds/network_rail/schedule.rb +33 -0
- data/lib/rail_feeds/network_rail/smart.rb +186 -0
- data/lib/rail_feeds/network_rail/stomp_client.rb +77 -0
- data/lib/rail_feeds/network_rail.rb +16 -0
- data/lib/rail_feeds/version.rb +14 -0
- data/lib/rail_feeds.rb +10 -0
- data/rail_feeds.gemspec +32 -0
- data/spec/fixtures/network_rail/schedule/data/full.yaml +60 -0
- data/spec/fixtures/network_rail/schedule/data/starting.yaml +131 -0
- data/spec/fixtures/network_rail/schedule/data/update-gap.yaml +10 -0
- data/spec/fixtures/network_rail/schedule/data/update-next.yaml +13 -0
- data/spec/fixtures/network_rail/schedule/data/update-old.yaml +10 -0
- data/spec/fixtures/network_rail/schedule/data/update.yaml +112 -0
- data/spec/fixtures/network_rail/schedule/parser/train_create.json +1 -0
- data/spec/fixtures/network_rail/schedule/parser/train_delete.json +1 -0
- data/spec/fixtures/network_rail/schedule/train_schedule/json-data.yaml +67 -0
- data/spec/rail_feeds/credentials_spec.rb +46 -0
- data/spec/rail_feeds/logging_spec.rb +81 -0
- data/spec/rail_feeds/network_rail/corpus_spec.rb +92 -0
- data/spec/rail_feeds/network_rail/credentials_spec.rb +22 -0
- data/spec/rail_feeds/network_rail/http_client_spec.rb +88 -0
- data/spec/rail_feeds/network_rail/schedule/association_spec.rb +205 -0
- data/spec/rail_feeds/network_rail/schedule/data_spec.rb +219 -0
- data/spec/rail_feeds/network_rail/schedule/days_shared.rb +99 -0
- data/spec/rail_feeds/network_rail/schedule/days_spec.rb +4 -0
- data/spec/rail_feeds/network_rail/schedule/fetcher_spec.rb +228 -0
- data/spec/rail_feeds/network_rail/schedule/header/cif_spec.rb +72 -0
- data/spec/rail_feeds/network_rail/schedule/header/json_spec.rb +51 -0
- data/spec/rail_feeds/network_rail/schedule/header_spec.rb +19 -0
- data/spec/rail_feeds/network_rail/schedule/parser/cif_spec.rb +197 -0
- data/spec/rail_feeds/network_rail/schedule/parser/json_spec.rb +172 -0
- data/spec/rail_feeds/network_rail/schedule/parser_spec.rb +34 -0
- data/spec/rail_feeds/network_rail/schedule/stp_indicator_shared.rb +49 -0
- data/spec/rail_feeds/network_rail/schedule/stp_indicator_spec.rb +4 -0
- data/spec/rail_feeds/network_rail/schedule/tiploc_spec.rb +77 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/change_en_route_spec.rb +121 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location/intermediate_spec.rb +95 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location/origin_spec.rb +87 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location/terminating_spec.rb +81 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule/location_spec.rb +35 -0
- data/spec/rail_feeds/network_rail/schedule/train_schedule_spec.rb +284 -0
- data/spec/rail_feeds/network_rail/schedule_spec.rb +41 -0
- data/spec/rail_feeds/network_rail/smart_spec.rb +194 -0
- data/spec/rail_feeds/network_rail/stomp_client_spec.rb +151 -0
- data/spec/rail_feeds/network_rail_spec.rb +7 -0
- data/spec/rail_feeds_spec.rb +11 -0
- data/spec/spec_helper.rb +47 -0
- metadata +282 -0
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Stand-in for RailFeeds::NetworkRail::Schedule::Parser::CIF used by the Data
# specs. Instead of parsing real CIF data it replays events recorded in a
# YAML fixture, invoking the same callback procs the real parser would.
class DummyParserForDataTests
  # logger: accepted (and discarded) so the signature matches the real parser.
  # procs:  event name => proc, invoked as events are replayed.
  def initialize(logger:, **procs)
    _logger = logger # interface compatibility only; the dummy never logs
    @procs = procs
    @events = {}
  end

  # Replays the events stored in the named fixture.
  # file - basename (without extension) of a YAML file in
  #        spec/fixtures/network_rail/schedule/data/.
  # Each fixture entry is [event, args]; a nil args means the proc is
  # called with just the parser, otherwise args are splatted after it.
  def parse_cif_file(file)
    filename = File.join RSPEC_FIXTURES, 'network_rail', 'schedule', 'data', "#{file}.yaml"
    # Psych 4 made YAML.load refuse to instantiate arbitrary classes, and the
    # fixtures serialise schedule objects - use unsafe_load_file when available.
    events = if YAML.respond_to?(:unsafe_load_file)
               YAML.unsafe_load_file(filename)
             else
               YAML.load(File.read(filename))
             end
    events.each do |event, data|
      if data.nil?
        @procs[event].call self
      else
        @procs[event].call self, *data
      end
    end
  end
end
|
|
21
|
+
|
|
22
|
+
describe RailFeeds::NetworkRail::Schedule::Data do
  before :each do
    # Swap the real CIF parser for the fixture-replaying dummy defined above.
    expect(RailFeeds::NetworkRail::Schedule::Parser::CIF).to receive(:new) do |**args|
      DummyParserForDataTests.new(**args)
    end
  end

  subject do |example|
    data = described_class.new
    unless example.metadata[:skip_load_starting_data]
      # Populate with starting data
      filename = File.join RSPEC_FIXTURES, 'network_rail', 'schedule', 'data', 'starting.yaml'
      # Psych 4 made YAML.load refuse to instantiate arbitrary classes, and the
      # fixture serialises schedule objects - use unsafe_load_file when available.
      starting_data = if YAML.respond_to?(:unsafe_load_file)
                        YAML.unsafe_load_file(filename)
                      else
                        YAML.load(File.read(filename))
                      end
      data.associations.clear
      data.tiplocs.clear
      data.trains.clear
      data.instance_exec(starting_data[:last_header]) { |h| @last_header = h }
      starting_data[:associations].each { |i| data.associations[i.hash] = i }
      starting_data[:trains].each do |uid, schedules|
        data.trains[uid] = []
        schedules.each do |i|
          data.trains[uid].push i
        end
      end
      starting_data[:tiplocs].each { |i| data.tiplocs[i.hash] = i }
    end
    data
  end

  describe '#load_cif_file' do
    describe 'Loads data' do
      context 'A full extract' do
        before(:each) { subject.load_cif_file('full') }

        describe 'Replaces data' do
          it 'Headers' do
            expect(subject.last_header.current_file_reference).to eq 'DFROC2Q'
          end

          it 'Tiplocs' do
            expect(subject.tiplocs.values.map(&:tiploc)).to eq ['1', '2', '3']
          end

          it 'Associations' do
            expect(subject.associations.values.map(&:main_location_suffix)).to eq ['a', 'b', 'c']
            expect(subject.associations.values.map(&:category)).to eq ['JJ', 'JJ', 'JJ']
          end

          it 'Trains' do
            expect(subject.trains.values.flatten.map(&:signalling_headcode)).to eq [
              '1A11',
              '2B22',
              '3C33'
            ]
          end
        end
      end

      context 'An update extract' do
        before(:each) do
          subject.load_cif_file 'full'
          subject.load_cif_file 'update'
        end

        describe 'Updates data' do
          it 'Headers' do
            expect(subject.last_header.current_file_reference).to eq 'DFROC1L'
          end

          it 'Tiplocs' do
            expect(subject.tiplocs.values.map(&:tiploc)).to eq ['1', '3a', '4', '5a']
          end

          it 'Associations' do
            expect(subject.associations.values.map(&:main_location_suffix)).to eq ['a', 'c', 'd', 'e']
            expect(subject.associations.values.map(&:category)).to eq ['JJ', 'VV', 'JJ', 'JJ']
          end

          it 'Trains' do
            expect(subject.trains.values.flatten.map(&:signalling_headcode)).to eq [
              '1A11',
              '3c33',
              '4D44',
              '5E55'
            ]
          end
        end
      end

      describe 'Errors on incorrect sequence', :skip_load_starting_data do
        it 'Update applied after a full extract' do
          subject.load_cif_file 'full'
          expect { subject.load_cif_file('update') }.to_not raise_error
        end

        it 'Update applied after previous update extract' do
          subject.load_cif_file 'full'
          subject.load_cif_file 'update'
          expect { subject.load_cif_file('update-next') }.to_not raise_error
        end

        it 'Update applied with a gap' do
          subject.load_cif_file 'full'
          subject.load_cif_file 'update'
          message = 'Missing update(s). Last applied update is "DFROC1L", ' \
                    'this update requires "DFROC1M" to be the previous applied update.'
          expect { subject.load_cif_file('update-gap') }.to raise_error ArgumentError, message
        end

        it 'Update applied too old update' do
          subject.load_cif_file 'full'
          subject.load_cif_file 'update'
          message = 'Update is too old, it is before the last applied update.'
          expect { subject.load_cif_file('update-old') }.to raise_error ArgumentError, message
        end

        it 'Update applied before a full extract' do
          message = 'Update can\'t be loaded before loading a full extract.'
          expect { subject.load_cif_file('update') }.to raise_error ArgumentError, message
        end
      end
    end
  end

  it '#generate_cif' do
    lines = []
    subject.load_cif_file 'full'
    subject.load_cif_file 'update'
    subject.generate_cif { |line| lines.push line }
    # NOTE(review): CIF records are fixed-width (80 character) lines; runs of
    # padding spaces in the expected strings below appear to have been
    # collapsed when this file was extracted - verify each record against the
    # generator's actual output before relying on these expectations.
    expect(lines).to eq([
      '/!! Start of file',
      '/!! Generated: 18/06/2018 19:45',
      'HD 1806181945 F 190618180619 ',
      'TI1 ',
      'TI3a ',
      'TI4 ',
      'TI5a ',
      'AAN 1111100JJ 1 a T ',
      'AAN 1111100VV 3 c T ',
      'AAN 1111100JJ 4 d T ',
      'AAN 1111100JJ 5 e T ',
      'BSN1 1111100 1A11 1 ',
      'BX N ',
      'BSN3 1111100 3c33 1 ',
      'BX N ',
      'BSN4 1111100 4D44 1 ',
      'BX N ',
      'LO1 1111 1111 0 0 0 ',
      'CR2 ',
      'LI2 1111 1111 0 0 0 ',
      'LT3 2222 2222 ',
      'BSN5 1111100 5E55 1 ',
      'BX N ',
      'ZZ ',
      '/!! End of file'
    ].map { |i| "#{i}\n" })
  end

  describe '#fetch_data' do
    let(:fetcher) { double RailFeeds::NetworkRail::Schedule::Fetcher }
    before :each do
      expect(RailFeeds::NetworkRail::Schedule::Fetcher)
        .to receive(:new).and_return(fetcher)
    end

    it 'Gets a full update if empty of data', :skip_load_starting_data do
      full_file = StringIO.new
      update_file = StringIO.new

      expect(fetcher).to receive(:fetch_all).and_yield(full_file).and_yield(update_file)
      expect(subject).to receive(:load_cif_file).with(full_file)
      expect(subject).to receive(:load_cif_file).with(update_file)

      subject.fetch_data
    end

    it 'Gets a full update if not updated in last week' do
      Timecop.freeze 2018, 6, 21
      full_file = StringIO.new
      update_file = StringIO.new

      expect(fetcher).to receive(:fetch_all).and_yield(full_file).and_yield(update_file)
      expect(subject).to receive(:load_cif_file).with(full_file)
      expect(subject).to receive(:load_cif_file).with(update_file)

      subject.fetch_data
    end

    it 'Just gets updates if updated in last week' do
      Timecop.freeze 2018, 6, 20
      update_file = StringIO.new

      expect(fetcher).to receive(:fetch_all_updates).and_yield(update_file)
      expect(subject).to receive(:load_cif_file).with(update_file)

      subject.fetch_data
    end
  end
end
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
# Shared examples for any class exposing a 7-element #days array
# (Monday..Sunday) settable from either a boolean array or a '1'/'0' string.
shared_examples 'it has a days array' do
  describe 'Setting valid values' do
    it 'Array of 7 booleans' do
      subject.days = [false, true, false, true, false, true, false]
      expect(subject.days).to eq [false, true, false, true, false, true, false]
    end

    it 'String with length of 7' do
      subject.days = '1010101'
      expect(subject.days).to eq [true, false, true, false, true, false, true]
    end
  end

  describe 'Setting invalid values' do
    # Too-short input leaves trailing nils; too-long input is truncated.
    it 'Array of 6 booleans' do
      subject.days = [false, true] * 3
      expect(subject.days).to eq [false, true, false, true, false, true, nil]
    end
    it 'Array of 8 booleans' do
      subject.days = [false, true] * 4
      expect(subject.days).to eq [false, true, false, true, false, true, false]
    end

    it 'String with length of 6' do
      subject.days = '10' * 3
      expect(subject.days).to eq [true, false, true, false, true, false, nil]
    end
    it 'String with length of 8' do
      subject.days = '10' * 4
      expect(subject.days).to eq [true, false, true, false, true, false, true]
    end
  end

  describe 'Query methods' do
    before(:each) { subject.days = '0000000' }

    # One example per day-of-week predicate: false with no days set,
    # true once that day's position is flagged.
    %w[mondays tuesdays wednesdays thursdays fridays saturdays sundays]
      .each_with_index do |day, index|
        it "##{day}?" do
          predicate = :"#{day}?"
          expect(subject.public_send(predicate)).to be false
          subject.days = ('0' * 7).tap { |s| s[index] = '1' }
          expect(subject.public_send(predicate)).to be true
        end
      end
  end

  describe 'Conversion methods' do
    it '#days_to_cif' do
      expect(described_class.days_to_cif(Array.new(7, true))).to eq '1111111'
      expect(described_class.days_to_cif(Array.new(7, false))).to eq '0000000'
    end

    describe '#days_from_cif' do
      it 'String of 1 or 0' do
        expect(described_class.days_from_cif('1111111')).to eq Array.new(7, true)
        expect(described_class.days_from_cif('0000000')).to eq Array.new(7, false)
      end

      it 'nil' do
        expect(described_class.days_from_cif(nil)).to eq Array.new(7, nil)
      end
    end
  end
end
|
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
describe RailFeeds::NetworkRail::Schedule::Fetcher do
  let(:http_client) { double RailFeeds::NetworkRail::HTTPClient }
  let(:reader) { double Zlib::GzipReader }

  # Expects a new HTTP client to be built and +path+ downloaded to 'file'.
  def expect_download(path)
    expect(RailFeeds::NetworkRail::HTTPClient).to receive(:new).and_return(http_client)
    expect(http_client).to receive(:download).with(path, 'file')
  end

  # Expects a new HTTP client to be built and +path+ fetched (unzipped),
  # yielding the gzip reader double.
  def expect_fetch(path)
    expect(RailFeeds::NetworkRail::HTTPClient).to receive(:new).and_return(http_client)
    expect(http_client).to receive(:fetch_unzipped).with(path).and_yield(reader)
  end

  describe '#download_all_full' do
    it 'CIF format' do
      expect_download 'ntrod/CifFileAuthenticate?type=CIF_ALL_FULL_DAILY&day=toc-full.CIF.gz'
      subject.download_all_full :cif, 'file'
    end

    it 'JSON format' do
      expect_download 'ntrod/CifFileAuthenticate?type=CIF_ALL_FULL_DAILY&day=toc-full'
      subject.download_all_full :json, 'file'
    end

    it 'Invalid format' do
      expect { subject.download_all_full :invalid, 'file' }
        .to raise_error ArgumentError, 'format must be either :json or :cif'
    end
  end

  describe '#download_all_update' do
    it 'CIF format' do
      expect_download 'ntrod/CifFileAuthenticate?type=CIF_ALL_UPDATE_DAILY&day=toc-update-mon.CIF.gz'
      subject.download_all_update 'mon', :cif, 'file'
    end

    it 'JSON format' do
      expect_download 'ntrod/CifFileAuthenticate?type=CIF_ALL_UPDATE_DAILY&day=toc-update-tue'
      subject.download_all_update 'tue', :json, 'file'
    end

    it 'Invalid format' do
      expect { subject.download_all_update 'wed', :invalid, 'file' }
        .to raise_error ArgumentError, 'format must be either :json or :cif'
    end

    it 'Invalid day' do
      expect { subject.download_all_update 'BAD', :json, 'file' }
        .to raise_error ArgumentError, 'day is invalid'
    end
  end

  it '#download_freight_full' do
    expect_download 'ntrod/CifFileAuthenticate?type=CIF_FREIGHT_FULL_DAILY&day=toc-full'
    subject.download_freight_full 'file'
  end

  describe '#download_freight_update' do
    it 'Valid day' do
      expect_download 'ntrod/CifFileAuthenticate?type=CIF_FREIGHT_UPDATE_DAILY&day=toc-update-mon'
      subject.download_freight_update 'mon', 'file'
    end

    it 'Invalid day' do
      expect { subject.download_freight_update('BAD', 'file') {} }
        .to raise_error ArgumentError, 'day is invalid'
    end
  end

  it '#download_toc_full' do
    expect_download 'ntrod/CifFileAuthenticate?type=CIF_TT_TOC_FULL_DAILY&day=toc-full'
    subject.download_toc_full 'TT', 'file'
  end

  describe '#download_toc_update' do
    it 'Valid day' do
      expect_download 'ntrod/CifFileAuthenticate?type=CIF_TT_TOC_UPDATE_DAILY&day=toc-update-wed'
      subject.download_toc_update 'TT', 'wed', 'file'
    end

    it 'Invalid day' do
      expect { subject.download_toc_update 'TT', 'BAD', 'file' }
        .to raise_error ArgumentError, 'day is invalid'
    end
  end

  describe '#download' do
    it 'Passes credentials and logger to HTTP client' do
      credentials = double RailFeeds::NetworkRail::Credentials
      logger = double Logger
      expect(RailFeeds::NetworkRail::HTTPClient)
        .to receive(:new).with(credentials: credentials, logger: logger)
        .and_return(http_client)
      expect(http_client).to receive(:download).and_return(reader)
      subject = described_class.new credentials: credentials, logger: logger
      subject.send :download, 'toc', 'mon', :json, 'file'
    end

    it 'CIF format request for non all schedule' do
      expect { subject.send(:download, 'toc', 'mon', :cif, 'file') {} }
        .to raise_error ArgumentError, 'CIF format is only available for the all schedule'
    end
  end

  describe '#fetch_all_full' do
    it 'CIF format' do
      expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_ALL_FULL_DAILY&day=toc-full.CIF.gz'
      expect { |a| subject.fetch_all_full(:cif, &a) }.to yield_with_args(reader)
    end

    it 'JSON format' do
      expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_ALL_FULL_DAILY&day=toc-full'
      expect { |a| subject.fetch_all_full(:json, &a) }.to yield_with_args(reader)
    end

    it 'Invalid format' do
      expect { subject.fetch_all_full :invalid }
        .to raise_error ArgumentError, 'format must be either :json or :cif'
    end
  end

  describe '#fetch_all_update' do
    it 'CIF format' do
      expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_ALL_UPDATE_DAILY&day=toc-update-mon.CIF.gz'
      expect { |a| subject.fetch_all_update('mon', :cif, &a) }.to yield_with_args(reader)
    end

    it 'JSON format' do
      expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_ALL_UPDATE_DAILY&day=toc-update-tue'
      expect { |a| subject.fetch_all_update('tue', :json, &a) }.to yield_with_args(reader)
    end

    it 'Invalid format' do
      expect { subject.fetch_all_update 'wed', :invalid }
        .to raise_error ArgumentError, 'format must be either :json or :cif'
    end

    it 'Invalid day' do
      expect { subject.fetch_all_update 'BAD', :json }
        .to raise_error ArgumentError, 'day is invalid'
    end
  end

  it '#fetch_freight_full' do
    expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_FREIGHT_FULL_DAILY&day=toc-full'
    expect { |a| subject.fetch_freight_full(&a) }.to yield_with_args(reader)
  end

  describe '#fetch_freight_update' do
    it 'Valid day' do
      expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_FREIGHT_UPDATE_DAILY&day=toc-update-mon'
      expect { |a| subject.fetch_freight_update('mon', &a) }.to yield_with_args(reader)
    end

    it 'Invalid day' do
      expect { subject.fetch_freight_update('BAD') {} }
        .to raise_error ArgumentError, 'day is invalid'
    end
  end

  it '#fetch_toc_full' do
    expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_TT_TOC_FULL_DAILY&day=toc-full'
    expect { |a| subject.fetch_toc_full('TT', &a) }.to yield_with_args(reader)
  end

  describe '#fetch_toc_update' do
    it 'Valid day' do
      expect_fetch 'ntrod/CifFileAuthenticate?type=CIF_TT_TOC_UPDATE_DAILY&day=toc-update-wed'
      expect { |a| subject.fetch_toc_update('TT', 'wed', &a) }.to yield_with_args(reader)
    end

    it 'Invalid day' do
      expect { subject.fetch_toc_update 'TT', 'BAD' }
        .to raise_error ArgumentError, 'day is invalid'
    end
  end

  describe '#fetch' do
    it 'Passes credentials and logger to HTTP client' do
      credentials = double RailFeeds::NetworkRail::Credentials
      logger = double Logger
      expect(RailFeeds::NetworkRail::HTTPClient)
        .to receive(:new).with(credentials: credentials, logger: logger)
        .and_return(http_client)
      expect(http_client).to receive(:fetch_unzipped).and_return(reader)
      subject = described_class.new credentials: credentials, logger: logger
      subject.send :fetch, 'toc', 'mon', :json
    end

    it 'CIF format request for non all schedule' do
      expect { subject.send(:fetch, 'toc', 'mon', :cif) {} }
        .to raise_error ArgumentError, 'CIF format is only available for the all schedule'
    end
  end
end
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
describe RailFeeds::NetworkRail::Schedule::Header::CIF do
  let(:line) do
    # Fixed-width (80 character) CIF HD record:
    #   'HD' + file identity(20) + date(6) + time(4) + current file ref(7) +
    #   previous file ref(7) + update indicator(1) + version(1) +
    #   start date(6) + end date(6) + spare(20)
    # Reconstructed with full padding so position 46 is the update indicator
    # ('d'), which the helper-method contexts below overwrite.
    # Unary + gives a mutable copy despite frozen_string_literal.
    +'HDa                   0102030405b      c      de060708091011                    '
  end
  subject { described_class.from_cif line }

  describe 'Sets attributes' do
    # Table-driven: attribute read from the parsed header => expected value.
    {
      file_identity: 'a',
      extracted_at: Time.new(2003, 2, 1, 4, 5, 0, 0),
      current_file_reference: 'b',
      previous_file_reference: 'c',
      update_indicator: 'd',
      version: 'e',
      start_date: Date.new(2008, 7, 6),
      end_date: Date.new(2011, 10, 9)
    }.each do |attribute, value|
      it(":#{attribute}") { expect(subject.public_send(attribute)).to eq value }
    end
  end

  describe 'Helper methods' do
    context 'A full extract' do
      before(:each) { line[46] = 'F' } # position 46 = update indicator
      it { should_not be_update }
      it { should be_full }
    end

    context 'An update extract' do
      before(:each) { line[46] = 'U' }
      it { should be_update }
      it { should_not be_full }
    end
  end

  it '#to_cif' do
    expect(subject.to_cif).to eq "#{line}\n"
  end

  describe '#hash' do
    it 'Uses current_file_reference' do
      expect(subject.hash).to eq 'b'
    end
  end

  describe '#==' do
    let(:header1) { described_class.new current_file_reference: 'a' }
    let(:header2) { described_class.new current_file_reference: 'a' }

    it 'Doesn\'t match' do
      header1.current_file_reference = nil
      expect(header1).to_not eq header2
    end

    it 'Matches' do
      expect(header1).to eq header2
    end

    it 'Compares to nil without error' do
      expect(header1).to_not eq nil
    end
  end

  it '#to_s' do
    expect(subject.to_s).to eq 'File "a" (version e) at ' \
                               '2003-02-01 04:05. An update extract for ' \
                               '2008-07-06 to 2011-10-09.'
  end

  it 'Fails to initalize from invalid line' do
    expect { described_class.from_cif('bad line') }
      .to raise_error ArgumentError, "Invalid line:\nbad line"
  end
end
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
describe RailFeeds::NetworkRail::Schedule::Header::JSON do
  subject { described_class.new sequence: 2200 }

  it '::from_json' do
    json = '{"JsonTimetableV1":{"timestamp":1529712268,"Metadata":{"sequence":2200}}}'
    subject = described_class.from_json json

    expect(subject.extracted_at).to eq Time.new(2018, 6, 23, 0, 4, 28, 0)
    expect(subject.sequence).to eq 2200
    expect(subject.start_date).to eq Date.new 2018, 6, 22
  end

  it '#to_json' do
    expected = '{"JsonTimetableV1":{"classification":"public","timestamp":0,' \
               '"owner":"Network Rail","Sender":{"organisation":"",' \
               '"application":"NTROD","component":"SCHEDULE"},' \
               '"Metadata":{"type":"full","sequence":2200}}}'
    expect(subject.to_json).to eq expected
  end

  describe '#hash' do
    it 'Uses sequence' do
      expect(subject.hash).to eq 2200
    end
  end

  describe '#<=>' do
    let(:header1) { described_class.new sequence: 1 }
    let(:header2) { described_class.new sequence: 1 }

    it 'Doesn\'t match' do
      header2.sequence = 2
      expect(header1.<=>(header2)).to eq(-1)
      expect(header2.<=>(header1)).to eq 1
    end

    it 'Matches' do
      expect(header1.<=>(header2)).to eq 0
    end

    it 'Compares to nil without error' do
      expect { header1 <=> nil }.to_not raise_error
    end
  end

  it '#to_s' do
    # NOTE(review): 'proabbly' presumably mirrors a typo in Header::JSON#to_s
    # itself - fix it there first, then update this expectation to match.
    expect(subject.to_s).to eq 'Sequence 2200, proabbly from 2018-06-22.'
  end
end
|