serienrenamer 0.0.14 → 0.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/bin/serienrenamer +0 -6
- data/lib/serienrenamer/plugin/episode_identifier.rb +35 -0
- data/lib/serienrenamer/plugin/serienjunkies_de.rb +131 -0
- data/lib/serienrenamer/plugin/serienjunkies_feed.rb +112 -0
- data/lib/serienrenamer/plugin/serienjunkies_org.rb +181 -0
- data/lib/serienrenamer/plugin/textfile.rb +57 -0
- data/lib/serienrenamer/plugin/wikipedia.rb +448 -0
- data/lib/serienrenamer/plugin.rb +26 -0
- data/lib/serienrenamer/version.rb +1 -1
- data/lib/serienrenamer.rb +4 -25
- data/serienrenamer.gemspec +1 -0
- data/test/test_helper.rb +1 -1
- data/test/test_plugin_episode_identifier.rb +1 -1
- data/test/test_plugin_serienjunkies_de.rb +3 -3
- data/test/test_plugin_serienjunkies_feed.rb +1 -1
- data/test/{test_serienjunkies_org.rb → test_plugin_serienjunkies_org.rb} +3 -3
- data/test/test_plugin_textfile.rb +3 -3
- metadata +12 -12
- data/lib/plugin/episode_identifier.rb +0 -33
- data/lib/plugin/serienjunkies_de.rb +0 -129
- data/lib/plugin/serienjunkies_feed.rb +0 -110
- data/lib/plugin/serienjunkies_org.rb +0 -179
- data/lib/plugin/textfile.rb +0 -54
- data/lib/plugin/wikipedia.rb +0 -446
- data/lib/plugin.rb +0 -8
@@ -0,0 +1,448 @@
|
|
1
|
+
# encoding: UTF-8
|
2
|
+
require 'media_wiki'
|
3
|
+
|
4
|
+
module Serienrenamer
|
5
|
+
module Plugin
|
6
|
+
|
7
|
+
# This Plugin tries to extract the series
|
8
|
+
# information from wikipedia
|
9
|
+
#
|
10
|
+
# (by now only the german wikipedia)
|
11
|
+
# This plugin tries to extract the series information from
# wikipedia (by now only the german wikipedia).
class Wikipedia < Serienrenamer::Pluginbase

  def self.plugin_name; "Wikipedia" end
  def self.usable; true end
  def self.priority; 30 end

  # API endpoint of the german wikipedia
  WIKIPEDIA_URL = 'http://de.wikipedia.org/w/api.php'

  # patterns used in this class (formerly class variables; they are
  # only referenced inside this class, so frozen constants are safe)
  EPISODE_TABLE_PATTERN = /.*(?<table>\{\{Episodenlistentabelle.*\}\})\s*$/m
  EPISODE_ENTRY_PATTERN = /\{\{Episodenlisteneintrag|S-Episode/
  SERIES_SITE_TEST_PATTERN = /\{\{Infobox.Fernsehsendung.*\}\}/m
  DISAMBIGUATION_TEST_PATTERN = /\{\{Begriffsklärung\}\}/m
  CONTAINS_LINK_TO_EPISODE_LIST = /Hauptartikel.*(?<main>Liste.*?)[\]\}]+/
  CONTAINS_INARTICLE_EPISODE_LIST = /\<div.*\>Staffel.(\d+).*\<\/div\>.*class=\"wikitable\".*titel/m
  INPAGE_SEASON_SEPARATOR = /\<div.style=\"clear:both\;.class=\"NavFrame\"\>/
  WIKITABLE_EXTRACT_PATTERN = /(\{\|.class=\"wikitable\".*\|\})\n/m
  IS_ONE_LINE_EPISODE_LIST = /\|.*\|\|.*\|\|.*\|\|/m

  # This method will be called from the main program with a
  # Serienrenamer::Episode instance as parameter.
  #
  # episode - Serienrenamer::Episode to look up
  #
  # Returns an Array of episode name candidates (possibly empty).
  # Raises ArgumentError if episode is not a Serienrenamer::Episode.
  def self.generate_episode_information(episode)
    raise ArgumentError, "Serienrenamer::Episode instance needed" unless
      episode.is_a?(Serienrenamer::Episode)

    return [] unless episode.series.match(/\w+/)

    # per-series cache so each series page is only fetched once
    @cached_data ||= {}

    wiki = MediaWiki::Gateway.new(WIKIPEDIA_URL)

    unless @cached_data.has_key?(episode.series)
      series_site = find_series_page(wiki, episode.series)
      return [] unless series_site

      # look for a link to a list of episodes
      pagedata = wiki.get(series_site)

      if contains_link_to_episode_list?(pagedata)
        mainarticle = pagedata.match(CONTAINS_LINK_TO_EPISODE_LIST)[:main]
        if mainarticle
          episodelist_page = wiki.get(mainarticle)
          @cached_data[episode.series] = parse_episodelist_page_data(episodelist_page)
        end

      elsif contains_inarticle_episode_list?(pagedata)
        @cached_data[episode.series] = parse_inarticle_episodelist_page_data(pagedata)

      else
        warn "no episode list found"
        return []
      end
    end

    episode_names = []

    # tries to find an episodename in cached_data,
    # otherwise returns an empty array
    begin
      series = @cached_data[episode.series]
      episodename = series[episode.season][episode.episode]
      episode_names.push(episodename) if episodename.match(/\w+/)
    rescue StandardError
      # season/episode not present in the cached data -> no candidates
    end

    episode_names
  end

  # Searches wikipedia for the main page of the given series.
  #
  # wiki        - MediaWiki::Gateway instance
  # series_name - String with the series name
  #
  # Returns the page title or nil if no series main page was found.
  def self.find_series_page(wiki, series_name)
    tries = 3
    search_pattern = series_name
    search_pattern_modified = false

    begin
      wiki.search(search_pattern, nil, 15).each do |title|
        return title if is_series_main_page?(wiki.get(title))
      end

      # modify the search term so that it contains only the last
      # word and retry once (helps when the full name finds nothing)
      unless search_pattern_modified
        md = search_pattern.match(/(\w+)\s*$/)
        # was: search_pattern.match(...)[1] which crashed on a nil
        # match, and 'raise EOFError if search_pattern' which was
        # always true (a String is truthy)
        if md
          search_pattern = md[1]
          search_pattern_modified = true
          raise EOFError # break out of begin and retry
        end
      end
    rescue MediaWiki::APIError
      tries -= 1
      retry if tries > 0
    rescue EOFError
      retry
    end

    nil
  end

  # This method will extract season based information from a string
  # that contains a wikipedia episodelist page.
  #
  # Returns an Array of Arrays with episode information where
  # episode and season numbers are the indices.
  def self.parse_episodelist_page_data(pagedata, debug=false)
    raise ArgumentError, 'String with pagedata expected' unless
      pagedata.is_a?(String)

    series_data = []
    is_season_table_following = false
    season_number = nil

    # split the wikipedia page by headings and process the following
    # paragraph if the heading starts with 'Staffel'
    pagedata.split(/(==.*)==/).each do |paragraph|
      if paragraph.match(/^==.*Staffel/)
        match = paragraph.match(/^==.*Staffel.(?<seasonnr>\d+)/)
        if match
          season_number = match[:seasonnr].to_i
          is_season_table_following = true
        end
      elsif is_season_table_following
        # extract season table from this paragraph
        series_data[season_number] = parse_season_table(paragraph)
        is_season_table_following = false
      end
    end

    series_data
  end

  # This method will be called with a wikipedia seasontable as
  # parameter, extracts all episodes from it and returns them as an
  # array where the episode number is the index.
  def self.parse_season_table(table)
    raise ArgumentError, 'String with seasontable expected' unless
      table.is_a?(String)

    season_data = []

    matched_table = table.match(EPISODE_TABLE_PATTERN)
    if matched_table

      # extract all episode entries that look like the following
      #
      # {{Episodenlisteneintrag
      # | NR_GES = 107
      # | NR_ST = 1
      # | OT = The Mastodon in the Room
      # | DT = Die Rückkehr der Scheuklappen
      # | ZF =
      # | EA = {{dts|23|09|2010}}
      # | EAD = {{dts|08|09|2011}}
      # }}
      episodes = matched_table[:table].split(EPISODE_ENTRY_PATTERN)
      if episodes
        episodes.each do |epi|

          # build up a hash from the entry
          infos = {}
          epi.lines.each do |part|
            parts = part.strip.match(/(?<key>\w+).=.(?<value>.*)$/)
            infos[parts[:key].strip] = parts[:value].strip if parts
          end

          next unless infos.has_key?('NR_ST')

          # extract useful information and add it to the array;
          # to_i never returns nil, so reject non-positive numbers
          # (was: 'next unless epi_nr' which never skipped)
          epi_nr = infos['NR_ST'].to_i
          next unless epi_nr > 0

          # TODO make the following variable
          # guard: entries without a german title crashed on nil.strip
          next unless infos.has_key?('DT')
          epi_name = infos['DT'].strip

          # remove all html tags and all following text from the
          # episode name and the bold syntax from mediawiki [[text]]
          epi_name.gsub!(/<\/?[^>]*>.*/, "")
          epi_name.gsub!(/[\[\[\]\]]/, "")
          next unless epi_name.match(/\w+/)

          season_data[epi_nr] = epi_name
        end
      end
    end

    season_data
  end

  # This method will extract season based information from a string
  # that contains a series page with an episodelist included.
  #
  # Returns an Array of Arrays with episode information where
  # episode and season numbers are the indices.
  # Raises ArgumentError when no episodelist is found.
  def self.parse_inarticle_episodelist_page_data(pagedata, debug=false)
    raise ArgumentError, 'String with pagedata expected' unless
      pagedata.is_a?(String)

    series_data = []

    # look for a paragraph with an episodelist
    episodelist_paragraph = pagedata.split(/==.*==/).find { |p|
      contains_inarticle_episode_list?(p) }

    raise ArgumentError, 'no episodelist found' unless episodelist_paragraph

    # iterate through all seasons in this episode table
    episodelist_paragraph.split(INPAGE_SEASON_SEPARATOR).each do |season|
      next unless contains_inarticle_episode_list?(season)

      season_nr = season.match(CONTAINS_INARTICLE_EPISODE_LIST)[1].to_i

      # guard: a season block without a wikitable crashed on nil[1]
      wikitable_md = season.match(WIKITABLE_EXTRACT_PATTERN)
      next unless wikitable_md
      wikitable = wikitable_md[1]

      # we have to detect the type of the inarticle season page
      # because there are two different kinds of table structures
      # used in the german wikipedia
      episodes =
        if is_episode_list_with_one_episode_per_line?(wikitable)
          parse_inarticle_season_table_with_one_line(wikitable)
        else
          parse_inarticle_season_table(wikitable)
        end

      # HACK if a season is splitted into different parts
      # eg. Flashpoint (2.1 and 2.2) than merge that if possible
      if series_data[season_nr]
        series_data[season_nr].each_with_index do |item, index|
          episodes[index] = item unless episodes[index]
        end
      end

      series_data[season_nr] = episodes
    end

    series_data
  end

  # This method will be called with a wikitable for a season as
  # parameter, extracts all episodes from it and returns them as an
  # array where the episode number is the index.
  #
  # Example for a wikitable for episodes:
  #
  # {| class="wikitable" width="100%"
  # |- vertical-align: top; text-align:center; "
  # | width="15" | '''Nummer''' <br /><small>(Gesamt)<small>
  # | width="15" | '''Nummer''' <br /><small>(Staffel)<small>
  # ! width="250" | Originaltitel
  # ! width="250" | Deutscher Titel
  # ! width="180" | Erstausstrahlung<br /><small>(USA Network)</small>
  # |-
  # | bgcolor="#DFEEEF"| 01
  # | 01
  # | ''Pilot''
  # | ''Auch Reiche sind nur Menschen''
  # | 4. Mai 2009
  # |-
  # |}
  #
  def self.parse_inarticle_season_table(table)
    raise ArgumentError, 'String with seasontable expected' unless
      table.is_a?(String)

    season_data = []
    episode_nr_line_nr = nil
    episode_name_line_nr = nil

    table.split(/^\|\-.*$/).each do |tablerow|
      tablerow.strip!

      # skip invalid rows
      lines = tablerow.lines.to_a
      next unless lines.length >= 4

      if tablerow.match(/width=\"\d+\"/)
        # table header: remember which line of a row holds which
        # piece of information
        lines.each_with_index do |item, index|
          if item.match(/Nummer.*Staffel/i)
            episode_nr_line_nr = index

          # TODO make the following more variable
          elsif item.match(/Deutscher.*Titel/i)
            episode_name_line_nr = index
          end
        end
      elsif episode_nr_line_nr && episode_name_line_nr
        # extract episode information
        md_nr = lines[episode_nr_line_nr].strip.match(/(\d+)/)
        if md_nr
          episode_nr = md_nr[1].to_i

          md_name = lines[episode_name_line_nr].strip.match(/^\|.(.*)$/)
          if md_name
            episode_name = md_name[1]
            episode_name.gsub!(/[\'\"\[\]]/, "")
            next unless episode_name.match(/\w+/)

            season_data[episode_nr] = episode_name.strip
          end
        end
      end
    end

    season_data
  end

  # This method will be called with a wikitable for a season as
  # parameter, extracts all episodes from it and returns them as an
  # array where the episode number is the index.
  #
  # This method implements a special format that takes place in
  # e.g. 'Prison Break' where an episode is not spread along several
  # lines like in the method above.
  #
  # Example for a wikitable for episodes:
  #
  # {| class="wikitable"
  # |- style="color:#black; background-color:#006699"
  # ! '''Episode''' !! '''Deutscher Titel''' !! '''Originaltitel''' !! '''Erstausstrahlung (DE)''' !! '''Erstausstrahlung (USA)'''
  # |-
  # |'''1''' (1-01) || Der große Plan || Pilot || 21. Juni 2007 || 29. August 2005
  # |-
  # |'''2''' (1-02) || Lügt Lincoln? || Allen || 21. Juni 2007 || 29. August 2005
  #
  def self.parse_inarticle_season_table_with_one_line(table)
    raise ArgumentError, 'String with seasontable expected' unless
      table.is_a?(String)

    season_data = []
    episode_nr_col = nil
    episode_name_col = nil

    table.split(/^\|\-.*$/).each do |tablerow|

      if tablerow.match(/!!.*!!.*!!/)
        # extract column numbers from table header
        tablerow.split(/!!/).each_with_index do |col, index|
          episode_nr_col = index if col.match(/Episode/i)
          episode_name_col = index if col.match(/Deutsch.*Titel/i)
        end

      elsif tablerow.match(/\|\|.*\w+.*\|\|/)
        # guard: skip data rows seen before a usable header row
        next unless episode_nr_col && episode_name_col

        tablerow.strip!
        columns = tablerow.split(/\|\|/)

        # the following cleans up the column so that the following occurs
        # " '''7''' (1-07) " => "7 1 07"
        #
        # we can now extract the last bunch of digits and this algorithm is
        # some kind of format independent
        dirty_episode_nr = columns[episode_nr_col].gsub(/\D/, " ").strip
        # was: match(...)[1] which crashed when the cell held no digits
        md_nr = dirty_episode_nr.match(/(\d+)$/)
        next unless md_nr
        episode_nr = md_nr[1]

        episode_name = columns[episode_name_col].strip
        # BUGFIX: was 'next unless episode_nr.match(/\w+/)' which
        # re-tested the number and never validated the name
        next unless episode_name.match(/\w+/)

        season_data[episode_nr.to_i] = episode_name
      end
    end

    season_data
  end

  # This method checks if the page is the main page for a series.
  #
  # Returns true if the page contains the infobox that is typical
  # for series pages in wikipedia.
  def self.is_series_main_page?(page)
    page.match(SERIES_SITE_TEST_PATTERN) != nil
  end

  # Check the site if it is a disambiguation site.
  #
  # Returns true if this site links to pages with themes
  # with the same name.
  def self.is_disambiguation_site?(page)
    page.match(DISAMBIGUATION_TEST_PATTERN) != nil
  end

  # Test if the page contains a link to an article
  # with an episode list.
  def self.contains_link_to_episode_list?(page)
    page.match(CONTAINS_LINK_TO_EPISODE_LIST) != nil
  end

  # Test if the page contains an episode list.
  def self.contains_inarticle_episode_list?(page)
    page.match(CONTAINS_INARTICLE_EPISODE_LIST) != nil
  end

  # Tests for the type of in article episode list.
  def self.is_episode_list_with_one_episode_per_line?(page)
    page.match(IS_ONE_LINE_EPISODE_LIST) != nil
  end
end
|
447
|
+
end
|
448
|
+
end
|
@@ -0,0 +1,26 @@
|
|
1
|
+
module Serienrenamer

  # Base plugin from which all existing plugins inherit
  class Pluginbase

    # list of all plugin classes that subclassed Pluginbase
    # (class instance variable, read-only from outside)
    class << self; attr_reader :registered_plugins end
    @registered_plugins = []

    # if you inherit from this class, the child
    # gets added to the "registered plugins" array
    #
    # BUGFIX: register on Pluginbase explicitly — with
    # 'self.registered_plugins' an indirect subclass (grandchild)
    # would look up @registered_plugins on the intermediate class,
    # where it is nil, and crash with NoMethodError
    def self.inherited(child)
      Pluginbase.registered_plugins << child
    end

    def self.plugin_name; "PluginBase" end

    # plugins render as their plugin_name
    def self.to_s
      self.plugin_name
    end
  end

  # include all existing plugins
  module Plugin
    Dir[File.dirname(__FILE__) + '/plugin/*.rb'].each {|file| require file }
  end
end
|
data/lib/serienrenamer.rb
CHANGED
@@ -1,28 +1,7 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
|
1
|
+
require 'serienrenamer/episode.rb'
|
2
|
+
require 'serienrenamer/information_store.rb'
|
3
|
+
require 'serienrenamer/version.rb'
|
4
|
+
require 'serienrenamer/plugin'
|
4
5
|
|
5
6
|
module Serienrenamer
|
6
|
-
|
7
|
-
require 'serienrenamer/episode.rb'
|
8
|
-
require 'serienrenamer/information_store.rb'
|
9
|
-
require 'serienrenamer/version.rb'
|
10
|
-
|
11
|
-
class Pluginbase
|
12
|
-
|
13
|
-
class << self; attr_reader :registered_plugins end
|
14
|
-
@registered_plugins = []
|
15
|
-
|
16
|
-
# if you inherit from this class, the child
|
17
|
-
# gets added to the "registered plugins" array
|
18
|
-
def self.inherited(child)
|
19
|
-
Pluginbase.registered_plugins << child
|
20
|
-
end
|
21
|
-
|
22
|
-
def self.plugin_name; "PluginBase" end
|
23
|
-
|
24
|
-
def self.to_s
|
25
|
-
self.plugin_name
|
26
|
-
end
|
27
|
-
end
|
28
7
|
end
|
data/serienrenamer.gemspec
CHANGED
@@ -15,6 +15,7 @@ Gem::Specification.new do |gem|
|
|
15
15
|
gem.require_paths = ["lib"]
|
16
16
|
gem.version = Serienrenamer::VERSION
|
17
17
|
|
18
|
+
gem.required_ruby_version = '>= 1.9.0'
|
18
19
|
gem.add_runtime_dependency(%q<wlapi>, [">= 0.8.4"])
|
19
20
|
gem.add_runtime_dependency(%q<savon>, ["~> 0.8.4"])
|
20
21
|
gem.add_runtime_dependency(%q<mediawiki-gateway>, [">= 0.4.4"])
|
data/test/test_helper.rb
CHANGED
@@ -27,7 +27,7 @@ class TestPluginSerienjunkiesDe < Test::Unit::TestCase
|
|
27
27
|
|
28
28
|
def test_seriespage_url_search
|
29
29
|
VCR.use_cassette("sjunkie_de_#{method_name}") do
|
30
|
-
plugin = Plugin::SerienjunkiesDe
|
30
|
+
plugin = Serienrenamer::Plugin::SerienjunkiesDe
|
31
31
|
|
32
32
|
misfits = plugin.find_link_to_series_page("Misfits")
|
33
33
|
assert_equal("http://serienjunkies.de/misfits/", misfits)
|
@@ -46,7 +46,7 @@ class TestPluginSerienjunkiesDe < Test::Unit::TestCase
|
|
46
46
|
def test_parse_seriespage
|
47
47
|
|
48
48
|
VCR.use_cassette("sjunkie_de_#{method_name}") do
|
49
|
-
plugin = Plugin::SerienjunkiesDe
|
49
|
+
plugin = Serienrenamer::Plugin::SerienjunkiesDe
|
50
50
|
|
51
51
|
seasons = plugin.parse_seriespage(
|
52
52
|
"http://www.serienjunkies.de/royal-pains/")
|
@@ -72,7 +72,7 @@ class TestPluginSerienjunkiesDe < Test::Unit::TestCase
|
|
72
72
|
def test_episode_information_generation
|
73
73
|
|
74
74
|
VCR.use_cassette("sjunkie_de_#{method_name}") do
|
75
|
-
plugin = Plugin::SerienjunkiesDe
|
75
|
+
plugin = Serienrenamer::Plugin::SerienjunkiesDe
|
76
76
|
|
77
77
|
flpo = Serienrenamer::Episode.new(@@files['flpo'])
|
78
78
|
data = plugin.generate_episode_information(flpo)[0]
|
@@ -21,7 +21,7 @@ class TestPluginSerienjunkiesFeed < Test::Unit::TestCase
|
|
21
21
|
|
22
22
|
def setup
|
23
23
|
unless defined? @feed
|
24
|
-
@feed = Plugin::SerienjunkiesOrgFeed
|
24
|
+
@feed = Serienrenamer::Plugin::SerienjunkiesOrgFeed
|
25
25
|
@feed.feed_url = File.join( File.dirname(__FILE__),
|
26
26
|
'serienjunkies_feed_sample.xml')
|
27
27
|
end
|
@@ -21,7 +21,7 @@ class TestSerienjunkiesOrg < Test::Unit::TestCase
|
|
21
21
|
def test_seriespage_url_search
|
22
22
|
|
23
23
|
VCR.use_cassette("sjunkie_org_#{method_name}") do
|
24
|
-
plugin = Plugin::SerienjunkiesOrg
|
24
|
+
plugin = Serienrenamer::Plugin::SerienjunkiesOrg
|
25
25
|
|
26
26
|
misfits = plugin.find_link_to_series_page("Misfits")
|
27
27
|
assert_equal("http://serienjunkies.org/serie/misfits/", misfits)
|
@@ -37,7 +37,7 @@ class TestSerienjunkiesOrg < Test::Unit::TestCase
|
|
37
37
|
def test_parse_seriespage
|
38
38
|
|
39
39
|
VCR.use_cassette("sjunkie_org_#{method_name}") do
|
40
|
-
plugin = Plugin::SerienjunkiesOrg
|
40
|
+
plugin = Serienrenamer::Plugin::SerienjunkiesOrg
|
41
41
|
|
42
42
|
seasons = plugin.parse_seriespage(
|
43
43
|
"http://serienjunkies.org/royal-pains/")
|
@@ -58,7 +58,7 @@ class TestSerienjunkiesOrg < Test::Unit::TestCase
|
|
58
58
|
def test_episode_information_generation
|
59
59
|
|
60
60
|
VCR.use_cassette("sjunkie_org_#{method_name}") do
|
61
|
-
plugin = Plugin::SerienjunkiesOrg
|
61
|
+
plugin = Serienrenamer::Plugin::SerienjunkiesOrg
|
62
62
|
|
63
63
|
flpo = Serienrenamer::Episode.new(@@files['flpo'])
|
64
64
|
data = plugin.generate_episode_information(flpo)[0]
|
@@ -40,7 +40,7 @@ class TestPluginTextfile < Test::Unit::TestCase
|
|
40
40
|
def test_information_extraction
|
41
41
|
VCR.use_cassette("textfile_#{method_name}") do
|
42
42
|
how = Serienrenamer::Episode.new(@@directories['hmym'])
|
43
|
-
data = Plugin::Textfile.generate_episode_information(how)[0]
|
43
|
+
data = Serienrenamer::Plugin::Textfile.generate_episode_information(how)[0]
|
44
44
|
how.add_episode_information(data, true)
|
45
45
|
assert_equal("S07E05 - Die Exkursion.avi", how.to_s)
|
46
46
|
end
|
@@ -49,7 +49,7 @@ class TestPluginTextfile < Test::Unit::TestCase
|
|
49
49
|
def test_select_right_textfile
|
50
50
|
VCR.use_cassette("textfile_#{method_name}") do
|
51
51
|
how = Serienrenamer::Episode.new(@@directories['hmmg'])
|
52
|
-
data = Plugin::Textfile.generate_episode_information(how)[0]
|
52
|
+
data = Serienrenamer::Plugin::Textfile.generate_episode_information(how)[0]
|
53
53
|
how.add_episode_information(data, true)
|
54
54
|
assert_equal("S07E11 - Plan B.avi", how.to_s)
|
55
55
|
end
|
@@ -58,7 +58,7 @@ class TestPluginTextfile < Test::Unit::TestCase
|
|
58
58
|
def test_information_extraction_with_directory_parameter
|
59
59
|
VCR.use_cassette("textfile_#{method_name}") do
|
60
60
|
how = @@directories['hmym']
|
61
|
-
data = Plugin::Textfile.generate_episode_information(how)[0]
|
61
|
+
data = Serienrenamer::Plugin::Textfile.generate_episode_information(how)[0]
|
62
62
|
assert_not_nil(data)
|
63
63
|
end
|
64
64
|
end
|