rbmediawiki 0.2.6 → 0.2.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. data/lib/rbmediawiki/page.rb +446 -141
  2. data/lib/rbmediawiki.rb +1 -1
  3. metadata +1 -1
data/lib/rbmediawiki/page.rb CHANGED
@@ -1,158 +1,463 @@
- class Page_generator
- def initialize(site)
- @site = site
+ #TODO: rollback
+ #TODO: patrol
+
+ #This class represents a page. It gives methods for dealing with single
+ #pages: obtainig the content, putting it, appending content, deleting, etc.
+ class Page
+ attr_reader :title
+ def initialize(title = nil, site = nil)
+ @site = site ? site : Api.new()
+ @title = title
+ @normtitle = title.gsub(" ","_")
  end
 
- #gets pages alphabetically from a certain start point
- # * from: starting point (Default starts from the beginning)
- # * limit: pages to get per query (default: 500)
- # * prefix: Only list titles that start with this value
- # * namespace: The namespace to enumerate. By default, the main namespace will be enumerated
- # * How to filter redirects
- # * all: List all pages regardless of their redirect flag (default)
- # * redirects: Only list redirects
- # * nonredirects: Don't list redirects
- # * minsize: Only list pages that are at least this many bytes in size
- # * maxsize: Only list pages that are at most this many bytes in size
- # * prtype: Only list pages that have been protected from this type of action
- # * edit: Only list edit-protected pages
- # * move: Only list move-protected pages
- # * prleve: Only list pages that have been protected at this level. Cannot be used without prtype
- # * autoconfirmed: Only autoconfirmed users can edit/move/whatever
- # * sysop: Only sysops can edit/move/whatever
- # * Empty: Everyone can edit/move/whatever
- # * prfiltercascade: Filter protections based on cascadingness (ignored when apprtype isn't set)
- # * One value: cascading, noncascading, all
- # * Default: all
- # *filterlanglinks: Filter based on whether a page has langlinks
- # * One value: withlanglinks, withoutlanglinks, all
- # * Default: all
- def all_pages(from = "!", limit = "500", prefix = nil, namespace = nil, filterredir = nil, minsize = nil, maxsize = nil, prtype = nil, prlevel = nil, prfiltercascade = nil, filterlanglinks = nil)
- pages = Hash.new
- count = 0
- finish = false
- while !finish
- result = @site.query_list_allpages(nil, from, prefix, namespace, filterredir, minsize, maxsize, prtype, prlevel, prfiltercascade, limit, nil, filterlanglinks)
- result['query']['allpages']['p'].each {|page|
- yield Page.new(page['title'], @site)
- }
- if result.key?('query-continue')
- from = result['query-continue']['allpages']['apfrom']
- else
- finish = true
- end
+ #retrieves the content of the page
+ def get()
+ result = @site.query_prop_revisions(@normtitle, 'content')
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return result['query']['pages']['page']['revisions']['rev']
  end
  end
 
- def linksearch(euquery, eulimit = 500, eunamespace = 0)
- pages = Hash.new
- count = 0
- finish = false
- euoffset = nil
- while !finish
- result = @site.query_list_exturlusage(nil, nil, euoffset, nil, euquery, eunamespace, eulimit)
- result['query']['exturlusage']['eu'].each {|page|
- yield Page.new(page['title'], @site)
- }
- if result.key?('query-continue')
- euoffset = result['query-continue']['exturlusage']['euoffset']
- else
- finish = true
- end
+ #returns false if it is not a redirect, the redirected title if it is
+ def redirect?()
+ txt = this.get
+ if (txt =~ /#REDIRECT\s+\[\[(.*)\]\]/)
+ return $1
+ else
+ return false
  end
  end
 
- #Returns pages where eititle is transcluded in.
- #eilimit is the max results for query (default: 500)
- #einamespace is the namespace to work in
- def templateusage(eititle, eilimit = 500, einamespace = nil)
- pages = Hash.new
- finish = false
- eioffset = nil
- while !finish
- result = @site.query_list_embeddedin(nil, eititle, eioffset, einamespace, nil, eilimit)
- result['query']['embeddedin']['ei'].each {|page|
- yield Page.new(page['title'], @site)
- }
- if result.key?('query-continue')
- eioffset = result['query-continue']['embeddedin']['eicontinue']
- else
- finish = true
- end
+ #puts the text of a page.
+ # * text: the new content of the page
+ # * summary: editting summary
+ # * minor: is a minor edit? default->true
+ # * bot: is a bot flagged edit?
+ def put(text, summary = nil, minor = true, bot = true, password = nil)
+ #require login
+ @site.login(password)
+ result = @site.query_prop_info(@normtitle, nil, 'edit')
+ token = result['query']['pages']['page']['edittoken']
+ result = @site.edit(@normtitle, nil, text, token, summary, minor, nil, bot)
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return true
  end
+ puts "content put"
  end
- def alllinks(alprefix, allimit = 500, alnamespace = nil)
- pages = Hash.new
- finish = false
- aloffset = nil
- while !finish
- result = @site.query_list_alllinks(nil, aloffset, nil, alprefix, nil, nil, nil, alnamespace, allimit, nil, nil, true)
- puts result
- result['query']['alllinks']['l'].each {|page|
- yield Page.new(page['title'], @site)
- }
- if result.key?('query-continue')
- euoffset = result['query-continue']['alllinks']['alcontinue']
- else
- finish = true
- end
+
+ #appends texto to a page
+ #same as #put, but the text is appended and the previous content preserved
+ def append(text, summary = nil, minor = true, bot = true)
+ #require login
+ @site.login
+ puts text
+ result = @site.query_prop_info(@normtitle, nil, 'edit')
+ token = result['query']['pages']['page']['edittoken']
+ result = @site.edit(@normtitle, nil, text, token, summary, minor, nil, bot, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, text)
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return true
+ end
+ end
+
+ #prepends text to a page
+ #same as #put, but the text is prepended and the previous content preserved
+ def prepend(text, summary = nil, minor = true, bot = true)
+ #require login
+ @site.login
+ result = @site.query_prop_info(@normtitle, nil, 'edit')
+ token = result['query']['pages']['page']['edittoken']
+ result = @site.edit(@normtitle, nil, text, token, summary, minor, nil, bot, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, text)
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return true
+ end
+ end
+
+ #adds a section to a page
+ #same as #append, but is a section what is appended.
+ #title is the title of the new section
+ def addsection(text, title, minor = false, bot = true)
+ #require login
+ @site.login
+ result = @site.query_prop_info(@normtitle, nil, 'edit')
+ token = result['query']['pages']['page']['edittoken']
+ result = @site.edit(@normtitle, section, text, token, title, minor, nil, bot)
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return true
+ end
+ end
+
+ #moves a page
+ # * reason: reason or summary
+ # * movetalk: move talk pages too (default->true)
+ # * noredirect: don't leave a redirect (default->nil)
+ def move(to, reason = nil, movetalk = true, noredirect = nil)
+ #require login
+ @site.login
+ result = @site.query_prop_info(@normtitle, nil, 'move')
+ token = result['query']['pages']['page']['movetoken']
+ result = @site.move(@normtitle, nil, to, token, reason, movetalk, nil, noredirect)
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return true
+ end
+ end
+
+ #protects a page.
+ #reason is the reason for the protection
+ #expiry is a timescamp (default is infinite).
+ #protections is the action and group that can perform that action, separated
+ #by pipes. Exapmple "edit=sysop|move=autoconfirmed".Default is edit=sysop|move=sysop
+ def protect(reason = nil, expiry = 'infinite', protections = 'edit=sysop|move=sysop')
+ #require login
+ @site.login
+ result = @site.query_prop_info(@normtitle, nil, 'protect')
+ token = result['query']['pages']['page']['protecttoken']
+ result = @site.protect(@normtitle, token, protections, expiry, reason)
+ if result.key?('error')
+ raise RbmediawikiError, "#{title}: "+result['error']['code']
+ else
+ return true
+ end
+ end
+
+ #semipotects a page.
+ #is the same as protect, but default for protections is "edit=autoconfirmed|move=autoconfirmed"
+ def semiprotect(reason = nil, expiry = 'infinite')
+ protect(reason, expiry, 'edit=autoconfirmed|move=autoconfirmed')
+ #possible errors: user doesn't have privileges
+ end
+
+ #delete the page.
+ #reason : reason for deleting
+ #returns true if success, raises NoPage if page doesn't exist
+ def delete(reason="")
+ @site.login
+ result = @site.query_prop_info(@normtitle, nil, 'delete')
+ token = result['query']['pages']['page']['deletetoken']
+ result = @site.delete(@normtitle, nil, token, reason)
+ if result.key?('error')
+ raise RbmediawikiError, "#{@title}: "+result['error']['code']
+ else
+ return true
  end
  end
- def backlinks(bltitle, bllimit = 500, blnamespace = nil, blfilterredir = nil)
- pages = Hash.new
- finish = false
- bloffset = nil
- while !finish
- result = @site.query_list_backlinks(nil, bltitle, bloffset, blnamespace, blfilterredir, bllimit, true )
- puts result
- result['query']['backlinks']['bl'].each {|page|
- #TODO:code for dealing with redirects
- # if page.key?('redirlinks')
- # #checking probable double redirect
- # if page['redirlinks'].key?('bl')
- # puts page
- # page['redirlinks']['bl'].each {|page2|
- # puts page2
- # yield Page.new(page2['title'], @site)
- # }
- # end
- # else
- puts page
- yield Page.new(page['title'], @site)
- # end
- }
- if result.key?('query-continue')
- bloffset = result['query-continue']['backlinks']['blcontinue']
+
+ #undeletes a page.
+ #reason: reason for deleting
+ #returns true if success, false if there aren't deleted revisions
+
+ def undelete(reason="")
+ @site.login
+ result = @site.query_list_deletedrevs(@normtitle, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, 'token')
+ if result.key?('error')
+ raise RbmediawikiError, "#{@title}: "+result['error']['code']
+ end
+ if !result.has_key?('deletedRevs')
+ return false
+ end
+ token = result['query']['deletedrevs']['page']['token']
+ result = @site.undelete(@normtitle, token, reason)
+ return true
+ end
+
+ #rollback (revert) editr by user. Summary can be given
+ def rollback(user = nil, summary = nil, markbot = nil)
+ @site.login
+ result = @site.query_prop_revisions(@normtitle, nil, nil, nil, nil, nil, nil, nil, user, nil, nil, nil, nil, 'rollback')
+ #Page exists?
+ if result['query']['pages']['page'].key?('missing')
+ raise NoPage, "Page [[#{@title}]] does not exist"
+ end
+ #Has user edited this?
+ if !result['query']['pages']['page'].key?('revisions')
+ raise RbmediawikiError, "#{@title}: No edits by user #{user}"
+ end
+
+ #If the user made more than one contribs, this is an array
+ #but the token is the same. We only want the token
+ if result['query']['pages']['page']['revisions']['rev'].is_a? Array
+ token = result['query']['pages']['page']['revisions']['rev'][0]['rollbacktoken']
+ else
+ token = result['query']['pages']['page']['revisions']['rev']['rollbacktoken']
+ end
+ result = @site.rollback(@normtitle, user, token, summary, markbot)
+ if result.key?('error')
+ raise RbmediawikiError, "#{@title}: "+result['error']['code']
+ else
+ return true
+ end
+ end
+
+ #gets info about the protections of a page. Returns an array as for instance
+ #{level => sysop,type => edit,expiry => infinity}
+ #{level => sysop,type => move,expiry => infinity}
+ def protected?()
+ result = @site.query_prop_info(@normtitle, 'protection')
+ if result.key?('error')
+ raise RbmediawikiError, "#{@title}: "+result['error']['code']
+ end
+ if result['query']['pages']['page'].key?('missing')
+ raise NoPage, "Page [[#{@title}]] does not exist"
+
+ else
+ return result['query']['pages']['page']['protection']['pr']
+ end
+ end
+
+
+ #not working in r1.9
+ # #get interwiki links
+ # #min is the minimum number of elements to return, lllimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #returns false if there aren't any, and raises NoPage if page doesn't exist
+ # def get_interwikis(min = nil, lllimit = 500)
+ # llcontinue = nil
+ # iws = Hash.new
+ # count = 0
+ # loop {
+ # result = @site.query_prop_langlinks(@normtitle, lllimit, llcontinue)
+ # iws.deep_merge!(result['query'])
+ # if result.key?('query-continue') && min && count < min
+ # count += lllimit
+ # llcontinue = result['query-continue']['langlinks']['llcontinue']
+ # else
+ # break
+ # end
+ # }
+ # if iws['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{title}]] does not exist"
+ # elsif iws['pages']['page'].key?('langlinks')
+ # return iws['pages']['page']['langlinks']['ll']
+ # else return false
+ # end
+ # end
+ #
+ # #gets image links of a page
+ # #min is the minimum number of elements to return, imlimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #returns false if there aren't any, and raises NoPage if page doesn't exist
+ # def get_images(min = nil, imlimit = 500)
+ # imcontinue = nil
+ # ims = Hash.new
+ # count = 0
+ # loop {
+ # result = @site.query_prop_images(@normtitle, imlimit, imcontinue)
+ # ims.deep_merge!(result['query'])
+ # if result.key?('query-continue') && min && count < min
+ # count += lllimit
+ # imcontinue = result['query-continue']['images']['imcontinue']
+ # else
+ # break
+ # end
+ # }
+ # if ims['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{@title}]] does not exist"
+ # elsif ims['pages']['page'].key?('images')
+ # return ims['pages']['page']['images']['im']
+ # else return false
+ # end
+ # end
+ #
+ # #gets templates used in a page
+ # #min is the minimum number of elements to return, tllimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #returns false if there aren't any, and raises NoPage if page doesn't exist
+ # def get_templates(min = nil, tllimit = 500)
+ # tlcontinue = nil
+ # tls = Hash.new
+ # count = 0
+ # loop {
+ # result = @site.query_prop_templates(@normtitle, nil, tllimit, tlcontinue)
+ # tls.deep_merge!(result['query'])
+ # if result.key?('query-continue')&& min && count < min
+ # count += lllimit
+ # tlcontinue = result['query-continue']['templates']['tlcontinue']
+ # else
+ # break
+ # end
+ # }
+ # if tls['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{@title}]] does not exist"
+ # elsif tls['pages']['page'].key?('templates')
+ # return tls['pages']['page']['templates']['tl']
+ # else return false
+ # end
+ # end
+ #
+ # #gets templates used in a page
+ # #min is the minimum number of elements to return, cllimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #clshow can be "hidden" or "!hidden". Default shows both
+ # #if sortkey is true will return the sortkey. Default is true
+ # def get_categories(min = nil, cllimit = 500, clshow = nil, sortkey = true)
+ # clcontinue = nil
+ # cls = Hash.new
+ # count = 0
+ #
+ # if sortkey
+ # clprop = "sortkey"
+ # end
+ #
+ # loop {
+ # result = @site.query_prop_categories(@normtitle, clprop, clshow, cllimit, clcontinue)
+ # cls.deep_merge!(result['query'])
+ # if result.key?('query-continue')&& min && count < min
+ # count += lllimit
+ # clcontinue = result['query-continue']['categories']['clcontinue']
+ # else
+ # break
+ # end
+ # }
+ # if cls['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{@title}]] does not exist"
+ # elsif cls['pages']['page'].key?('categories')
+ # return cls['pages']['page']['categories']['cl']
+ # else return false
+ # end
+ # end
+ #
+ # #gets external links used in a page
+ # #min is the minimum number of elements to return, ellimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #returns false if there aren't any, and raises NoPage if page doesn't exist
+ # def get_external_links(min = nil, ellimit = 500)
+ # eloffset = nil
+ # els = Hash.new
+ # count = 0
+ # loop {
+ # result = @site.query_prop_extlinks(@normtitle, ellimit, eloffset)
+ # els.deep_merge!(result['query'])
+ # if result.key?('query-continue')&& min && count < min
+ # count += lllimit
+ # eloffset = result['query-continue']['extlinks']['elcontinue']
+ # else
+ # break
+ # end
+ # }
+ # if els['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{@title}]] does not exist"
+ # elsif els['pages']['page'].key?('extlinks')
+ # return els['pages']['page']['extlinks']['el']
+ # else return false
+ # end
+ # end
+ #
+ #gets backlinks (what links here) used in a page
+ #min is the minimum number of elements to return, bllimit is the number of
+ #elements to request from the API in each iteration. The method will
+ #request elements until it has at least min elements.
+ #returns false if there aren't any, and raises NoPage if page doesn't exist
+ def get_backlinks(min = nil, bllimit = 500, blnamespace = nil, blredirect = true)
+ blcontinue = nil
+ bls = Hash.new
+ count = 0
+ loop {
+ result = @site.query_list_backlinks(@normtitle, @normtitle, blcontinue, blnamespace, nil, bllimit, blredirect)
+ if result['query']['pages']['page'].key?('missing')
+ raise NoPage.new(), "Page [[#{@title}]] does not exist"
+ end
+ bls.merge!(result['query']['backlinks'])
+ if result.key?('query-continue')&& min && count < min
+ count += lllimit
+ blcontinue = result['query-continue']['backlinks']['blcontinue']
  else
- finish = true
+ break
  end
- end
-
+ }
+ return bls['bl']
  end
 
- #TODO
- #opensearch
- #prop links
- #prop langlinks?
- #prop images
- #prop templates
- #prop categories
- #prop extlinks
- #list allimages
- #list allcategories
- #list allusers
- #list blocks
- #list categorymembers
- #list deletedrevs
- #list imageusage
- #list logevents
- #list recentchanges
- #list search
- #list usercontribs
- #list watchlist
- #list exturlusage
- #list users
- #list random
- #list protectedtitles
- #list globalblocks
+ # #gets deleted revisions of a page
+ # #min is the minimum number of elements to return, drlimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #returns false if there aren't any
+ # def get_deletedrevs(min = nil, drlimit = 500)
+ # @site.login
+ # drcontinue = nil
+ # drs = Hash.new
+ # count = 0
+ # loop {
+ # result = @site.query_list_deletedrevs(@normtitle, nil, nil, nil, nil, nil, nil, drcontinue, nil, nil, nil, nil, drlimit)
+ # drs.deep_merge!(result['query'])
+ # if result.key?('query-continue')&& min && count < min
+ # count += lllimit
+ # drcontinue = result['query-continue']['deletedrevs']['drstart']
+ # else
+ # break
+ # end
+ # }
+ # if drs['deletedrevs'].key?('page')
+ # return drs['deletedrevs']['page']['revisions']['rev']
+ # else return false
+ # end
+ # end
+ #
+ # #gets pages in which this page is embedded (or transcluded). Returns a list
+ # #of Page elements
+ # #min is the minimum number of elements to return, eilimit is the number of
+ # #elements to request from the API in each iteration. The method will
+ # #request elements until it has at least min elements.
+ # #returns false if there aren't any, and raises NoPage if page doesn't exist
+ # def get_embeddedin(min = nil, eilimit = 500)
+ # eicontinue = nil
+ # eis = Hash.new
+ # count = 0
+ # loop {
+ # result = @site.query_list_embeddedin(@normtitle, @normtitle, eicontinue, nil, nil, eilimit)
+ # eis.deep_merge!(result['query'])
+ # if result.key?('query-continue')&& min && count < min
+ # count += lllimit
+ # eicontinue = result['query-continue']['embeddedin']['eicontinue']
+ # else
+ # break
+ # end
+ # }
+ # if eis['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{@title}]] does not exist"
+ # elsif eis['embeddedin'].key?('ei')
+ # members = Array.new
+ # eis['embeddedin']['ei'].each{|el| members.push(Page.new(el['title']))}
+ # return members
+ # else return false
+ # end
+ # end
+ #
+ # #returns the size of the page content in bytes
+ # #Raises NoPage if the page doesn't exist
+ # def get_size
+ # result = @site.query_prop_info(@normtitle)
+ # if result['query']['pages']['page'].key?('missing')
+ # raise NoPage.new(), "Page [[#{@normtitle}]] does not exist"
+ # else
+ # return result['query']['pages']['page']['length']
+ # end
+ # end
+
+ end
+
+ class NoPage < RuntimeError
+ end
+
+ class PageExists < RuntimeError
+ end
+
+ class RbmediawikiError < RuntimeError
  end
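
The new Page class above is the main user-facing change in this release. A minimal usage sketch follows, based only on the method signatures visible in this diff; the Api class, its constructor arguments, and how it authenticates live in other files of the gem and are not shown here, so the setup lines are placeholders rather than the gem's documented interface.

require 'rbmediawiki'

# Placeholder setup: Api is defined elsewhere in the gem; this sketch assumes
# the instance already knows which wiki to talk to and has credentials for login.
site = Api.new
page = Page.new("Sandbox", site)

begin
  text = page.get                                   # current wikitext of the page
  page.put(text + "\n\ntest", "testing 0.2.6.1")    # put() logs in, fetches an edit token and saves
  page.append("\n== Notes ==\nappended text", "append example")
  puts page.protected?.inspect                      # protection info (level/type/expiry)
rescue NoPage => e
  puts "page does not exist: #{e.message}"
rescue RbmediawikiError => e
  puts "API error: #{e.message}"
end

The rescue clauses use the NoPage and RbmediawikiError classes introduced at the end of this file.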
data/lib/rbmediawiki.rb CHANGED
@@ -4,7 +4,7 @@ require 'rubygems'
  require 'xmlsimple'
 
  class Rbmediawiki
- VERSION = '0.2.6'
+ VERSION = '0.2.6.1'
  Dir["#{File.dirname(__FILE__)}/rbmediawiki/*.rb"].sort.each { |lib| require lib }
  end
 
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: rbmediawiki
  version: !ruby/object:Gem::Version
- version: 0.2.6
+ version: 0.2.6.1
  platform: ruby
  authors:
  - Chabacano
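
Both files above carry the version bump. After upgrading, the loaded release can be confirmed from the constant defined on the Rbmediawiki class, as shown in the rbmediawiki.rb diff:

require 'rbmediawiki'

# VERSION comes straight from data/lib/rbmediawiki.rb in this release
puts Rbmediawiki::VERSION   # => "0.2.6.1"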