rwikibot 1.0.4 → 1.0.5
Sign up to get free protection for your applications and to get access to all the features.
- data/CHANGELOG +7 -0
- data/lib/rwikibot.rb +136 -11
- metadata +2 -2
data/CHANGELOG
CHANGED
@@ -1,5 +1,12 @@
|
|
1
1
|
RWikiBot ChangeLog
|
2
2
|
|
3
|
+
1.0.5
|
4
|
+
- Added parameters for queries to the rdoc - save MediaWiki some bandwidth
|
5
|
+
- Added BACKLINKS, PAGE_EXISTS?, EMBEDDED_IN, IMAGE_EMBEDDED_IN
|
6
|
+
- Fixed a bug in ALL_PAGES, where it wasn't really sending the options hash as part of the request. Oops.
|
7
|
+
- Removed some debug puts
|
8
|
+
- Premiered the RWikiBot logo. Not in the source, but it exists now.
|
9
|
+
|
3
10
|
1.0.4
|
4
11
|
- Changed config.yaml to sample-config.yaml
|
5
12
|
- Had rake include the sample-config.yaml file
|
data/lib/rwikibot.rb
CHANGED
@@ -125,7 +125,7 @@ class RWikiBot
|
|
125
125
|
# This method will return Wiki-wide recent changes, almost as if looking at the Special page Recent Changes. But, in this format, a bot can handle it. Also we're using the API. And bots can't read.
|
126
126
|
#
|
127
127
|
# INPUT:: A hash of API-allowed keys and values. Default is same as API default.
|
128
|
-
#
|
128
|
+
# PARAMETERS:: letype (flt), lefrom (paging timestamp), leto (flt), ledirection (dflt=older), leuser (flt), letitle (flt), lelimit (dflt=10, max=500/5000)
|
129
129
|
# OUTPUT:: An array of hashes.
|
130
130
|
def recent_changes (options=nil)
|
131
131
|
|
@@ -133,7 +133,7 @@ class RWikiBot
|
|
133
133
|
# TODO - Persistent timestamp storage
|
134
134
|
|
135
135
|
@wikibotlogger.debug("RECENT CHANGES - Preparing request information...")
|
136
|
-
post_me = {"list" => "recentchanges"}
|
136
|
+
post_me = {"list" => "recentchanges", 'rclimit' => '5000'}
|
137
137
|
if options != nil
|
138
138
|
options.each do |key, value|
|
139
139
|
post_me[key] = value
|
@@ -279,7 +279,7 @@ class RWikiBot
|
|
279
279
|
# Also note that if the username the Bot uses is not of type Bot in the Wiki, you will be limited to 50 articles. Also log in, or you get an error.
|
280
280
|
#
|
281
281
|
# INPUT:: A hash of API-allowed keys and values. Default is same as API default.
|
282
|
-
#
|
282
|
+
# PARAMETERS:: apfrom (paging), apnamespace (dflt=0), apredirect (flt), aplimit (dflt=10, max=500/5000)
|
283
283
|
# OUTPUT:: An array of hashes with information about the pages.
|
284
284
|
def all_pages (options = nil)
|
285
285
|
|
@@ -287,10 +287,6 @@ class RWikiBot
|
|
287
287
|
@wikibotlogger.debug "ALL PAGES - Preparing request information..."
|
288
288
|
post_me = {'list' => 'allpages', 'apnamespace' => '0', 'aplimit' => '500'}
|
289
289
|
|
290
|
-
#make the request
|
291
|
-
@wikibotlogger.debug "ALL PAGES - Asking make_request to get all pages..."
|
292
|
-
allpages_result = make_request('query', post_me)
|
293
|
-
@wikibotlogger.debug "ALL PAGES - We should have a result now..."
|
294
290
|
|
295
291
|
if options != nil
|
296
292
|
@wikibotlogger.debug("ALL PAGES - Additional options added by requestor. Adding to post_me...")
|
@@ -301,10 +297,108 @@ class RWikiBot
|
|
301
297
|
@wikibotlogger.debug("ALL PAGES - No more additional options. Moving on...")
|
302
298
|
end
|
303
299
|
|
300
|
+
#make the request
|
301
|
+
@wikibotlogger.debug "ALL PAGES - Asking make_request to get all pages..."
|
302
|
+
allpages_result = make_request('query', post_me)
|
303
|
+
@wikibotlogger.debug "ALL PAGES - We should have a result now..."
|
304
|
+
|
304
305
|
return allpages_result.fetch('allpages')
|
305
306
|
|
306
307
|
end
|
308
|
+
|
309
|
+
# List
|
310
|
+
#
|
311
|
+
# This method fetches any article that links to the article given in 'title'. Returned in alphabetical order.
|
312
|
+
#
|
313
|
+
# INPUT:: A normalized article title or titles (pipe delimited), and a hash of API-allowed keys and values. Default is same as API default.
|
314
|
+
# PARAMETERS:: blfrom (paging), blnamespace (flt), blredirect (flt), bllimit (dflt=10, max=500/5000)
|
315
|
+
# OUTPUT:: An array of hashes with backlinked articles.
|
316
|
+
def backlinks (titles, options = nil)
|
317
|
+
|
318
|
+
# This will get all backlinks. Limits vary based on user rights of the Bot. Set to bot.
|
319
|
+
@wikibotlogger.debug "BACKLINKS - Preparing request information..."
|
320
|
+
post_me = {'list' => 'backlinks', 'titles' => "#{title}" }
|
321
|
+
|
322
|
+
|
323
|
+
if options != nil
|
324
|
+
@wikibotlogger.debug("BACKLINKS - Additional options added by requestor. Adding to post_me...")
|
325
|
+
options.each_pair do |key, value|
|
326
|
+
post_me[key] = value
|
327
|
+
@wikibotlogger.debug "BACKLINKS - Added #{post_me[key]}"
|
328
|
+
end
|
329
|
+
@wikibotlogger.debug("BACKLINKS - No more additional options. Moving on...")
|
330
|
+
end
|
331
|
+
|
332
|
+
#make the request
|
333
|
+
@wikibotlogger.debug "BACKLINKS - Asking make_request to get backlinks..."
|
334
|
+
backlinks_result = make_request('query', post_me)
|
335
|
+
@wikibotlogger.debug "BACKLINKS - We should have a result now..."
|
336
|
+
return backlinks_result.fetch('backlinks')
|
337
|
+
|
338
|
+
end
|
307
339
|
|
340
|
+
# List
|
341
|
+
#
|
342
|
+
# This method pulls any page that includes the template requested. Please note - the template must be the full name, like "Template:Disputed" or "Template:Awesome". Just one, please.
|
343
|
+
#
|
344
|
+
# INPUT:: A normalized template title, and a hash of API-allowed keys and values. Default is same as API default.
|
345
|
+
# PARAMETERS:: eifrom (paging), einamespace (flt), eiredirect (flt), eilimit (dflt=10, max=500/5000)
|
346
|
+
# OUTPUT:: An array of hashes with articles using said template.
|
347
|
+
def embedded_in (title, options = nil)
|
348
|
+
|
349
|
+
# This will get all pages embedding the template. Limits vary based on user rights of the Bot. Set to bot.
|
350
|
+
@wikibotlogger.debug "EMBEDDED IN - Preparing request information..."
|
351
|
+
post_me = {'list' => 'embeddedin', 'titles' => "#{title}" }
|
352
|
+
|
353
|
+
|
354
|
+
if options != nil
|
355
|
+
@wikibotlogger.debug("EMBEDDED IN - Additional options added by requestor. Adding to post_me...")
|
356
|
+
options.each_pair do |key, value|
|
357
|
+
post_me[key] = value
|
358
|
+
@wikibotlogger.debug "EMBEDDED IN - Added #{post_me[key]}"
|
359
|
+
end
|
360
|
+
@wikibotlogger.debug("EMBEDDED IN - No more additional options. Moving on...")
|
361
|
+
end
|
362
|
+
|
363
|
+
#make the request
|
364
|
+
@wikibotlogger.debug "EMBEDDED IN - Asking make_request to get backlinks..."
|
365
|
+
embeddedin_result = make_request('query', post_me)
|
366
|
+
@wikibotlogger.debug "EMBEDDED IN - We should have a result now..."
|
367
|
+
return embeddedin_result.fetch('embeddedin')
|
368
|
+
|
369
|
+
end
|
370
|
+
|
371
|
+
# List
|
372
|
+
#
|
373
|
+
# A whole lot like EMBEDDED_IN, except this bad boy has the job of handling Image: instead of Template:. I guess bots may want to watch images. It's really for completeness. But, people do do things with pictures. Maybe it handles Media: as well, but no promises.
|
374
|
+
#
|
375
|
+
# INPUT:: A normalized image title, and a hash of API-allowed keys and values. Default is same as API default.
|
376
|
+
# PARAMETERS:: iefrom (paging), ienamespace (flt), ielimit (dflt=10, max=500/5000)
|
377
|
+
# OUTPUT:: An array of hashes with images page links
|
378
|
+
# def image_embedded_in (title, options = nil) :nodoc:
|
379
|
+
#
|
380
|
+
# # This will get all pages. Limits vary based on user rights of the Bot. Set to bot.
|
381
|
+
# @wikibotlogger.debug "IMAGE EMBEDDED IN - Preparing request information..."
|
382
|
+
# post_me = {'list' => 'embeddedin', 'titles' => "#{title}" }
|
383
|
+
#
|
384
|
+
#
|
385
|
+
# if options != nil
|
386
|
+
# @wikibotlogger.debug("IMAGE EMBEDDED IN - Additional options added by requestor. Adding to post_me...")
|
387
|
+
# options.each_pair do |key, value|
|
388
|
+
# post_me[key] = value
|
389
|
+
# @wikibotlogger.debug "IMAGE EMBEDDED IN - Added #{post_me[key]}"
|
390
|
+
# end
|
391
|
+
# @wikibotlogger.debug("IMAGE EMBEDDED IN - No more additional options. Moving on...")
|
392
|
+
# end
|
393
|
+
#
|
394
|
+
# #make the request
|
395
|
+
# @wikibotlogger.debug "IMAGE EMBEDDED IN - Asking make_request to get backlinks..."
|
396
|
+
# imageembeddedin_result = make_request('query', post_me)
|
397
|
+
# @wikibotlogger.debug "IMAGE EMBEDDED IN - We should have a result now..."
|
398
|
+
# return imageembeddedin_result.fetch('embeddedin')
|
399
|
+
#
|
400
|
+
# end
|
401
|
+
|
308
402
|
# Prop = Info
|
309
403
|
#
|
310
404
|
# I decided to split this up since I wanted to normalize the bot framework as much as possible, or in other words, make it as easy to use as possible. I think the sacrifice of more methods is worth having more English-looking code. It's the Ruby way.
|
@@ -433,6 +527,35 @@ class RWikiBot
|
|
433
527
|
return array
|
434
528
|
end
|
435
529
|
|
530
|
+
# This is a little something I cooked up because it seems like a totally logical thing for bots to want to do. Basically, you feed it a page title - any you want (that's the point) - and it returns TRUE or FALSE if the page exists inside the wiki. Technically, it pulls an attribute "missing", and in its presence, returns FALSE since the page does not exist.
|
531
|
+
# That's something a bot would want to do, right?
|
532
|
+
#
|
533
|
+
# INPUT:: A title. Just one!
|
534
|
+
#
|
535
|
+
# OUTPUT:: TRUE/FALSE, depending on which is correct
|
536
|
+
def page_exists? (title)
|
537
|
+
|
538
|
+
# Prepare the request
|
539
|
+
@wikibotlogger.debug "PAGE EXISTS? - Preparing request information..."
|
540
|
+
post_me = {'titles' => title}
|
541
|
+
|
542
|
+
#Make the request
|
543
|
+
@wikibotlogger.debug "PAGE EXISTS? - Asking make_request to verify page existence..."
|
544
|
+
page_exists_result = make_request('query', post_me)
|
545
|
+
@wikibotlogger.debug "PAGE EXISTS? - We should have a result now..."
|
546
|
+
|
547
|
+
@wikibotlogger.debug "PAGE EXISTS? - Processing result..."
|
548
|
+
|
549
|
+
|
550
|
+
if page_exists_result.fetch('pages')[0].has_key?('missing')
|
551
|
+
@wikibotlogger.debug "PAGE EXISTS? - The page #{title} does NOT exist. Sorry."
|
552
|
+
return false
|
553
|
+
else
|
554
|
+
@wikibotlogger.debug "PAGE EXISTS? - The page #{title} DOES exist. You lucky, lucky bot."
|
555
|
+
return true
|
556
|
+
end
|
557
|
+
|
558
|
+
end
|
436
559
|
|
437
560
|
private
|
438
561
|
|
@@ -445,6 +568,11 @@ class RWikiBot
|
|
445
568
|
#Housekeeping. We need to add format and action to the request hash
|
446
569
|
post_this['format'] = 'yaml'
|
447
570
|
post_this['action'] = action
|
571
|
+
if @config.fetch('logged_in')
|
572
|
+
post_this['lgusername'] = @config.fetch('lgusername')
|
573
|
+
post_this['lgtoken'] = @config.fetch('lgtoken')
|
574
|
+
post_this['lguserid'] = @config.fetch('lguserid')
|
575
|
+
end
|
448
576
|
|
449
577
|
#change - preparing a POST string instead of hash.
|
450
578
|
post_string = ''
|
@@ -456,7 +584,7 @@ class RWikiBot
|
|
456
584
|
|
457
585
|
#Send the actual request
|
458
586
|
@wikibotlogger.debug "MAKE REQUEST - Sending request..."
|
459
|
-
resp = @http.post( @config.fetch('uri').path , post_string , {'User-agent' => 'RWikiBot/0.1','Cookie' => bake(@config.fetch('cookie')) } )
|
587
|
+
resp = @http.post( @config.fetch('uri').path , post_string , {'User-agent' => 'RWikiBot/0.1', 'Cookie' => bake(@config.fetch('cookie')) } ) #
|
460
588
|
@wikibotlogger.info "MAKE REQUEST - Response: "
|
461
589
|
@wikibotlogger.info resp.body
|
462
590
|
@wikibotlogger.debug "MAKE REQUEST - End Response "
|
@@ -490,9 +618,6 @@ class RWikiBot
|
|
490
618
|
|
491
619
|
# Make some dough. Let CGI parse set-cookie, since there's multiple set-cookies and this is hard
|
492
620
|
dough = CGI::Cookie::parse(cookie)
|
493
|
-
dough.each do |key, value|
|
494
|
-
puts "#{key} => #{value}"
|
495
|
-
end
|
496
621
|
|
497
622
|
# Take the dough and get my three elements out
|
498
623
|
|
metadata
CHANGED
@@ -3,8 +3,8 @@ rubygems_version: 0.8.11
|
|
3
3
|
specification_version: 1
|
4
4
|
name: rwikibot
|
5
5
|
version: !ruby/object:Gem::Version
|
6
|
-
version: 1.0.
|
7
|
-
date: 2007-03-
|
6
|
+
version: 1.0.5
|
7
|
+
date: 2007-03-05 00:00:00 -06:00
|
8
8
|
summary: A library for creating MediaWiki bots.
|
9
9
|
require_paths:
|
10
10
|
- lib
|