wikibot 0.2.0
- data/Manifest +9 -0
- data/README.textile +17 -0
- data/Rakefile +15 -0
- data/lib/category.rb +48 -0
- data/lib/class_ext.rb +40 -0
- data/lib/hash_ext.rb +17 -0
- data/lib/openhash.rb +23 -0
- data/lib/page.rb +108 -0
- data/lib/wikibot.rb +208 -0
- data/wikibot.gemspec +39 -0
- metadata +104 -0
data/Manifest
ADDED
data/README.textile
ADDED
@@ -0,0 +1,17 @@
h1=(). WikiBot

h2. About

WikiBot was originally a PHP-based framework for bots I run on Wikipedia; however, when it broke due to changes in the MediaWiki code, I decided to rewrite it in Ruby, using the MediaWiki API instead of screen scraping. This is the result.

As you'll notice, the features it provides are rather sparse at the moment, as I've only been adding the features I require. When I have more time, I'll flesh it out further.

h2. Gem Requirements

* "taf2-curb":taf2 (a fork of curb; the base curb gem might work, but it is untested)
* "xml-simple":xml
* "deep_merge":dm

[taf2]http://github.com/taf2/curb/tree/master
[xml]http://xml-simple.rubyforge.org/
[dm]http://rubyforge.org/projects/deepmerge/
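
A minimal usage sketch, assuming the gem is installed; it only uses methods defined in lib/wikibot.rb and lib/page.rb below, and the bot name, password, and page title are placeholders:

require 'rubygems'
require 'wikibot'

bot = WikiBot::Bot.new("ExampleBot", "secret", :auto_login => true)
puts bot.page("Ruby (programming language)").content  # raw wikitext for the page
puts bot.stats.pages                                  # siteinfo statistics, read via OpenHash dot access
bot.logout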
data/Rakefile
ADDED
@@ -0,0 +1,15 @@
require 'rubygems'
require 'rake'
require 'echoe'

Echoe.new('wikibot', '0.2.0') do |p|
  p.description = "Mediawiki Bot framework"
  p.url = "http://github.com/dvandersluis/wiki_bot"
  p.author = "Daniel Vandersluis"
  p.email = "daniel@codexed.com"
  p.ignore_pattern = ["tmp/*", "script/*"]
  p.development_dependencies = []
  p.runtime_dependencies = ['taf2-curb', 'xml-simple', 'deep_merge']
end

Dir["#{File.dirname(__FILE__)}/tasks/*.rake"].sort.each { |ext| load ext }
data/lib/category.rb
ADDED
@@ -0,0 +1,48 @@
module WikiBot
  class Category < Page
    def category_info
      data = {
        :action => :query,
        :titles => @name,
        :prop => :categoryinfo
      }

      # The query API returns nothing for an empty cat, so we'll return a hash with all the normal
      # properties set to 0 instead
      empty_cat = { "pages" => 0, "size" => 0, "files" => 0, "subcats" => 0, "hidden" => "" }
      @wiki_bot.query_api(:get, data).query.pages.page.categoryinfo || empty_cat
    end

    def members(sort = :sortkey, dir = :desc, namespace = nil)
      data = {
        :action => :query,
        :list => :categorymembers,
        :cmtitle => @name,
        :cmsort => sort,
        :cmdir => dir,
        :cmnamespace => namespace
      }

      @wiki_bot.query_api(:get, data).query.categorymembers.cm
    end

    # Returns a hash of how many pages live in a category
    def count(include_subcats = false)
      out = {}
      ci = category_info

      out[@name] = {
        :pages => ci.pages.to_i
      }

      if include_subcats and ci.subcats.to_i > 0
        out[@name][:subcats] = {}
        members(:sortkey, :desc, 14).each do |m| # namespace 14: only recurse into subcategories
          out[@name][:subcats].merge! Category.new(@wiki_bot, m.title).count(include_subcats)
        end
      end

      out
    end
  end
end
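
As an illustration of the count output above, a hypothetical category with one subcategory yields a nested hash keyed by category name (the names and numbers are placeholders):

bot.category("Category:Example").count(true)
# => { "Category:Example" => { :pages => 12,
#      :subcats => { "Category:Example subcat" => { :pages => 3 } } } }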
data/lib/class_ext.rb
ADDED
@@ -0,0 +1,40 @@
# Borrowed from Rails
class Class
  def cattr_reader(*syms)
    syms.flatten.each do |sym|
      next if sym.is_a?(Hash)
      class_eval(<<-EOS, __FILE__, __LINE__)
        unless defined? @@#{sym}
          @@#{sym} = nil
        end

        def self.#{sym}
          @@#{sym}
        end

        def #{sym}
          @@#{sym}
        end
      EOS
    end
  end

  def cattr_writer(*syms)
    syms.flatten.each do |sym|
      class_eval(<<-EOS, __FILE__, __LINE__)
        unless defined? @@#{sym}
          @@#{sym} = nil
        end

        def self.#{sym}=(obj)
          @@#{sym} = obj
        end
      EOS
    end
  end

  def cattr_accessor(*syms)
    cattr_reader(*syms)
    cattr_writer(*syms)
  end
end
data/lib/hash_ext.rb
ADDED
@@ -0,0 +1,17 @@
require 'cgi'

class Hash
  def to_post_fields
    inject([]) do |memo, pair|
      key, val = pair
      memo.push Curl::PostField.content(key.to_s, val.to_s)
    end
  end

  def to_querystring
    inject([]) do |memo, pair|
      key, val = pair
      memo.push "#{CGI::escape(key.to_s)}=#{CGI::escape(val.to_s)}"
    end.join("&")
  end
end
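
For example, Hash#to_querystring CGI-escapes both keys and values (pair order follows the hash's iteration order):

{ :action => :query, :titles => "Main Page" }.to_querystring
# => "action=query&titles=Main+Page"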
data/lib/openhash.rb
ADDED
@@ -0,0 +1,23 @@
class OpenHash < Hash
  undef_method :id

  def initialize(hash = {})
    super()
    update(hash)
  end

  def [](key)
    h = super(key)
    h.is_a?(Hash) ? OpenHash.new(h) : h
  end

  # Allow hash properties to be referenced by dot notation
  def method_missing(name, *args)
    name = name.to_s
    if self.include? name
      self[name]
    elsif self.include? name.to_sym
      self[name.to_sym]
    end
  end
end
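
A quick sketch of the dot-notation access method_missing provides; because [] rewraps nested hashes in an OpenHash, chains like query.pages.page in the API code below traverse transparently:

h = OpenHash.new("query" => { "pages" => { "page" => { "title" => "Main Page" } } })
h.query.pages.page.title  # => "Main Page"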
data/lib/page.rb
ADDED
@@ -0,0 +1,108 @@
module WikiBot
  class Page
    class WriteError < StandardError; end

    attr_writer :wiki_bot
    attr_reader :name

    def initialize(wiki_bot, name)
      @wiki_bot = wiki_bot
      @name = name
    end

    ###
    # Read from page
    def content
      @content ||= begin
        data = {
          :action => :query,
          :titles => @name,
          :prop => :revisions,
          :rvprop => :content
        }

        @wiki_bot.query_api(:get, data).query.pages.page.revisions.rev.content
      end
    end

    # Parse page content
    def text
      @text ||= begin
        data = {
          :action => :parse,
          :page => @name
        }

        @wiki_bot.query_api(:get, data).parse.text.content
      end
    end

    ###
    # Get page categories
    def categories(show = :all)
      # Cache hidden and non-hidden categories separately
      @categories ||= begin
        puts "Loading category data"
        data = {
          :action => :query,
          :titles => @name,
          :prop => :categories,
          :clshow => "!hidden"
        }

        categories = @wiki_bot.query_api(:get, data).query.pages.page.categories.cl
        categories = categories.inject([]) do |memo, category|
          memo.push(WikiBot::Category.new(@wiki_bot, category.title))
        end

        data = {
          :action => :query,
          :titles => @name,
          :prop => :categories,
          :clshow => "hidden"
        }

        hidden_categories = @wiki_bot.query_api(:get, data).query.pages.page.categories.cl
        hidden_categories = hidden_categories.inject([]) do |memo, category|
          memo.push(WikiBot::Category.new(@wiki_bot, category.title))
        end

        {:nonhidden => categories, :hidden => hidden_categories}
      end

      show = :all unless [:all, :hidden, :nonhidden].include? show
      return @categories[:nonhidden] + @categories[:hidden] if show == :all
      @categories[show]
    end

    def category_names(show = :all)
      categories(show).map{ |c| c.name }
    end

    ###
    # Write to page
    def write(text, summary, section = nil, minor = false)
      return if @wiki_bot.debug or @wiki_bot.readonly

      data = {
        :action => :edit,
        :title => @name,
        :text => text,
        :token => @wiki_bot.edit_token,
        :summary => summary,
        :recreate => 1,
        :bot => 1
      }

      data[:section] = section if !section.nil?
      data[:minor] = 1 if minor
      data[:notminor] = 1 if !minor

      result = @wiki_bot.query_api(:post, data)
      status = result.edit.result
      @wiki_bot.page_writes += 1
      raise WriteError, status unless status == "Success"
    end
  end
end
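
A hypothetical call matching the write signature above, reusing the bot instance from the earlier sketch (the page title and text are placeholders; no request is sent when the bot is in debug or readonly mode):

page = bot.page("User:ExampleBot/Status")
page.write("Last run: April 2010", "Updating status page", nil, true)  # minor edit to the whole page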
data/lib/wikibot.rb
ADDED
@@ -0,0 +1,208 @@
require 'class_ext'
require 'hash_ext'
require 'openhash'
require 'page'
require 'category'

require 'rubygems'
require 'curb'
require 'xmlsimple'
require 'deep_merge'

module WikiBot
  class CurbError < StandardError
    attr_accessor :curb
    def initialize(curb)
      @curb = curb
    end
  end

  class APIError < StandardError
    attr_accessor :code, :info
    def initialize(code, info)
      @code = code
      @info = info
    end
  end

  class Bot
    class LoginError < StandardError; end

    @@version = "0.2.0" # WikiBot version

    cattr_reader :version
    cattr_accessor :cookiejar # Filename where cookies will be stored

    attr_reader :config
    attr_reader :api_hits
    attr_accessor :page_writes
    attr_accessor :debug    # In debug mode, no writes will be made to the wiki
    attr_accessor :readonly # Writes will not be made

    def initialize(username, password, options = {})
      @config = Hash.new
      @cookies = Hash.new
      @api_hits = 0
      @page_writes = 0

      api = options[:api] || "http://en.wikipedia.org/w/api.php"
      auto_login = options[:auto_login] || false
      @readonly = options[:readonly] || false
      @debug = options[:debug] || false

      @config = {
        :username => username,
        :password => password,
        :api => api,
        :logged_in => false
      }

      # Set up cURL:
      @curl = Curl::Easy.new do |c|
        c.headers["User-Agent"] = "Mozilla/5.0 Curb/Taf2/0.2.8 WikiBot/#{config[:username]}/#{@@version}"
        #c.enable_cookies = true
        #c.cookiejar = @@cookiejar
      end

      login if auto_login
    end

    def query_api(method, raw_data = {})
      # Default to XML output unless a format was explicitly given
      raw_data[:format] = :xml unless raw_data.include?(:format) or raw_data.include?('format')

      @curl.headers["Cookie"] = @cookies.inject([]) do |memo, pair|
        key, val = pair
        memo.push(CGI::escape(key) + "=" + CGI::escape(val))
      end.join("; ") unless @cookies.nil?

      response_xml = {}

      while true
        # Rebuild the URL from the base API endpoint each iteration, so query-continue
        # loops don't keep appending query strings to an already-built URL
        url = @config[:api]

        if method == :post
          data = raw_data.to_post_fields
        elsif method == :get
          url = url.chomp("?") + "?" + raw_data.to_querystring
          data = nil
        end

        @curl.url = url
        @curl.headers["Expect"] = nil # MediaWiki will give a 417 error if Expect is set

        if @debug
          @curl.on_debug do |type, data|
            p data
          end
        end

        # If Set-Cookie headers are given in the response, set the cookies
        @curl.on_header do |data|
          header, text = data.split(":").map(&:strip)
          if header == "Set-Cookie"
            parts = text.split(";")
            cookie_name, cookie_value = parts[0].split("=")
            @cookies[cookie_name] = cookie_value
          end
          data.length
        end

        if data.nil? or (data.is_a? Array and data.empty?)
          @curl.send("http_#{method}".to_sym)
        else
          @curl.send("http_#{method}".to_sym, *data)
        end
        @api_hits += 1

        raise CurbError.new(@curl) unless @curl.response_code == 200

        xml = XmlSimple.xml_in(@curl.body_str, {'ForceArray' => false})
        raise APIError.new(xml['error']['code'], xml['error']['info']) if xml['error']

        response_xml.deep_merge! xml
        if xml['query-continue']
          raw_data.merge! xml['query-continue'][xml['query-continue'].keys.first]
        else
          break
        end
      end

      OpenHash.new(response_xml)
    end

    def login
      return if @config[:logged_in]

      data = {
        :action => :login,
        :lgname => @config[:username],
        :lgpassword => @config[:password]
      }

      response = query_api(:post, data).login

      if response.result == "NeedToken"
        data = {
          :action => :login,
          :lgname => @config[:username],
          :lgpassword => @config[:password],
          :lgtoken => response.token
        }

        response = query_api(:post, data).login
      end

      raise LoginError, response.result unless response.result == "Success"

      @config[:cookieprefix] = response.cookieprefix
      @config[:logged_in] = true
    end

    def logout
      return if !@config[:logged_in]

      query_api(:post, { :action => :logout })
      @config[:logged_in] = false
      @config[:edit_token] = nil
    end

    def edit_token(page = "Main Page")
      @config[:edit_token] ||= begin
        data = {
          :action => :query,
          :prop => :info,
          :intoken => :edit,
          :titles => page
        }

        query_api(:get, data).query.pages.page.edittoken
      end
    end

    # Get wiki stats
    def stats
      data = {
        :action => :query,
        :meta => :siteinfo,
        :siprop => :statistics
      }

      query_api(:get, data).query.statistics
    end

    def page(name)
      WikiBot::Page.new(self, name)
    end

    def category(name)
      WikiBot::Category.new(self, name)
    end

    # Formats a date into the Wikipedia format
    def format_date(date)
      time = date.strftime("%H:%M")
      month = date.strftime("%B")
      "#{time}, #{date.day} #{month} #{date.year} (UTC)"
    end
  end
end
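
For instance, format_date renders a Time in the signature-style timestamp format Wikipedia uses:

bot.format_date(Time.gm(2010, 4, 9, 14, 5))
# => "14:05, 9 April 2010 (UTC)"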
data/wikibot.gemspec
ADDED
@@ -0,0 +1,39 @@
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{wikibot}
  s.version = "0.2.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 1.2") if s.respond_to? :required_rubygems_version=
  s.authors = ["Daniel Vandersluis"]
  s.date = %q{2010-04-09}
  s.description = %q{Mediawiki Bot framework}
  s.email = %q{daniel@codexed.com}
  s.extra_rdoc_files = ["README.textile", "lib/category.rb", "lib/class_ext.rb", "lib/hash_ext.rb", "lib/openhash.rb", "lib/page.rb", "lib/wikibot.rb"]
  s.files = ["README.textile", "Rakefile", "lib/category.rb", "lib/class_ext.rb", "lib/hash_ext.rb", "lib/openhash.rb", "lib/page.rb", "lib/wikibot.rb", "Manifest", "wikibot.gemspec"]
  s.homepage = %q{http://github.com/dvandersluis/wiki_bot}
  s.rdoc_options = ["--line-numbers", "--inline-source", "--title", "Wikibot", "--main", "README.textile"]
  s.require_paths = ["lib"]
  s.rubyforge_project = %q{wikibot}
  s.rubygems_version = %q{1.3.5}
  s.summary = %q{Mediawiki Bot framework}

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<taf2-curb>, [">= 0"])
      s.add_runtime_dependency(%q<xml-simple>, [">= 0"])
      s.add_runtime_dependency(%q<deep_merge>, [">= 0"])
    else
      s.add_dependency(%q<taf2-curb>, [">= 0"])
      s.add_dependency(%q<xml-simple>, [">= 0"])
      s.add_dependency(%q<deep_merge>, [">= 0"])
    end
  else
    s.add_dependency(%q<taf2-curb>, [">= 0"])
    s.add_dependency(%q<xml-simple>, [">= 0"])
    s.add_dependency(%q<deep_merge>, [">= 0"])
  end
end
metadata
ADDED
@@ -0,0 +1,104 @@
--- !ruby/object:Gem::Specification
name: wikibot
version: !ruby/object:Gem::Version
  version: 0.2.0
platform: ruby
authors:
- Daniel Vandersluis
autorequire:
bindir: bin
cert_chain: []

date: 2010-04-09 00:00:00 -06:00
default_executable:
dependencies:
- !ruby/object:Gem::Dependency
  name: taf2-curb
  type: :runtime
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: "0"
    version:
- !ruby/object:Gem::Dependency
  name: xml-simple
  type: :runtime
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: "0"
    version:
- !ruby/object:Gem::Dependency
  name: deep_merge
  type: :runtime
  version_requirement:
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: "0"
    version:
description: Mediawiki Bot framework
email: daniel@codexed.com
executables: []

extensions: []

extra_rdoc_files:
- README.textile
- lib/category.rb
- lib/class_ext.rb
- lib/hash_ext.rb
- lib/openhash.rb
- lib/page.rb
- lib/wikibot.rb
files:
- README.textile
- Rakefile
- lib/category.rb
- lib/class_ext.rb
- lib/hash_ext.rb
- lib/openhash.rb
- lib/page.rb
- lib/wikibot.rb
- Manifest
- wikibot.gemspec
has_rdoc: true
homepage: http://github.com/dvandersluis/wiki_bot
licenses: []

post_install_message:
rdoc_options:
- --line-numbers
- --inline-source
- --title
- Wikibot
- --main
- README.textile
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: "0"
  version:
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: "1.2"
  version:
requirements: []

rubyforge_project: wikibot
rubygems_version: 1.3.5
signing_key:
specification_version: 3
summary: Mediawiki Bot framework
test_files: []