wikian 0.1.8 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 87a4af9592758ef39b75a05614ac443ba2fed75a618ae07cd684f0752ee4f617
- data.tar.gz: 6b0c11ebc5322d0617b8be7fbda496375da3550184a23fedcaae54ae386d7a42
+ metadata.gz: 34b650b2c286774ef79f660715de782f54cd5b86d7690f6055dd662cdda8e1d0
+ data.tar.gz: 4b11d22fc6556d696b75dcd1d0373573e02a8eda58a02d661090be8faf03ec8d
  SHA512:
- metadata.gz: ecb8533aa37113d4649c0d5f3000932682dcc0fa0dee25b72ee81e4d193da0197a981d53d35a5372f869c918b4eb5d017382a27c169c70ff465e334a271bda79
- data.tar.gz: 8f07fa08ffe918b7bea4fa032552c27b340bf80e03414022b67f1f8b684a0f71645d8d7530353757a01e55a2c2f2170dd6a2aacc4e22af8886315675d42fc7d5
+ metadata.gz: 9688be1c774d25208822ccba31d0381f512d9fb161c829631b22d70a824d841ed78b70303a32161f3826649fc1ec34a1af819724a1c34d6c0471d87cdf2ef92f
+ data.tar.gz: 5d3cb888c767d07f3ead8c9dbc56d962042dcb1a719274dd9e28493e8e146ed5807e646af17cf08219737b6e85f3ebe76c60d48854d73f4e1fa0438d2685b490
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- wikian (0.1.8)
+ wikian (0.2.0)

  GEM
  remote: https://rubygems.org/
@@ -13,6 +13,7 @@ require 'fileutils'
  require 'json'
  require 'net/http'
  require 'open-uri'
+ require 'tempfile'
  require 'yaml'

  class Wikian
@@ -43,6 +44,7 @@ class Wikian
  api = Wikian::Get.new(args)
  api.doit
  api.extract_wikitext
+ api.save_metadata
  elsif subcommand[0] == 's'
  api = Wikian::Search.new(args)
  api.doit
@@ -58,6 +60,15 @@ class Wikian
  puts "#{e.class} #{e.message} in #{__FILE__}"
  end

+ def self.meta_dir
+ '.wikian'
+ end
+
+ # file to store metadata of fetched articles
+ def self.meta_file
+ File.join(meta_dir, 'meta.yml')
+ end
+
  def help
  puts <<~eos
  Usage:
@@ -66,46 +77,48 @@ class Wikian
  Options:
  -a, --append append the input file
  -c, --captcha ID:MESSAGE captcha info
- -d, --debug debug
+ -d, --debug print debugging messages
+ -h, --help print this help message
  -m, --message MESSAGE add a commit message (HIGHLY recommended)
  -p, --prepend prepend the input file
  -r, --remove-cookie remove API cookie
  -s, --section NUMBER section to edit
  -t, --template create template configuration file
- -v, --version
+ -v, --version print version number

  Subcommands:
- c, contributions [N] get user last N contributions. N defaults to 20
- g, get get wikitext file from a wikipedia article
- p, post post wikitext file to a wikipedia article
- s, search search wikitext file to a wikipedia article
+ c, contributions [N] get user last N contributions (defaults to #{Contributions::DEFAULT_MAX_CONTRIBUTIONS})
+ g, get get wikitext from a Wikipedia article
+ p, post post wikitext to a Wikipedia article
+ s, search search wikitext in Wikipedia

  Examples:
  # create wiki.yml template
- wiki -t
+ wi g -t

- # download article and create response and wikitext files
- wiki get https://en.wikipedia.org/wiki/Spider-Man
+ # download article
+ wi get -t
+ wi get https://en.wikipedia.org/wiki/Wikipedia:Sandbox

- # upload file to English Wikipedia
- wiki post Spider-Man.en.wikipedia.org.wiki
+ # upload file
+ wi post Wikipedia:Sandbox.en.wikipedia.org.wiki

  # upload file to Spanish Wikipedia
- wiki post Spider-Man.es.wikipedia.org.wiki
+ wi post Wikipedia:Sandbox.es.wikipedia.org.wiki

  # upload file to English Wiktionary
- wiki file to Spider-Man.es.wiktionary.org.wiki
+ wi post Wikipedia:Sandbox.en.wiktionary.org.wiki

- # append new section to article
- wiki post -a Spider-Man-new-section.wiki
+ # append wikitext to section 2 of the article
+ wi post -a -s 2 Wikipedia:Sandbox.en.wikipedia.org.wiki

  # heavy use of the API may require cache validation
- wiki post -c 1234:someMessage spider-Man.wiki
+ wi post -c 1234:someMessage spider-Man.wiki

  Comments:
  Posted files must follow the convention:
- <article_name>.<host>.wiki
- where <host> is a wikimedia site.
+ <article_name>.<site>.wiki
+ where <site> is a wikimedia site.
  More info at: https://meta.wikimedia.org/wiki/Our_projects
  eos
  exit
@@ -11,15 +11,15 @@ class Wikian

  max_contributions = args.find(&:numeric?) || DEFAULT_MAX_CONTRIBUTIONS

- raise(BadUrlError, "Try passing the '-t' option") unless yaml['meta']['site']
+ raise(BadUrlError, "Try passing the '-t' option") unless config['meta']['site']

- @output_file = 'User:' + ENV['WIKI_USER'] + '.contributions.' + yaml['meta']['site']
+ @output_file = 'User:' + ENV['WIKI_USER'] + '.contributions.' + config['meta']['site']

  @params.merge!('ucuser' => ENV['WIKI_USER'], 'uclimit' => max_contributions, 'format' => Wikian::RESPONSE_FORMAT)

  @query = @params.to_query

- @api_url = URI("https://#{yaml['meta']['site']}/w/api.php?#{query}")
+ @api_url = URI("https://#{config['meta']['site']}/w/api.php?#{query}")
  rescue => e
  puts "#{e.class} in #{__FILE__}. #{e.message}"
  exit
@@ -27,12 +27,11 @@ class Wikian

  def template
  <<~eos
+ # for a list of parameters to use here see: https://www.mediawiki.org/wiki/API:Usercontribs
  meta:
- http_method: get
  site: en.wikipedia.org
  headers:
- #accept-encoding: gzip
- user-agent: Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0
+ user-agent: Wikian
  api:
  action:
  - query
@@ -1,17 +1,16 @@
  class Wikian
  class WikianGetError < StandardError; end
- class ExtractWikiError < WikianGetError; end
  class ArgumentRequiredError < WikianGetError; end

  class Get < Subcommand
- attr_accessor :title
+ attr_accessor :title, :url, :latest_revision

  def initialize(args)
  raise ArgumentRequiredError if args.empty?

  super

- url = URI(args.find{|arg| arg =~ URI.regexp})
+ @url = URI(args.find{|arg| arg =~ URI.regexp})

  raise BadUrlError unless url.path

@@ -33,45 +32,43 @@ class Wikian
  #
  # return: nil
  def extract_wikitext
- if !res['content-type'].match?('json') || !(pages = JSON.parse(res.body).dig('query','pages'))
- raise ExtractWikiError, 'JSON response has no pages'
- end
+ pages = JSON.parse(res_body).dig('query','pages')

- create_wiki = -> (title, revisions) do
- revisions.each do |revision|
- wiki_file= File.basename(response_file, File.extname(response_file)) + '.wiki'
- if revision['revid'].nil? && revisions.size > 1
- STDERR.puts "Warning: you should specify 'revid' in #{Wikian::CONFIG_FILE} to prevent overriding different revisions"
- end
- File.open(wiki_file,'w') do |f|
- content = revision.dig('slots', 'main', 'content') ||
- revision.dig('slots', '*') ||
- revision.dig('*')
- STDERR.puts "Warning: nil 'content' in #{Wikian::CONFIG_FILE}" unless content
- STDERR.puts "Writing to #{wiki_file}"
- f.puts content
- end
- end
- end
+ # Wikipedia is inconsistent in their value for 'pages', it's sometimes a hash, sometimes an array
+ @latest_revision = (pages.respond_to?(:keys) ? pages.values.first : pages.first)['revisions'].first
+
+ content = latest_revision.dig('slots', 'main', 'content') ||
+ latest_revision.dig('slots', '*') ||
+ latest_revision.dig('*')

- # this is ugly, but Wikipedia is inconsistent in their JSON value for 'pages'. Sometimes it's a hash, sometimes it's an array.
- if pages.respond_to? :keys
- create_wiki.call(pages.values.first['title'], pages.values.first['revisions'])
- else
- pages.each do |page|
- create_wiki.call(page['title'], page['revisions'])
- end
+ wiki_file= File.basename(response_file, File.extname(response_file)) + '.wiki'
+
+ File.open(wiki_file,'w') do |f|
+ STDERR.puts "Warning: nil 'content' in #{Wikian::CONFIG_FILE}" unless content
+ STDERR.puts "Writing to #{wiki_file}"
+ f.puts content
  end

  rescue => e
- puts "An error occurred while extracting the wikitext",
- "Try using a new config file by pasing the '-t' option.",
- "Or pass the '-d' option for debugging"
+ puts "An error occurred while extracting the wikitext"
  exit
  end

+ # save article metadata
+ #
+ # metadata like article timestamp is used to solve edit conflicts
+ def save_metadata
+ FileUtils.mkdir_p(Wikian.meta_dir)
+
+ metadata = File.exist?(Wikian.meta_file) ? YAML.load(File.open(Wikian.meta_file)) : {}
+ metadata['meta'].merge!(title => {'timestamp' => latest_revision['timestamp']})
+
+ File.write(Wikian.meta_file, YAML.dump(metadata))
+ end
+
  def template
  <<~eos
+ # for a list of parameters to use here see: https://www.mediawiki.org/wiki/API:Revisions
  meta:
  headers:
  user-agent: Wikian
@@ -82,9 +79,10 @@ class Wikian
  - revisions
  rvprop:
  - content
- #rvsection: # get specific sections
- # - 0
- # - 2
+ - timestamp
+ #rvsection: # get specific sections
+ # - 0
+ # - 2
  rvslots:
  - main
  formatversion:
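
For context: the new save_metadata method shown above records each fetched article's latest revision timestamp under Wikian.meta_file (.wikian/meta.yml), keyed by article title, and Post#build_query_string later reads that value back as the edit's 'starttimestamp'. A minimal sketch of the resulting file, loaded back in Ruby (the title and timestamp are hypothetical):

    require 'yaml'
    YAML.load(File.read('.wikian/meta.yml'))
    # => {"meta"=>{"Wikipedia:Sandbox"=>{"timestamp"=>"2020-09-15T08:30:00Z"}}}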
@@ -9,7 +9,7 @@ end
  class Hash
  # return a query string representation of a hash
  def to_query
- URI.decode(URI.encode_www_form(self))
+ URI::DEFAULT_PARSER.unescape(URI.encode_www_form(self))
  end
  end
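
The Hash#to_query change above replaces URI.decode, which was deprecated and removed in Ruby 3.0, with URI::DEFAULT_PARSER.unescape while keeping the same behavior: the hash is form-encoded and then unescaped so that multi-value MediaWiki parameters joined with '|' stay readable in the query string. A small illustration with hypothetical parameters:

    require 'uri'
    params = {'action' => 'query', 'titles' => 'Foo|Bar'}
    URI.encode_www_form(params)                               # => "action=query&titles=Foo%7CBar"
    URI::DEFAULT_PARSER.unescape(URI.encode_www_form(params)) # => "action=query&titles=Foo|Bar"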
 
@@ -1,19 +1,24 @@
  class Wikian
  class WikianPostError < StandardError; end
  class WikiFileError < WikianPostError; end
+ class WikiFileNameError < WikianPostError; end
+ class WikiMergeError < WikianPostError; end

  class Post
  attr_accessor :args, :baseurl, :header, :input_file, :debug, :login_token,
- :login_cookie, :csrf_token, :csrf_cookie, :query, :body_text, :username
+ :login_cookie, :csrf_token, :csrf_cookie, :query, :body_text,
+ :username, :params, :latest_revision, :latest_content, :metadata

  def initialize(args)
  @args = args

+ long_to_short_options
+
  # input wikitext file
- @input_file = args.find{|f| File.exist? f}
- raise WikiFileError unless input_file
+ raise WikiFileError unless @input_file = args.find{|f| File.exist? f}

- site = input_file.match(/\.(.*)\.wiki/)[1]
+ site = input_file.match(/\.(.*)\.wiki/)&.[](1)
+ raise(WikiFileNameError, "Use the Input file name convention <article_name>.<site>.wiki") unless site

  @baseurl = "https://#{site}/w/api.php"

@@ -21,15 +26,20 @@ class Wikian

  @username = ENV['WIKI_USER']

- @debug = (args & %w(-d --debug)).length > 0 ? true : false
+ @debug = (args & %w(-d)).length > 0 ? true : false
  rescue => e
  puts "#{e.class} in #{__FILE__}. #{e.message}"
  exit
  end

+ # transform long options like '--message' to short options like '-m'
+ def long_to_short_options
+ args.map! {|opt| opt[0,2] == '--' ? opt[1,2] : opt}
+ end
+
  def post
  # remove expired cookie
- if expired_cookie? || args.have?(%w(-r --remove-cookie))
+ if expired_cookie? || args.have?(%w(-r))
  FileUtils.rm_f(csrf_cookie_file)
  end
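
The new long_to_short_options helper keeps only the leading dash and first letter of each --long option, which is why the option checks in this class now match short forms such as %w(-d) and %w(-r) only. For example:

    args = ['--message', 'typo fix', '--append', 'Wikipedia:Sandbox.en.wikipedia.org.wiki']
    args.map! {|opt| opt[0,2] == '--' ? opt[1,2] : opt}
    # => ["-m", "typo fix", "-a", "Wikipedia:Sandbox.en.wikipedia.org.wiki"]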
 
@@ -41,10 +51,17 @@ class Wikian

  get_csrf_cookie
  end
- get_csrf_token
-
  build_query_string

+ get_latest_revision
+
+ if @body_text && Time.parse(params['starttimestamp']) < Time.parse(params['basetimestamp'])
+ puts "\e[31mEdit conflict detected, merging with latest version\e[m"
+ merge_versions
+ end
+
+ get_csrf_token
+
  upload_article
  end
 
@@ -84,9 +101,19 @@ class Wikian
  puts(res.body) if debug
  end

+ def get_latest_revision
+ res = URI.open("#{baseurl}?action=query&prop=revisions&titles=#{params['title']}&rvslots=main&rvprop=content|timestamp&format=json")
+ @latest_revision = JSON.parse(res.read).dig('query', 'pages').values.first.dig('revisions').first
+ params['basetimestamp'] = latest_revision['timestamp']
+ @latest_content = latest_revision.dig('slots', 'main', 'content') ||
+ latest_revision.dig('slots', 'main', '*') ||
+ latest_revision.dig('slots', '*') ||
+ latest_revision.dig('*')
+ end
+
  def get_csrf_token
  puts("\nGetting csrf token using csrf cookies") if debug
- url = URI("#{baseurl}?action=query&meta=tokens&format=json&type=csrf")
+ url = URI("#{baseurl}?action=query&meta=tokens&format=json&prop=info|revisions&rvprop=timestamp")
  res = URI.open(url, header.merge('cookie' => csrf_cookie))
  json = JSON.parse(res.read)
  @csrf_token = json.dig('query','tokens','csrftoken')
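
The dig chain in get_latest_revision walks the action=query&prop=revisions response. Roughly the parsed JSON it expects, abridged and with hypothetical values (the exact key holding the content varies with formatversion, hence the chain of fallbacks):

    {
      'query' => {
        'pages' => {
          '12345' => {
            'revisions' => [
              { 'timestamp' => '2020-09-15T08:30:00Z',
                'slots' => { 'main' => { '*' => 'wikitext of the latest revision' } } }
            ]
          }
        }
      }
    }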
@@ -94,32 +121,51 @@ class Wikian
  end

  def build_query_string
- params={}
+ @params={}
  params['action'] = 'edit'
  params['format'] = Wikian::RESPONSE_FORMAT
  params['title'] = input_file.sub(/\..*/,'')
+ @metadata = File.exist?(Wikian.meta_file) ? YAML.load(File.open(Wikian.meta_file)) : {}
+ params['starttimestamp'] =
+ if timestamp = metadata.dig('meta', params['title'], 'timestamp')
+ timestamp
+ else
+ FileUtils.mkdir_p(Wikian.meta_dir)
+ metadata = {'meta' => {'title' => {'timestamp' => File.ctime(input_file).utc.iso8601}}}
+ File.write(Wikian.meta_file, YAML.dump(metadata))
+ end
  wikitext = File.read(input_file)
- if args.have?(%w(-a --append))
+ if args.have?(%w(-a))
  params['appendtext'] = wikitext
- elsif args.have?(%w(-p --prepend))
+ elsif args.have?(%w(-p))
  params['prependtext'] = wikitext
  else
  # pass the wikitext in request body
  @body_text = wikitext
  end
- if args.have?(%w(-c --captcha))
+ if args.have?(%w(-c))
  params['captchaid'], params['captchaword'] = args[args.index('-c')+1].split(':')
  end
- if args.have?(%w(-m --message))
+ if args.have?(%w(-m))
  params['summary'] = args[args.index('-m')+1]
  end
- if args.have?(%w(-s --section))
+ if args.have?(%w(-s))
  params['section'] = args[args.index('-s')+1]
  end
- @query = URI.encode_www_form(params)
+ end
+
+ def merge_versions
+ tmp_local = Tempfile.open {|f| f.write @body_text; f}
+ tmp_latest = Tempfile.open {|f| f.write latest_content; f}
+ @body_text = %x(diff --line-format %L #{tmp_local.path} #{tmp_latest.path})
+ metadata['meta'].merge!(params['title'] => {'timestamp' => Time.now.utc.iso8601})
+ rescue => e
+ puts "WikiMergeError in #{__FILE__}"
+ exit
  end

  def upload_article
+ @query = URI.encode_www_form(params)
  puts("\nUploading the wiki article using csrf token #{csrf_token}") if debug
  url = URI("#{baseurl}?#{query}")
  req = Net::HTTP::Post.new(url, header.merge('cookie' => csrf_cookie, 'content-type' => 'application/x-www-form-urlencoded'))
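
merge_versions above shells out to diff --line-format %L (GNU diffutils is assumed to be installed), which prints every line group as bare content: unchanged lines appear once, and where the local and latest texts differ both sides are kept, local lines first. The result is a naive union merge that replaces @body_text. A minimal sketch with hypothetical contents:

    require 'tempfile'
    local  = "Intro\nmy local change\nOutro\n"
    latest = "Intro\nsomeone else's change\nOutro\n"
    a = Tempfile.open {|f| f.write(local); f}
    b = Tempfile.open {|f| f.write(latest); f}
    %x(diff --line-format %L #{a.path} #{b.path})
    # => "Intro\nmy local change\nsomeone else's change\nOutro\n"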
@@ -6,13 +6,13 @@ class Wikian
  def initialize(args)
  super

- @output_file = yaml['api']['srsearch'].first
+ @output_file = config['api']['srsearch'].first

  @params.merge!('format' => Wikian::RESPONSE_FORMAT)

  @query = @params.to_query

- @api_url = URI("https://#{yaml['meta']['site']}/w/api.php?#{query}")
+ @api_url = URI("https://#{config['meta']['site']}/w/api.php?#{query}")
  rescue => e
  puts "#{e.class} in #{__FILE__}. #{e.message}"
  exit
@@ -20,7 +20,7 @@ class Wikian

  def template
  <<~eos
- # Get last 5 revisions of the Main Page.
+ # for a list of parameters to use here see: https://www.mediawiki.org/wiki/API:Search
  meta:
  site: en.wikipedia.org
  headers:
@@ -1,12 +1,11 @@
- #!/usr/bin/env -S ruby -W0
  class Wikian
  class WikianSubcommandError < StandardError; end
- class MissingConfigFileError < WikianSubcommandError; end
  class BadUrlError < WikianSubcommandError; end

  # class to be inherited by other Wikian classes
  class Subcommand
- attr_accessor :args, :res, :yaml, :query, :title, :api_url, :debug, :output_file
+ attr_accessor :args, :res, :config, :query, :title, :api_url,
+ :debug, :output_file, :res_body

  def initialize(args)
  @args = args
@@ -19,11 +18,15 @@ class Wikian

  @debug = (args & %w(-d --debug)).length > 0 ? true : false

- raise MissingConfigFileError unless File.exist?(Wikian::CONFIG_FILE)
- @yaml=YAML.load(File.open(Wikian::CONFIG_FILE))
+ @config =
+ if File.exist?(Wikian::CONFIG_FILE)
+ YAML.load(File.open(Wikian::CONFIG_FILE))
+ else
+ YAML.load(template)
+ end

  # some params like 'titles' can contain multiple entries joined by '|'. More info in Wikipedia API docs
- @params = Hash[yaml['api'].keys.zip(yaml['api'].values.map{|arr| arr.join("|")})]
+ @params = Hash[config['api'].keys.zip(config['api'].values.map{|arr| arr.join("|")})]
  rescue MissingConfigFileError => e
  puts "#{e.class} try passing the '-t' option to generate #{Wikian::CONFIG_FILE} in #{__FILE__}"
  exit
@@ -36,35 +39,29 @@ class Wikian
  (puts 'Bye'; exit) if answer != 'y'
  end

- File.open(CONFIG_FILE, 'w') do |f|
- f.write template
- end
+ File.write(CONFIG_FILE, template)
  exit
  end

  # HTTP response file name. Its extension depends on the 'content-type' header
  def response_file
- output_file + '.' + res['content-type'].split('/').last.sub(/;.*/,'')
+ output_file + '.' + res.meta['content-type'].split('/').last.sub(/;.*/,'')
  end

  # write response in to `response_file`
  def write_response
  STDERR.puts "Writing to #{response_file}"
  File.open(response_file, 'w') do |f|
- f.puts prettify(res.body)
+ f.puts prettify(res_body)
  end
  end

  def doit
  puts api_url if debug

- req = Net::HTTP::Get.new(api_url, yaml['meta']['headers'])
-
- http = Net::HTTP.new(api_url.host, api_url.port)
-
- http.use_ssl = true
+ @res=URI.open(api_url, config['meta']['headers'])

- @res=http.request(req)
+ @res_body = res.read

  write_response
  rescue => e
@@ -76,7 +73,7 @@ class Wikian

  # if response is JSON prettify it, otherwise return it unchanged
  def prettify(str)
- res['content-type'].match?('json') ? JSON.pretty_generate(JSON.parse(str)) : str
+ res.meta['content-type'].match?('json') ? JSON.pretty_generate(JSON.parse(str)) : str
  end
  end
  end
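
Subcommand#doit now performs the request with open-uri's URI.open instead of a hand-rolled Net::HTTP GET. The returned object is IO-like: headers are exposed through .meta (hence res.meta['content-type'] in response_file and prettify) and the body is read once into @res_body. A minimal sketch of that pattern with a hypothetical URL:

    require 'open-uri'
    res  = URI.open('https://en.wikipedia.org/w/api.php?action=query&format=json',
                    'user-agent' => 'Wikian')
    body = res.read
    res.meta['content-type'] # => e.g. "application/json; charset=utf-8"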
@@ -1,3 +1,3 @@
  class Wikian
- VERSION = "0.1.8"
+ VERSION = "0.2.0"
  end
@@ -6,8 +6,8 @@ Gem::Specification.new do |spec|
  spec.authors = ["sergioro"]
  spec.email = ["yo@sergioro.com"]

- spec.summary = %q{Get and edit wikipedia articles}
- spec.description = %q{Get and edit wikipedia articles}
+ spec.summary = %q{Get and update Wikipedia articles}
+ spec.description = %q{Get and update Wikipedia articles}
  spec.homepage = "https://sergioro.mx/posts/wikian"
  spec.license = "MIT"
  spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
metadata CHANGED
@@ -1,16 +1,16 @@
  --- !ruby/object:Gem::Specification
  name: wikian
  version: !ruby/object:Gem::Version
- version: 0.1.8
+ version: 0.2.0
  platform: ruby
  authors:
  - sergioro
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2020-09-13 00:00:00.000000000 Z
+ date: 2020-09-15 00:00:00.000000000 Z
  dependencies: []
- description: Get and edit wikipedia articles
+ description: Get and update Wikipedia articles
  email:
  - yo@sergioro.com
  executables:
@@ -62,5 +62,5 @@ requirements: []
  rubygems_version: 3.1.4
  signing_key:
  specification_version: 4
- summary: Get and edit wikipedia articles
+ summary: Get and update Wikipedia articles
  test_files: []