reddit_bot 1.6.6 → 1.6.7

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: fb0c92530c0139f3abcf98680613b118ab21eeb8
-  data.tar.gz: 68ff542161f8ea0e73bbeb0ca3ecd2a3fef6d618
+  metadata.gz: 125c3fc98ca91c87bbfbf759229f01a04ab866f8
+  data.tar.gz: 49bef2ef0766f1926013bb735e772d9dcac4042f
 SHA512:
-  metadata.gz: 6f162ba96a47e7ea121f26dc2a1e4a79a23c5448355fdf931b6c518216881758aa527880957e608f8b356a1340dd00999154440678105517b44540d893e4e134
-  data.tar.gz: 9143a8d2ba251a0871097ab726eab1fdae48449a678363ae1c58c73a98baa204cb7f12d5b325bdc4d87a561f7bbf8e2c100540b96dee32681f8ed82b7034148b
+  metadata.gz: e0b083b817628c284c18ff17f5f13e7e5e8de2a35b335024412b34c866774643f5e12409650affb9ffc9db9773e943cb3412767409339783caf73a3068797bef
+  data.tar.gz: 1855665d51f7c148035bb64ccd3e928966c3c6aa0efea8a09e6e6078793acfc4e8ba40fb389e6bcd490c4579446fa8ee25c5813fc389807af1ae1e45ad20d356
data/README.md CHANGED
@@ -27,6 +27,7 @@ The [examples folder](examples) includes:
 * **oneplus** -- bot that removes and modmails about links to 1080x1920 images
 * **yayornay** -- bot that flairs posts according to voting in top level comments
 * **realtimeww2** -- bot that posts tweets to a subreddit from a Twitter user timeline
+* **johnnymarr** -- another Twitter timeline streaming bot that works in a similar way
 * **largeimages** -- this was my first bot -- it uses two approaches to track the highest resolution photos posted anywhere on Reddit and x-post them to [subreddit /r/largeimages](https://www.reddit.com/r/largeimages)
 * **largeimagesreview** -- script that was used in /r/largeimages to calculate the quality of x-posts from different subreddits based on mod activity (remove/approve); it showed that /r/pics and /r/foodporn were better excluded:
 
@@ -9,6 +9,4 @@ end
 
 require "reddit_bot"
 
-require_relative "#{Dir.home}/beat" unless Gem::Platform.local.os == "darwin"
-
 require "yaml"
@@ -1,4 +1,6 @@
 source "https://rubygems.org"
 
-gem "reddit_bot", "~>1.5.4"
-gem "nethttputils", git: "git@github.com:Nakilon/nethttputils.git", tag: "v0.1.0.4"
+gem "reddit_bot", "~>1.6.6"
+
+gem "public_suffix", "<3.0" # Ruby 2.0
+gem "google-cloud-error_reporting", "~>0.27.0" # Ruby 2.0
@@ -1,23 +1,74 @@
-GIT
-  remote: git@github.com:Nakilon/nethttputils.git
-  revision: 6cb2b6a2422d82e266e08b16da4fa29e809fd260
-  tag: v0.1.0.4
-  specs:
-    nethttputils (0.1.0.4)
-
 GEM
   remote: https://rubygems.org/
   specs:
+    addressable (2.5.2)
+      public_suffix (>= 2.0.2, < 4.0)
+    faraday (0.15.2)
+      multipart-post (>= 1.2, < 3)
+    google-cloud-core (1.2.1)
+      google-cloud-env (~> 1.0)
+    google-cloud-env (1.0.2)
+      faraday (~> 0.11)
+    google-cloud-error_reporting (0.27.0)
+      google-cloud-core (~> 1.0)
+      google-gax (~> 0.8.0)
+      stackdriver-core (~> 1.2)
+    google-gax (0.8.12)
+      google-protobuf (~> 3.2)
+      googleapis-common-protos (~> 1.3.5)
+      googleauth (~> 0.5.1)
+      grpc (~> 1.6.6)
+      rly (~> 0.2.3)
+    google-protobuf (3.6.0)
+    googleapis-common-protos (1.3.7)
+      google-protobuf (~> 3.0)
+      googleapis-common-protos-types (~> 1.0)
+      grpc (~> 1.0)
+    googleapis-common-protos-types (1.0.1)
+      google-protobuf (~> 3.0)
+    googleauth (0.5.3)
+      faraday (~> 0.12)
+      jwt (~> 1.4)
+      logging (~> 2.0)
+      memoist (~> 0.12)
+      multi_json (~> 1.11)
+      os (~> 0.9)
+      signet (~> 0.7)
+    grpc (1.6.7)
+      google-protobuf (~> 3.1)
+      googleapis-common-protos-types (~> 1.0.0)
+      googleauth (~> 0.5.1)
     json (2.1.0)
-    reddit_bot (1.5.4)
+    jwt (1.5.6)
+    little-plugger (1.1.4)
+    logging (2.2.2)
+      little-plugger (~> 1.1)
+      multi_json (~> 1.10)
+    memoist (0.16.0)
+    multi_json (1.13.1)
+    multipart-post (2.0.0)
+    nethttputils (0.2.4.1)
+    os (0.9.6)
+    public_suffix (2.0.5)
+    reddit_bot (1.6.6)
       json
+      nethttputils (~> 0.2.4.1)
+    rly (0.2.3)
+    signet (0.8.1)
+      addressable (~> 2.3)
+      faraday (~> 0.9)
+      jwt (>= 1.5, < 3.0)
+      multi_json (~> 1.10)
+    stackdriver-core (1.3.0)
+      google-cloud-core (~> 1.2)
 
 PLATFORMS
   ruby
 
 DEPENDENCIES
-  nethttputils!
-  reddit_bot (~> 1.5.4)
+  google-cloud-error_reporting (~> 0.27.0)
+  public_suffix (< 3.0)
+  reddit_bot (~> 1.6.6)
 
 BUNDLED WITH
   1.16.1
@@ -1,14 +1,34 @@
-require_relative "../boilerplate"
+# The bot replaces a post's flair class with one prefixed with "dev"
+# whenever someone with the "developer" user flair replies in the comments.
+# In the current implementation it reads only the 10 most recent comments per subreddit,
+# so posts can be missed if the bot is down for a while; in practice it is stable enough and does not go down.
 
+require_relative "../boilerplate"
 BOT = RedditBot::Bot.new YAML.load File.read "secrets.yaml"
 
+fail("no ENV['ERROR_REPORTING_KEYFILE'] specified") unless ENV["ERROR_REPORTING_KEYFILE"]
+require "google/cloud/error_reporting"
+Google::Cloud::ErrorReporting.configure do |config|
+  config.project_id = (JSON.load File.read ENV["ERROR_REPORTING_KEYFILE"])["project_id"]
+end
+
+reported = []
 loop do
   puts "LOOP #{Time.now}"
 
+  moderated = BOT.json(:get, "/subreddits/mine/moderator")["data"]["children"].map do |child|
+    fail unless child["kind"] == "t5"
+    child["data"]["display_name"].downcase
+  end
   [
-    ["ion", "Developer"],
+    # ["ion", "Developer"],
     # ["survivetheculling", "Developer"],
+    ["vigorgame", "Developer"],
+    ["insurgency", "Developer"],
+    ["Battalion1944", "Developer"],
   ].each do |subreddit, developer_class|
+    subreddit.downcase!
+    next puts "!!! can't moderate #{subreddit} !!!" unless moderated.include? subreddit
     puts "sub: #{subreddit}"
 
     JSON.parse( begin
@@ -20,13 +40,13 @@ loop do
     end )["data"]["children"].each do |comment|
       id = comment["data"]["link_id"][3..-1]
       commenter_flair = comment["data"]["author_flair_css_class"]
-      # puts "flair: #{commenter_flair}" if commenter_flair
+      puts "flair: #{commenter_flair}" if commenter_flair
       next unless developer_class == commenter_flair
       puts "https://reddit.com/r/#{subreddit}/comments/#{id}/#{comment["data"]["id"]} '#{commenter_flair}'"
       flairselector = BOT.json :post, "/api/flairselector", { link: comment["data"]["link_id"] }
-      existing_flair_class = flairselector["current"]["flair_css_class"]
-      puts "https://reddit.com/#{id} '#{existing_flair_class}'"
-      next unless target = case existing_flair_class
+      current_flair_class = flairselector["current"]["flair_css_class"]
+      puts "existing https://reddit.com/#{id} #{current_flair_class.inspect}"
+      next unless target = case current_flair_class
       when nil then "untaggeddev"
       when "news" then "newsdev"
       when "discussion" then "discussiondev"
@@ -36,10 +56,16 @@ loop do
       when "bug" then "bugdev"
       when "announcement" then "announcementdev"
       when "suggestion" then "suggestiondev"
-      else puts "ignored https://reddit.com/#{id} '#{existing_flair_class}'"
+      else puts "ignored https://reddit.com/#{id} #{current_flair_class.inspect}"
+      end
+      unless choice = flairselector["choices"].find{ |choice| choice["flair_css_class"] == target }
+        next if reported.include? comment["data"]["link_id"]
+        Google::Cloud::ErrorReporting.report RuntimeError.new("no '#{target}' link flair in /r/#{subreddit}").tap{ |_| _.set_backtrace caller }
+        reported.push comment["data"]["link_id"]
+        next
       end
-      choice = flairselector["choices"].find{ |choice| choice["flair_css_class"] == target }
       puts "assigning '#{target}' (#{choice}) flair to post https://reddit.com/#{id}"
+      next if ENV["TEST"]
       _ = BOT.json :post,
         "/api/selectflair", {
           flair_template_id: choice["flair_template_id"],
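As a reading aid for the hunks above (not part of the diff): the loop only relies on two keys of the flairselector response, "current" and "choices", and on "flair_template_id" inside the chosen entry. A minimal sketch with an illustrative, made-up response:

    # illustrative response shape only; the keys mirror what the loop above reads
    flairselector = {
      "current" => { "flair_css_class" => "news" },
      "choices" => [
        { "flair_css_class" => "newsdev", "flair_template_id" => "00000000-0000-0000-0000-000000000000" },
      ],
    }
    target = "newsdev"
    choice = flairselector["choices"].find { |c| c["flair_css_class"] == target }
    puts choice["flair_template_id"] if choice

When no choice matches, the new code reports the missing flair class once per post via Google Cloud Error Reporting instead of crashing.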
@@ -1,4 +1,6 @@
-# TODO maybe activate raise_on_failure optional FastImage param
+# TODO deprecate in favor of the gem directlink
+
+# TODO maybe activate raise_on_failure optional FastImage param
 
 require "pp"
 # require "json"
@@ -0,0 +1,3 @@
+source "https://rubygems.org"
+
+gem "reddit_bot", git: "git@github.com:nakilon/reddit_bot.git"
@@ -0,0 +1,22 @@
+GIT
+  remote: git@github.com:nakilon/reddit_bot.git
+  revision: 0a0649b20ff0b1a28366a17ec7c037e481e850b3
+  specs:
+    reddit_bot (1.6.7)
+      json
+      nethttputils (~> 0.2.4.1)
+
+GEM
+  remote: https://rubygems.org/
+  specs:
+    json (2.1.0)
+    nethttputils (0.2.4.2)
+
+PLATFORMS
+  ruby
+
+DEPENDENCIES
+  reddit_bot!
+
+BUNDLED WITH
+  1.16.1
@@ -0,0 +1,54 @@
+require_relative "../boilerplate"
+SUBREDDIT = "JohnnyMarr"
+BOT = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), subreddit: SUBREDDIT
+
+TWITTER = "Johnny_Marr"
+require_relative "twitter"
+
+loop do
+  id = BOT.new_posts.find do |post|
+    /\(https:\/\/twitter\.com\/#{TWITTER}\/status\/(\d{18,})\)/i =~ post["selftext"] and break $1
+  end.to_i
+  n = if id.zero?
+    fail "no tweets found in subreddit" unless [ "#{SUBREDDIT}_TEST" ].include?(SUBREDDIT) || ENV["START"]
+    10
+  else
+    200
+  end
+
+  fail unless flair = BOT.json(:get, "/r/#{SUBREDDIT}/api/link_flair").find do |flair|
+    flair["text"] == "Twitter"
+  end
+
+  timeout = 0
+  JSON.load( begin
+    NetHTTPUtils.request_data(
+      "https://api.twitter.com/1.1/statuses/user_timeline.json",
+      form: { screen_name: TWITTER, count: n, tweet_mode: "extended" },
+      header: { Authorization: "Bearer #{TWITTER_ACCESS_TOKEN}" }
+    )
+  rescue NetHTTPUtils::Error => e
+    fail if e.code != 503
+    sleep timeout += 1
+    retry
+  end ).sort_by{ |tweet| -tweet["id"] }.take_while do |tweet|
+    tweet["id"] > id && (!File.exist?("id") || tweet["id"] > File.read("id").to_i)
+  end.reverse_each do |tweet|
+    title, text, contains_media = Tweet2titleNtext[tweet]
+    result = BOT.json :post, "/api/submit", {
+      sr: SUBREDDIT,
+      kind: "self",
+      title: title,
+      text: text,
+    }.tap{ |h| h.merge!({ flair_id: flair["id"] }) }
+    unless result["json"]["errors"].empty?
+      fail unless result["json"]["errors"].map(&:first) == ["ALREADY_SUB"]
+      puts "ALREADY_SUB error for #{tweet["id"]}"
+    end
+    File.write "id", tweet["id"]
+    abort if ENV["ONCE"]
+  end
+
+  puts "END LOOP #{Time.now}"
+  sleep 300
+end
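One note on the `find { ... and break $1 }` idiom above: `break` with a value makes `find` return the captured status id string rather than the matching post hash, and `nil.to_i` turns "no match" into 0, which is exactly what the `id.zero?` branch checks. A standalone illustration, with made-up post data:

    # made-up posts; the second selftext embeds a status link like the bot's own posts
    posts = [
      { "selftext" => "no twitter link here" },
      { "selftext" => "source: (https://twitter.com/Johnny_Marr/status/1007650044441329664)" },
    ]
    id = posts.find do |post|
      %r{\(https://twitter\.com/Johnny_Marr/status/(\d{18,})\)}i =~ post["selftext"] and break $1
    end.to_i
    p id  # => 1007650044441329664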
@@ -0,0 +1,80 @@
+require "json"
+require "nethttputils"
+
+TWITTER_ACCESS_TOKEN = JSON.load(
+  NetHTTPUtils.request_data "https://api.twitter.com/oauth2/token", :post,
+    auth: File.read("twitter.token").split,
+    form: {grant_type: :client_credentials}
+)["access_token"]
+
+Tweet2titleNtext = lambda do |tweet|
+  pp tweet if ENV["TEST"]
+  text = ""
+  contains_media = false
+  up = ->s{ s.split.map{ |w| "^#{w}" }.join " " }
+
+  tweet_to_get_media_from = tweet["retweeted_status"] || tweet
+  if tweet_to_get_media_from["extended_entities"] && !tweet_to_get_media_from["extended_entities"]["media"].empty?
+    contains_media = true
+    tweet_to_get_media_from["extended_entities"]["media"].each_with_index do |media, i|
+      text.concat "* [Image #{i + 1}](#{media["media_url_https"]})\n\n"
+    end
+  end
+  if !tweet_to_get_media_from["entities"]["urls"].empty?
+    contains_media = true
+    tweet_to_get_media_from["entities"]["urls"].each_with_index do |url, i|
+      text.concat "* [Link #{i + 1}](#{url["expanded_url"]})\n\n"
+    end
+  end
+
+  require "date"
+  text.concat "^- #{
+    up[tweet["user"]["name"]]
+  } [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [#{
+    up[Date.parse(tweet["created_at"]).strftime "%B %-d, %Y"]
+  }](https://twitter.com/#{TWITTER}/status/#{tweet["id"]})"
+  require "cgi"
+  # [CGI::unescapeHTML(tweet["full_text"]).sub(/( https:\/\/t\.co\/[0-9a-zA-Z]{10})*\z/, ""), text, contains_media]
+  [CGI::unescapeHTML(tweet["retweeted_status"] ? "RT: #{tweet["retweeted_status"]["full_text"]}" : tweet["full_text"]).sub(/(\s+https:\/\/t\.co\/[0-9a-zA-Z]{10})*\z/, ""), text, contains_media]
+end
+[
+  [905764294687633408, true, "The Polish government & military high command is now evacuating Warsaw for Brest, 120 miles east: German armies are too close to the capital", "* [Image 1](https://pbs.twimg.com/media/DJHq71BXYAA6KJ0.jpg)\n\n" "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^""September ^7, ^2017](https://twitter.com/#{TWITTER}/status/905764294687633408)"],
+  [915534673471733760, true, "In east Poland (now Soviet Ukraine) industry & farms to be collectivised, political parties banned, aristocrats & capitalists \"re-educated\".", "* [Image 1](https://pbs.twimg.com/media/DLSh2J9W4AACcOG.jpg)\n\n* [Image 2](https://pbs.twimg.com/media/DLSh4sKX0AEBaXq.jpg)\n\n^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^4, ^2017](https://twitter.com/#{TWITTER}/status/915534673471733760)"],
+  [915208866408824832, true, "For 1st time, RAF planes dropping propaganda leaflets on Berlin itself, entitled \"Germans: these are your leaders!\"", "* [Image 1](https://pbs.twimg.com/media/DLN5jJ-XkAEUz9M.jpg)\n\n* [Link 1](https://www.psywar.org/product_1939EH158.php)\n\n" "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^3, ^2017](https://twitter.com/#{TWITTER}/status/915208866408824832)"],
+  [914577848891006978, true, "\"In Poland, Russia pursued a cold policy of selfinterest. But clearly necessary for Russia… against Nazi menace.\"", "* [Link 1](https://www.youtube.com/watch?v=ygmP5A3n2JA)\n\n" "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^1, ^2017](https://twitter.com/#{TWITTER}/status/914577848891006978)"],
+  [926581977372942336, false, "Finland rejects Soviet demand to surrender land near Leningrad & give Red Navy base in Hanko; Soviets now claim Finns' manner \"warlike\".", "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "November ^3, ^2017](https://twitter.com/#{TWITTER}/status/926581977372942336)"],
+  [1007650044441329664, true, "RT: SOLD OUT | Tonight’s @Johnny_Marr signing at Rough Trade East is now completely sold out! Catch you in a bit. ‘Call The Comet’ is out now:", "* [Image 1](https://pbs.twimg.com/media/DfvdN1_WsAE_a3r.jpg)\n\n* [Link 1](https://roughtrade.com/gb/music/johnny-marr-call-the-comet)\n\n^- ^Johnny ^Marr [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^June ^15, ^2018](https://twitter.com/#{TWITTER}/status/1007650044441329664)"],
+  [1007155648612581376, true, "Tomorrow. #CallTheComet", "* [Image 1](https://pbs.twimg.com/ext_tw_video_thumb/1007155601913204736/pu/img/IREVPkgUVHoQHfBB.jpg)\n\n" "^- ^Johnny ^Marr [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^June ^14, ^2018](https://twitter.com/#{TWITTER}/status/1007155648612581376)"],
+].each do |id, contains_media_, title_, text_|
+  title, text, contains_media = Tweet2titleNtext[ JSON.load NetHTTPUtils.request_data(
+    "https://api.twitter.com/1.1/statuses/show.json",
+    form: { id: id, tweet_mode: "extended" },
+    header: { Authorization: "Bearer #{TWITTER_ACCESS_TOKEN}" },
+  ) ]
+  unless contains_media_ == contains_media
+    puts "expected: #{contains_media_}"
+    puts "got: #{contains_media}"
+    abort "CONTAINS_MEDIA ERROR"
+  end
+  unless title_ == title
+    puts "expected:\n#{title_.inspect}"
+    puts "got:\n#{title.inspect}"
+    abort "TITLE FORMATTING ERROR"
+  end
+  unless text_ == text
+    puts "expected:\n#{text_.inspect}"
+    puts "got:\n#{text.inspect}"
+    abort "TEXT FORMATTING ERROR"
+  end
+  if ENV["TEST_POST"]
+    pp BOT.json :post, "/api/submit", {
+      sr: "#{SUBREDDIT}_TEST",
+      kind: "self",
+      title: title,
+      text: text,
+    }.tap{ |h| h.merge!({ flair_id: BOT.json(:get, "/r/#{SUBREDDIT}_TEST/api/link_flair").find{ |flair|
+      flair["text"] == "Contains Media"
+    }["id"] }) if contains_media }
+  end
+end
+abort "OK" if ENV["TEST"]
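The `up` lambda above is what produces the superscript formatting seen in the expected strings: it prefixes every whitespace-separated word with `^`, which Reddit renders as superscript. A standalone example, not part of the diff:

    up = ->s{ s.split.map{ |w| "^#{w}" }.join " " }
    puts up["September 7, 2017"]  # => ^September ^7, ^2017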
@@ -1,8 +1,8 @@
 GIT
   remote: git@github.com:Nakilon/directlink.git
-  revision: 18b1544dcbdee3d9f5c2ad685e4eaf134773e5a1
+  revision: b22cb9f103d474742f773b9434747f9cf5a09ed0
   specs:
-    directlink (0.0.2.0)
+    directlink (0.0.3.0)
       fastimage (~> 2.1.3)
       nethttputils (~> 0.2.4.0)
 
@@ -83,7 +83,7 @@ GEM
     mini_portile2 (2.1.0)
     multi_json (1.13.1)
     multipart-post (2.0.0)
-    nethttputils (0.2.4.0)
+    nethttputils (0.2.4.2)
     nokogiri (1.6.8.1)
       mini_portile2 (~> 2.1.0)
     os (0.9.6)
@@ -37,7 +37,13 @@ EXCLUDE = %w{ foodporn powerwashingporn }
 checked = []
 
 search_url = lambda do |url|
-  JSON.load(NetHTTPUtils.request_data "https://www.reddit.com/r/largeimages/search.json", form: {q: "url:#{url}", restrict_sr: "on"}, header: ["User-Agent", "ajsdjasdasd"])["data"]["children"]
+  JSON.load( begin
+    NetHTTPUtils.request_data "https://www.reddit.com/r/largeimages/search.json", form: {q: "url:#{url}", restrict_sr: "on"}, header: ["User-Agent", "ajsdjasdasd"]
+  rescue NetHTTPUtils::Error => e
+    raise unless [503].include? e.code
+    sleep 60
+    retry
+  end )["data"]["children"]
 end
 fail unless 1 == search_url["https://i.imgur.com/9JTxtjW.jpg"].size
 
@@ -88,12 +94,12 @@ loop do
     next logger.warn "skipped a post by /u/bekalaki" if author == "bekalaki" # 9 ways to divide a karmawhore
 
     t = begin
-      DirectLink url
-    rescue NetHTTPUtils::Error,
-           SocketError,
+      DirectLink url, 60
+    rescue SocketError,
+           Net::OpenTimeout,
+           NetHTTPUtils::Error,
            FastImage::UnknownImageType,
            FastImage::ImageFetchFailure,
-           # DirectLink::ErrorMissingEnvVar,
           DirectLink::ErrorNotFound,
           DirectLink::ErrorBadLink => e
      next logger.error "skipped (#{e}) #{url} from http://redd.it/#{id}"
@@ -112,10 +118,8 @@ loop do
       " [#{tt.size} images]" if tt.size > 1
     } #{
       title.sub(/\s*\[?#{tt.first.width}\s*[*x×]\s*#{tt.first.height}\]?\s*/i, " ").
-        sub("[OC]", " ").gsub(/\s+/, " ").strip.
-        gsub(/(?<=.{190 - subreddit.size}).+/, "...")
-    } /r/#{subreddit}".
-      gsub(/\s+\(\s+\)\s+/, " ")
+        sub("[OC]", " ").gsub(/\s+/, " ").strip
+    } /r/#{subreddit}".gsub(/\s+\(\s+\)\s+/, " ").sub(/(?<=.{297}).+/, "...")
     logger.warn "new post #{source}: #{url} #{title.inspect}"
     unless Gem::Platform.local.os == "darwin"
       result = BOT.json :post,
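The per-subreddit truncation removed above is replaced by a single final `sub`: the fixed-length lookbehind keeps the first 297 characters and swaps the rest for "...", so the submitted title stays within Reddit's 300-character limit. A quick standalone check, not part of the bot:

    # same truncation pattern as the new title line above
    title = "a" * 400
    puts title.sub(/(?<=.{297}).+/, "...").length  # => 300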
@@ -1,3 +1,5 @@
+# TODO deprecate in favor of the gem nethttputils
+
 require "net/http"
 require "openssl"
 
@@ -1,4 +1,3 @@
 source "https://rubygems.org"
 
-gem "reddit_bot", "~>1.6.4"
-gem "nethttputils", git: "git@github.com:Nakilon/nethttputils.git", tag: "v0.1.0.4"
+gem "reddit_bot", git: "git@github.com:nakilon/reddit_bot.git"
@@ -1,23 +1,22 @@
 GIT
-  remote: git@github.com:Nakilon/nethttputils.git
-  revision: 6cb2b6a2422d82e266e08b16da4fa29e809fd260
-  tag: v0.1.0.4
+  remote: git@github.com:nakilon/reddit_bot.git
+  revision: 351c559e507289aa6385e769eb6d5c6de80ba629
   specs:
-    nethttputils (0.1.0.4)
+    reddit_bot (1.6.7)
+      json
+      nethttputils (~> 0.2.4.1)
 
 GEM
   remote: https://rubygems.org/
   specs:
     json (2.1.0)
-    reddit_bot (1.6.4)
-      json
+    nethttputils (0.2.4.2)
 
 PLATFORMS
   ruby
 
 DEPENDENCIES
-  nethttputils!
-  reddit_bot (~> 1.6.4)
+  reddit_bot!
 
 BUNDLED WITH
   1.16.1
data/lib/reddit_bot.rb CHANGED
@@ -7,8 +7,15 @@ require "json"
 
 require "nethttputils"
 
-require_relative "reddit_bot/version"
+require_relative "reddit_bot/version" # TODO: deprecate this
+
 module RedditBot
+  require "logger"
+  class << self
+    attr_accessor :logger
+  end
+  self.logger = Logger.new STDOUT
+
   class Bot
 
     # bot's Reddit username; set via constructor parameter secrets[:login]
@@ -31,12 +38,12 @@ module RedditBot
       response = JSON.parse resp_with_token mtd, path, form.merge({api_type: "json"})
       if response.is_a?(Hash) && response["json"] && # for example, flairlist.json and {"error": 403} do not have it
          !response["json"]["errors"].empty?
-        puts "ERROR OCCURED on #{[mtd, path]}"
+        Module.nesting[1].logger.error "ERROR OCCURED on #{[mtd, path]}"
         fail "unknown how to handle multiple errors" if 1 < response["json"]["errors"].size
-        puts "error: #{response["json"]["errors"]}"
+        Module.nesting[1].logger.error "error: #{response["json"]["errors"]}"
         error, description = response["json"]["errors"].first
         case error
-        when "ALREADY_SUB" ; puts "was rejected by moderator if you didn't see in dups"
+        when "ALREADY_SUB" ; Module.nesting[1].logger.warn "was rejected by moderator if you didn't see in dups"
         # when "BAD_CAPTCHA" ; update_captcha
         # json mtd, path, form.merger( {
         #   iden: @iden_and_captcha[0],
@@ -44,7 +51,7 @@ module RedditBot
         #   } ) unless @ignore_captcha
         when "RATELIMIT"
           fail error unless description[/\Ayou are doing that too much\. try again in (\d) minutes\.\z/]
-          puts "retrying in #{$1.to_i + 1} minutes"
+          Module.nesting[1].logger.info "retrying in #{$1.to_i + 1} minutes"
           sleep ($1.to_i + 1) * 60
           return json mtd, path, _form
         else ; fail error
@@ -68,7 +75,7 @@ module RedditBot
     # [reason] :nodoc:
     # [thing_id] +String+ fullname of a "link, commenr or message"
     def report reason, thing_id
-      puts "reporting '#{thing_id}'"
+      Module.nesting[1].logger.warn "reporting '#{thing_id}'"
       json :post, "/api/report",
         reason: "other",
         other_reason: reason,
@@ -79,8 +86,11 @@ module RedditBot
     # [link_flair_css_class] :nodoc:
     # [link_flair_text] :nodoc:
     def set_post_flair post, link_flair_css_class, link_flair_text
-      puts "setting flair '#{link_flair_css_class}' with text '#{link_flair_text}' to post '#{post["name"]}'"
-      return puts "possibly not enough permissions for /r/#{@subreddit}/api/flairselector" if {"error"=>403} == @flairselector_choices ||= json(:post, "/r/#{@subreddit}/api/flairselector", link: post["name"])
+      Module.nesting[1].logger.warn "setting flair '#{link_flair_css_class}' with text '#{link_flair_text}' to post '#{post["name"]}'"
+      if {"error"=>403} == @flairselector_choices ||= json(:post, "/r/#{@subreddit}/api/flairselector", link: post["name"])
+        Module.nesting[1].logger.error "possibly not enough permissions for /r/#{@subreddit}/api/flairselector"
+        return
+      end
       json :post, "/api/selectflair",
         link: post["name"],
         text: link_flair_text,
@@ -92,7 +102,7 @@ module RedditBot
     # [thing_id] +String+ fullname of a post (or self.post?), comment (and private message?)
     # [text] :nodoc:
     def leave_a_comment thing_id, text
-      puts "leaving a comment on '#{thing_id}'"
+      Module.nesting[1].logger.warn "leaving a comment on '#{thing_id}'"
       json(:post, "/api/comment",
         thing_id: thing_id,
         text: text,
@@ -180,7 +190,7 @@ module RedditBot
         fail "bot #{@name} isn't a 'developer' of app at https://www.reddit.com/prefs/apps/" if response == {"error"=>"invalid_grant"}
         fail response.inspect
       end
-      puts "new token is: #{@token_cached}"
+      Module.nesting[1].logger.info "new token is: #{@token_cached}"
       # update_captcha if "true" == resp_with_token(:get, "/api/needs_captcha", {})
       @token_cached
     end
@@ -216,7 +226,7 @@ module RedditBot
       NetHTTPUtils.request_data(url, mtd, form: form, header: headers, auth: basic_auth) do |response|
         next unless remaining = response.to_hash["x-ratelimit-remaining"]
         if Gem::Platform.local.os == "darwin"
-          puts %w{
+          Module.nesting[1].logger.debug %w{
             x-ratelimit-remaining
             x-ratelimit-used
             x-ratelimit-reset
@@ -225,13 +235,13 @@ module RedditBot
         fail remaining[0] if remaining[0].size < 4
         next if remaining[0].size > 4
         t = (response.to_hash["x-ratelimit-reset"][0].to_f + 1) / [remaining[0].to_f - 10, 1].max + 1
-        puts "sleeping #{t} seconds because of x-ratelimit"
+        Module.nesting[1].logger.info "sleeping #{t} seconds because of x-ratelimit"
         sleep t
       end
     rescue NetHTTPUtils::Error => e
       sleep 5
-      raise unless e.code == 503 || e.code == 502
-      puts "API ERROR 503"
+      raise unless e.code.to_s.start_with? "50"
+      Module.nesting[1].logger.error "API ERROR 50*"
       retry
     end
   end
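About the `Module.nesting[1]` receiver used throughout these hunks: inside `class Bot`, which is lexically nested in `module RedditBot`, `Module.nesting` is `[RedditBot::Bot, RedditBot]`, so `[1]` picks the `RedditBot` module that now owns the logger. A tiny standalone illustration:

    module RedditBot
      class Bot
        p Module.nesting     # => [RedditBot::Bot, RedditBot]
        p Module.nesting[1]  # => RedditBot, the module carrying the new logger accessor
      end
    end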
@@ -1,3 +1,3 @@
 module RedditBot
-  VERSION = "1.6.6" # :nodoc:
+  VERSION = "1.6.7" # :nodoc:
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: reddit_bot
 version: !ruby/object:Gem::Version
-  version: 1.6.6
+  version: 1.6.7
 platform: ruby
 authors:
 - Victor Maslov
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-06-14 00:00:00.000000000 Z
+date: 2018-08-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: json
@@ -77,6 +77,10 @@ files:
 - examples/iostroubleshooting/Gemfile
 - examples/iostroubleshooting/Gemfile.lock
 - examples/iostroubleshooting/main.rb
+- examples/johnnymarr/Gemfile
+- examples/johnnymarr/Gemfile.lock
+- examples/johnnymarr/main.rb
+- examples/johnnymarr/twitter.rb
 - examples/largeimages/Gemfile
 - examples/largeimages/Gemfile.lock
 - examples/largeimages/main.rb