reddit_bot 1.7.7 → 1.7.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +4 -4
  2. data/lib/reddit_bot.rb +3 -1
  3. data/reddit_bot.gemspec +9 -11
  4. metadata +9 -77
  5. data/.gitignore +0 -5
  6. data/Gemfile +0 -5
  7. data/README.md +0 -101
  8. data/Rakefile +0 -6
  9. data/examples/.bashrc +0 -2
  10. data/examples/.gitignore +0 -2
  11. data/examples/Gemfile.lock +0 -17
  12. data/examples/boilerplate.rb +0 -12
  13. data/examples/councilofricks/Gemfile +0 -4
  14. data/examples/councilofricks/Gemfile.lock +0 -17
  15. data/examples/councilofricks/main.rb +0 -58
  16. data/examples/cptflairbot3/.bashrc +0 -1
  17. data/examples/cptflairbot3/Code.gs +0 -13
  18. data/examples/cptflairbot3/Gemfile +0 -5
  19. data/examples/cptflairbot3/Gemfile.lock +0 -74
  20. data/examples/cptflairbot3/app.js +0 -40
  21. data/examples/cptflairbot3/casual/casualpokemontrades.htm +0 -910
  22. data/examples/cptflairbot3/casual/script.js +0 -55
  23. data/examples/cptflairbot3/casual/style.css +0 -1099
  24. data/examples/cptflairbot3/log.htm +0 -1
  25. data/examples/cptflairbot3/main.rb +0 -62
  26. data/examples/cptflairbot3/package.json +0 -6
  27. data/examples/cptflairbot3/pubsub.rb +0 -30
  28. data/examples/devflairbot/Gemfile +0 -6
  29. data/examples/devflairbot/Gemfile.lock +0 -74
  30. data/examples/devflairbot/main.rb +0 -81
  31. data/examples/dut/Gemfile +0 -4
  32. data/examples/dut/Gemfile.lock +0 -24
  33. data/examples/dut/main.rb +0 -41
  34. data/examples/get_dimensions.rb +0 -212
  35. data/examples/iostroubleshooting/Gemfile +0 -5
  36. data/examples/iostroubleshooting/Gemfile.lock +0 -16
  37. data/examples/iostroubleshooting/main.rb +0 -36
  38. data/examples/johnnymarr/Gemfile +0 -3
  39. data/examples/johnnymarr/Gemfile.lock +0 -17
  40. data/examples/johnnymarr/main.rb +0 -54
  41. data/examples/johnnymarr/twitter.rb +0 -80
  42. data/examples/largeimages/Gemfile +0 -11
  43. data/examples/largeimages/Gemfile.lock +0 -105
  44. data/examples/largeimages/main.rb +0 -173
  45. data/examples/largeimagesreview/Gemfile +0 -4
  46. data/examples/largeimagesreview/Gemfile.lock +0 -15
  47. data/examples/largeimagesreview/main.rb +0 -43
  48. data/examples/mlgtv/Gemfile +0 -4
  49. data/examples/mlgtv/Gemfile.lock +0 -23
  50. data/examples/mlgtv/channels.txt +0 -127
  51. data/examples/mlgtv/main.rb +0 -160
  52. data/examples/net_http_utils.rb +0 -148
  53. data/examples/oneplus/Gemfile +0 -5
  54. data/examples/oneplus/Gemfile.lock +0 -26
  55. data/examples/oneplus/main.rb +0 -43
  56. data/examples/realtimeww2/.bashrc +0 -1
  57. data/examples/realtimeww2/Gemfile +0 -3
  58. data/examples/realtimeww2/Gemfile.lock +0 -17
  59. data/examples/realtimeww2/main.rb +0 -129
  60. data/examples/sexypizza/Gemfile +0 -3
  61. data/examples/sexypizza/Gemfile.lock +0 -15
  62. data/examples/sexypizza/main.rb +0 -33
  63. data/examples/unisa/Gemfile +0 -4
  64. data/examples/unisa/Gemfile.lock +0 -24
  65. data/examples/unisa/main.rb +0 -41
  66. data/examples/wallpaper/Gemfile +0 -5
  67. data/examples/wallpaper/Gemfile.lock +0 -34
  68. data/examples/wallpaper/main.rb +0 -27
  69. data/examples/yayornay/Gemfile +0 -3
  70. data/examples/yayornay/Gemfile.lock +0 -15
  71. data/examples/yayornay/main.rb +0 -33
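
The removed example scripts shown in the hunks below all follow the same pattern: credentials are read from a local secrets.yaml and passed to RedditBot::Bot, which then drives Reddit's JSON API. A minimal sketch of that pattern, with a placeholder subreddit name and assuming a secrets.yaml shaped the way the examples expect (the sketch itself is not part of the gem or of this diff):

    # Sketch of the usage pattern seen in the removed examples.
    # The secrets.yaml contents and the subreddit name are placeholders.
    require "yaml"
    require "reddit_bot"

    bot = RedditBot::Bot.new YAML.load_file("secrets.yaml"), subreddit: "test"

    # List the newest posts, as the removed example bots do before acting on them.
    bot.new_posts.take(5).each do |post|
      puts post["title"]
    end
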
data/examples/iostroubleshooting/Gemfile
@@ -1,5 +0,0 @@
- source "https://rubygems.org"
-
- # ruby "2.0.0"
- gem "json"#, "1.7.7"
- gem "reddit_bot", "~>1.1.5"
data/examples/iostroubleshooting/Gemfile.lock
@@ -1,16 +0,0 @@
- GEM
- remote: https://rubygems.org/
- specs:
- json (1.8.3)
- reddit_bot (1.1.5)
- json
-
- PLATFORMS
- ruby
-
- DEPENDENCIES
- json
- reddit_bot (~> 1.1.5)
-
- BUNDLED WITH
- 1.11.2
data/examples/iostroubleshooting/main.rb
@@ -1,36 +0,0 @@
- require_relative File.join "..", "boilerplate"
- BOT = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), ignore_captcha: true
-
- # SUBREDDIT = "test___________"
- SUBREDDIT = "iostroubleshooting"
-
- # require "open-uri"
- require "csv" # /api/flaircsv
-
- loop do
- AWSStatus::touch
- catch :loop do
-
- existing = BOT.json(:get, "/r/#{SUBREDDIT}/api/flairlist")["users"]
- begin
- JSON.parse(DownloadWithRetry::download_with_retry("#{File.read "gas.url"}sheet_name=Bot&spreadsheet_id=10UzXUbawBgXLQkxXDMz28Qcx3IQPjwG9nByd_d8y31I", &:read))
- rescue JSON::ParserError
- puts "smth wrong with GAS script"
- throw :loop
- end.drop(1).reverse.uniq{ |_, user, _, _| user }.map do |row|
- next unless row.map(&:empty?) == [false, false, false, false]
- _, user, ios, flair = row
- next if existing.include?({"flair_css_class"=>flair, "user"=>user, "flair_text"=>ios})
- [user, ios, flair]
- # {"iPhone"=>"greenflair", "iPad"=>"blue", "iPod"=>"red"}[device[/iP(od|ad|hone)/]]]
- end.compact.each_slice(50) do |slice|
- CSV(load = ""){ |csv| slice.each{ |record| csv << record } }
- puts load
- BOT.json(:post, "/r/#{SUBREDDIT}/api/flaircsv", [["flair_csv", load]]).each do |report|
- pp report unless report.values_at("errors", "ok", "warnings") == [{}, true, {}]
- end
- end
-
- end
- sleep 60
- end
data/examples/johnnymarr/Gemfile
@@ -1,3 +0,0 @@
- source "https://rubygems.org"
-
- gem "reddit_bot"
data/examples/johnnymarr/Gemfile.lock
@@ -1,17 +0,0 @@
- GEM
- remote: https://rubygems.org/
- specs:
- json (2.1.0)
- nethttputils (0.2.5.1)
- reddit_bot (1.6.10)
- json
- nethttputils (~> 0.2.5.1)
-
- PLATFORMS
- ruby
-
- DEPENDENCIES
- reddit_bot
-
- BUNDLED WITH
- 1.17.1
data/examples/johnnymarr/main.rb
@@ -1,54 +0,0 @@
- require_relative "../boilerplate"
- SUBREDDIT = "JohnnyMarr"
- BOT = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), subreddit: SUBREDDIT
-
- TWITTER = "Johnny_Marr"
- require_relative "twitter"
-
- loop do
- id = BOT.new_posts.find do |post|
- /\(https:\/\/twitter\.com\/#{TWITTER}\/status\/(\d{18,})\)/i =~ post["selftext"] and break $1
- end.to_i
- n = if id.zero?
- fail "no tweets found in subreddit" unless [ "#{SUBREDDIT}_TEST" ].include?(SUBREDDIT) || ENV["START"]
- 10
- else
- 200
- end
-
- fail unless flair = BOT.json(:get, "/r/#{SUBREDDIT}/api/link_flair").find do |flair|
- flair["text"] == "Twitter"
- end
-
- timeout = 0
- JSON.load( begin
- NetHTTPUtils.request_data(
- "https://api.twitter.com/1.1/statuses/user_timeline.json",
- form: { screen_name: TWITTER, count: n, tweet_mode: "extended" },
- header: { Authorization: "Bearer #{TWITTER_ACCESS_TOKEN}" }
- )
- rescue NetHTTPUtils::Error => e
- fail unless [500, 503].include? e.code
- sleep timeout += 1
- retry
- end ).sort_by{ |tweet| -tweet["id"] }.take_while do |tweet|
- tweet["id"] > id && (!File.exist?("id") || tweet["id"] > File.read("id").to_i)
- end.reverse_each do |tweet|
- title, text, contains_media = Tweet2titleNtext[tweet]
- result = BOT.json :post, "/api/submit", {
- sr: SUBREDDIT,
- kind: "self",
- title: title,
- text: text,
- }.tap{ |h| h.merge!({ flair_id: flair["id"] }) }
- unless result["json"]["errors"].empty?
- fail unless result["json"]["errors"].map(&:first) == ["ALREADY_SUB"]
- puts "ALREADY_SUB error for #{tweet["id"]}"
- end
- File.write "id", tweet["id"]
- abort if ENV["ONCE"]
- end
-
- puts "END LOOP #{Time.now}"
- sleep 300
- end
data/examples/johnnymarr/twitter.rb
@@ -1,80 +0,0 @@
- require "json"
- require "nethttputils"
-
- TWITTER_ACCESS_TOKEN = JSON.load(
- NetHTTPUtils.request_data "https://api.twitter.com/oauth2/token", :post,
- auth: File.read("twitter.token").split,
- form: {grant_type: :client_credentials}
- )["access_token"]
-
- Tweet2titleNtext = lambda do |tweet|
- pp tweet if ENV["TEST"]
- text = ""
- contains_media = false
- up = ->s{ s.split.map{ |w| "^#{w}" }.join " " }
-
- tweet_to_get_media_from = tweet["retweeted_status"] || tweet
- if tweet_to_get_media_from["extended_entities"] && !tweet_to_get_media_from["extended_entities"]["media"].empty?
- contains_media = true
- tweet_to_get_media_from["extended_entities"]["media"].each_with_index do |media, i|
- text.concat "* [Image #{i + 1}](#{media["media_url_https"]})\n\n"
- end
- end
- if !tweet_to_get_media_from["entities"]["urls"].empty?
- contains_media = true
- tweet_to_get_media_from["entities"]["urls"].each_with_index do |url, i|
- text.concat "* [Link #{i + 1}](#{url["expanded_url"]})\n\n"
- end
- end
-
- require "date"
- text.concat "^- #{
- up[tweet["user"]["name"]]
- } [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [#{
- up[Date.parse(tweet["created_at"]).strftime "%B %-d, %Y"]
- }](https://twitter.com/#{TWITTER}/status/#{tweet["id"]})"
- require "cgi"
- # [CGI::unescapeHTML(tweet["full_text"]).sub(/( https:\/\/t\.co\/[0-9a-zA-Z]{10})*\z/, ""), text, contains_media]
- [CGI::unescapeHTML(tweet["retweeted_status"] ? "RT: #{tweet["retweeted_status"]["full_text"]}" : tweet["full_text"]).sub(/(\s+https:\/\/t\.co\/[0-9a-zA-Z]{10})*\z/, ""), text, contains_media]
- end
- [
- [905764294687633408, true, "The Polish government & military high command is now evacuating Warsaw for Brest, 120 miles east: German armies are too close to the capital", "* [Image 1](https://pbs.twimg.com/media/DJHq71BXYAA6KJ0.jpg)\n\n" "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^""September ^7, ^2017](https://twitter.com/#{TWITTER}/status/905764294687633408)"],
- [915534673471733760, true, "In east Poland (now Soviet Ukraine) industry & farms to be collectivised, political parties banned, aristocrats & capitalists \"re-educated\".", "* [Image 1](https://pbs.twimg.com/media/DLSh2J9W4AACcOG.jpg)\n\n* [Image 2](https://pbs.twimg.com/media/DLSh4sKX0AEBaXq.jpg)\n\n^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^4, ^2017](https://twitter.com/#{TWITTER}/status/915534673471733760)"],
- [915208866408824832, true, "For 1st time, RAF planes dropping propaganda leaflets on Berlin itself, entitled \"Germans: these are your leaders!\"", "* [Image 1](https://pbs.twimg.com/media/DLN5jJ-XkAEUz9M.jpg)\n\n* [Link 1](https://www.psywar.org/product_1939EH158.php)\n\n" "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^3, ^2017](https://twitter.com/#{TWITTER}/status/915208866408824832)"],
- [914577848891006978, true, "\"In Poland, Russia pursued a cold policy of selfinterest. But clearly necessary for Russia… against Nazi menace.\"", "* [Link 1](https://www.youtube.com/watch?v=ygmP5A3n2JA)\n\n" "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^1, ^2017](https://twitter.com/#{TWITTER}/status/914577848891006978)"],
- [926581977372942336, false, "Finland rejects Soviet demand to surrender land near Leningrad & give Red Navy base in Hanko; Soviets now claim Finns' manner \"warlike\".", "^- ^WW2 ^Tweets ^from ^1940 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "November ^3, ^2017](https://twitter.com/#{TWITTER}/status/926581977372942336)"],
- [1007650044441329664, true, "RT: SOLD OUT | Tonight’s @Johnny_Marr signing at Rough Trade East is now completely sold out! Catch you in a bit. ‘Call The Comet’ is out now:", "* [Image 1](https://pbs.twimg.com/media/DfvdN1_WsAE_a3r.jpg)\n\n* [Link 1](https://roughtrade.com/gb/music/johnny-marr-call-the-comet)\n\n^- ^Johnny ^Marr [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^June ^15, ^2018](https://twitter.com/#{TWITTER}/status/1007650044441329664)"],
- [1007155648612581376, true, "Tomorrow. #CallTheComet", "* [Image 1](https://pbs.twimg.com/ext_tw_video_thumb/1007155601913204736/pu/img/IREVPkgUVHoQHfBB.jpg)\n\n" "^- ^Johnny ^Marr [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^June ^14, ^2018](https://twitter.com/#{TWITTER}/status/1007155648612581376)"],
- ].each do |id, contains_media_, title_, text_|
- title, text, contains_media = Tweet2titleNtext[ JSON.load NetHTTPUtils.request_data(
- "https://api.twitter.com/1.1/statuses/show.json",
- form: { id: id, tweet_mode: "extended" },
- header: { Authorization: "Bearer #{TWITTER_ACCESS_TOKEN}" },
- ) ]
- unless contains_media_ == contains_media
- puts "expected: #{contains_media_}"
- puts "got: #{contains_media}"
- abort "CONTAINS_MEDIA ERROR"
- end
- unless title_ == title
- puts "expected:\n#{title_.inspect}"
- puts "got:\n#{title.inspect}"
- abort "TITLE FORMATTING ERROR"
- end
- unless text_ == text
- puts "expected:\n#{text_.inspect}"
- puts "got:\n#{text.inspect}"
- abort "TEXT FORMATTING ERROR"
- end
- if ENV["TEST_POST"]
- pp BOT.json :post, "/api/submit", {
- sr: "#{SUBREDDIT}_TEST",
- kind: "self",
- title: title,
- text: text,
- }.tap{ |h| h.merge!({ flair_id: BOT.json(:get, "/r/#{SUBREDDIT}_TEST/api/link_flair").find{ |flair|
- flair["text"] == "Contains Media"
- }["id"] }) if contains_media }
- end
- end
- abort "OK" if ENV["TEST"]
data/examples/largeimages/Gemfile
@@ -1,11 +0,0 @@
- source "https://rubygems.org"
-
- gem "json"
-
- gem "nokogiri"
-
- gem "nethttputils", git: "git@github.com:nakilon/nethttputils.git"
- gem "directlink", git: "git@github.com:nakilon/directlink.git"
-
- gem "gcplogger", git: "git@github.com:nakilon/gcplogger.git"
- gem "google-cloud-error_reporting"
data/examples/largeimages/Gemfile.lock
@@ -1,105 +0,0 @@
- GIT
- remote: git@github.com:nakilon/gcplogger.git
- revision: 7c1451fac49bd0d242c6de43315ae6e9a70d8f7f
- tag: v0.1.1.0
- specs:
- gcplogger (0.1.1.0)
- google-cloud-logging (~> 1.4.0)
- public_suffix (~> 2.0)
-
- GEM
- remote: https://rubygems.org/
- specs:
- addressable (2.7.0)
- public_suffix (>= 2.0.2, < 5.0)
- directlink (0.0.9.0)
- addressable
- fastimage (~> 2.1.3)
- kramdown
- nethttputils (~> 0.4.0.0)
- nokogiri
- reddit_bot (~> 1.7.0)
- faraday (0.14.0)
- multipart-post (>= 1.2, < 3)
- fastimage (2.1.7)
- google-cloud-core (1.2.0)
- google-cloud-env (~> 1.0)
- google-cloud-env (1.0.1)
- faraday (~> 0.11)
- google-cloud-error_reporting (0.30.0)
- google-cloud-core (~> 1.2)
- google-gax (~> 1.0)
- stackdriver-core (~> 1.3)
- google-cloud-logging (1.4.0)
- google-cloud-core (~> 1.1)
- google-gax (~> 1.0)
- stackdriver-core (~> 1.2)
- google-gax (1.0.1)
- google-protobuf (~> 3.2)
- googleapis-common-protos (>= 1.3.5, < 2.0)
- googleauth (~> 0.6.2)
- grpc (>= 1.7.2, < 2.0)
- rly (~> 0.2.3)
- google-protobuf (3.5.1.2)
- googleapis-common-protos (1.3.7)
- google-protobuf (~> 3.0)
- googleapis-common-protos-types (~> 1.0)
- grpc (~> 1.0)
- googleapis-common-protos-types (1.0.1)
- google-protobuf (~> 3.0)
- googleauth (0.6.2)
- faraday (~> 0.12)
- jwt (>= 1.4, < 3.0)
- logging (~> 2.0)
- memoist (~> 0.12)
- multi_json (~> 1.11)
- os (~> 0.9)
- signet (~> 0.7)
- grpc (1.10.0)
- google-protobuf (~> 3.1)
- googleapis-common-protos-types (~> 1.0.0)
- googleauth (>= 0.5.1, < 0.7)
- json (2.2.0)
- jwt (2.1.0)
- kramdown (2.3.0)
- rexml
- little-plugger (1.1.4)
- logging (2.2.2)
- little-plugger (~> 1.1)
- multi_json (~> 1.10)
- memoist (0.16.0)
- mini_portile2 (2.4.0)
- multi_json (1.13.1)
- multipart-post (2.0.0)
- nethttputils (0.4.0.0)
- addressable
- nokogiri (1.10.8)
- mini_portile2 (~> 2.4.0)
- os (0.9.6)
- public_suffix (2.0.5)
- reddit_bot (1.7.6)
- json
- nethttputils (~> 0.4.0.0)
- rexml (3.2.4)
- rly (0.2.3)
- signet (0.8.1)
- addressable (~> 2.3)
- faraday (~> 0.9)
- jwt (>= 1.5, < 3.0)
- multi_json (~> 1.10)
- stackdriver-core (1.3.0)
- google-cloud-core (~> 1.2)
-
- PLATFORMS
- ruby
-
- DEPENDENCIES
- directlink (~> 0.0.9.0)
- gcplogger!
- google-cloud-error_reporting
- json
- nethttputils
- nokogiri
-
- BUNDLED WITH
- 2.0.2
data/examples/largeimages/main.rb
@@ -1,173 +0,0 @@
- ### THIS WAS MY THE VERY FIRST REDDIT BOT
-
-
- require "gcplogger"
- logger = GCPLogger.logger "largeimagesbot"
-
- fail "no ENV['ERROR_REPORTING_KEYFILE'] specified" unless ENV["ERROR_REPORTING_KEYFILE"]
- require "google/cloud/error_reporting"
- Google::Cloud::ErrorReporting.configure do |config|
- config.project_id = JSON.load(File.read ENV["ERROR_REPORTING_KEYFILE"])["project_id"]
- end
-
-
- require "directlink"
-
- require "nokogiri"
-
- require "../boilerplate"
- bot = RedditBot::Bot.new YAML.load_file "secrets.yaml"
-
- INCLUDE = %w{
- user/kjoneslol/m/sfwpornnetwork
-
- r/woahdude
- r/pic
-
- r/highres
- r/wallpapers
- r/wallpaper
- r/WQHD_Wallpaper
-
- r/oldmaps
- r/telephotolandscapes
- }
- EXCLUDE = %w{ foodporn powerwashingporn }
-
- checked = []
-
- search_url = lambda do |url|
- JSON.load( begin
- NetHTTPUtils.request_data "https://www.reddit.com/r/largeimages/search.json", form: {q: "url:#{url}", restrict_sr: "on"}, header: ["User-Agent", "ajsdjasdasd"]
- rescue NetHTTPUtils::Error => e
- raise unless [500, 503].include? e.code
- sleep 60
- retry
- end )["data"]["children"]
- end
- fail unless 1 == search_url["https://i.imgur.com/9JTxtjW.jpg"].size
-
- loop do
- begin
- logger.info "LOOP #{Time.now}"
- rescue => e
- puts "oops"
- Google::Cloud::ErrorReporting.report e
- sleep 5
- raise
- end
-
- [ [:source_ultireddit, 10000000, ( Nokogiri::XML( begin
- NetHTTPUtils.request_data ENV["FEEDPCBR_URL"]
- rescue NetHTTPUtils::Error => e
- raise unless [502, 504].include? e.code
- sleep 60
- retry
- end ).remove_namespaces!.xpath("feed/entry").map do |entry|
- [
- entry.at_xpath("id").text,
- entry.at_xpath("link[@rel='via']")["href"],
- entry.at_xpath("title").text,
- entry.at_xpath("category")["term"],
- entry.at_xpath("author/name").text,
- entry.at_xpath("link[@rel='alternate']")["href"],
- ]
- end ) ],
- [:source_reddit, 30000000, ( INCLUDE.flat_map do |sortasub|
- bot.new_posts(sortasub).take(100).map do |child|
- next if child["is_self"]
- next if EXCLUDE.include? child["subreddit"].downcase
- child.values_at(
- *%w{ id url title subreddit author permalink }
- ).tap{ |_| _.last.prepend "https://www.reddit.com" }
- end.compact
- end ) ],
- ].each do |source, min_resolution, entries|
- logger.warn "#{source}.size: #{entries.size}"
- entries.each do |id, url, title, subreddit, author, permalink|
- author.downcase!
- next if checked.include? id
- checked << id
- # next if Gem::Platform.local.os == "darwin" # prevent concurrent posting
- logger.debug "image url for #{id}: #{url}"
- next logger.warn "skipped a post by /u/sjhill" if author == "sjhill" # opt-out
- next logger.warn "skipped a post by /u/redisforever" if author == "redisforever" # opt-out
- next logger.warn "skipped a post by /u/bekalaki" if author == "bekalaki" # 9 ways to divide a karmawhore
- next logger.warn "skipped a post by /u/cherryblackeyes" if author == "cherryblackeyes" # he's not nice
- next logger.warn "skipped a post by /u/abel_a_kay" if author == "abel_a_kay" # posting very similar images of the same thing for the history
- next logger.warn "skipped gifv" if ( begin
- URI url
- rescue URI::InvalidURIError
- require "addressable"
- URI Addressable::URI.escape url
- end ).host.split(?.) == %w{ v redd it }
-
- t = begin
- DirectLink url, 60
- rescue *DirectLink::NORMAL_EXCEPTIONS => e
- next logger.error "skipped (#{e}) #{url} from http://redd.it/#{id}"
- end
- logger.debug "DirectLink: #{t.inspect}"
- tt = t.is_a?(Array) ? t : [t]
- next logger.error "probably crosspost of a self post: http://redd.it/#{id}" if tt.empty?
- next logger.info "skipped low resolution #{source}" unless min_resolution <= tt.first.width * tt.first.height
- # puts "https://www.reddit.com/r/LargeImages/search.json?q=url%3A#{CGI.escape url}&restrict_sr=on"
- resolution = "[#{tt.first.width}x#{tt.first.height}]"
- next logger.warn "already submitted #{resolution} #{id}: '#{url}'" unless Gem::Platform.local.os == "darwin" || search_url[url].empty?
-
- system "curl -s '#{tt.first.url}' -o temp --retry 5" or fail
- next logger.warn "skipped <2mb id=#{id}" if 2000000 > File.size("temp")
- if "mapporn" == subreddit.downcase
- `vips pngsave temp temp.png`
- next logger.warn "skipped /r/mapporn <10mb PNG id=#{id}" if 10000000 > File.size("temp.png")
- end
-
- title = "#{resolution}#{
- " [#{tt.size} images]" if tt.size > 1
- } #{
- title.sub(/\s*\[?#{tt.first.width}\s*[*x×]\s*#{tt.first.height}\]?\s*/i, " ").
- sub("[OC]", " ").gsub(/\s+/, " ").strip
- } /r/#{subreddit}".gsub(/\s+\(\s+\)\s+/, " ").sub(/(?<=.{297}).+/, "...")
- logger.warn "new post #{source}: #{url} #{title.inspect}"
- unless Gem::Platform.local.os == "darwin"
- result = bot.json :post,
- "/api/submit",
- {
- kind: "link",
- url: url,
- sr: "LargeImages",
- title: title,
- }
- next unless result["json"]["errors"].empty?
- logger.info "post url: #{result["json"]["data"]["url"]}"
- end
- # {"json"=>
- # {"errors"=>[],
- # "data"=>
- # {"url"=>
- # "https://www.reddit.com/r/LargeImages/comments/3a9rel/2594x1724_overlooking_wildhorse_lake_from_near/",
- # "id"=>"3a9rel",
- # "name"=>"t3_3a9rel"}}}
- line1 = "[Original thread](#{permalink}) by /u/#{author}"
- line2 = "Direct link#{" (the largest image)" if tt.size > 1}: #{tt.first.url}"
- line3 = [
- "Direct links to all other images in album:",
- tt.map(&:url) - [tt.first.url]
- ] if tt.size > 1
- text = [line1, line2, line3].compact.join(" \n")
- logger.info "new comment: #{text.inspect}"
- unless Gem::Platform.local.os == "darwin"
- result = bot.leave_a_comment "#{result["json"]["data"]["name"]}", text.sub(/(?<=.{9000}).+/m, "...")
- unless result["json"]["errors"].empty?
- logger.error result.inspect
- fail "failed to leave comment"
- end
- end
-
- abort if Gem::Platform.local.os == "darwin"
- end
- end
-
- logger.info "END LOOP #{Time.now}"
- sleep 300
- end