reddit_bot 1.7.7 → 1.10.0

Files changed (72)
  1. checksums.yaml +4 -4
  2. data/lib/reddit_bot.rb +36 -21
  3. data/reddit_bot.gemspec +9 -11
  4. metadata +12 -80
  5. data/.gitignore +0 -5
  6. data/Gemfile +0 -5
  7. data/README.md +0 -101
  8. data/Rakefile +0 -6
  9. data/examples/.bashrc +0 -2
  10. data/examples/.gitignore +0 -2
  11. data/examples/Gemfile.lock +0 -17
  12. data/examples/boilerplate.rb +0 -12
  13. data/examples/councilofricks/Gemfile +0 -4
  14. data/examples/councilofricks/Gemfile.lock +0 -17
  15. data/examples/councilofricks/main.rb +0 -58
  16. data/examples/cptflairbot3/.bashrc +0 -1
  17. data/examples/cptflairbot3/Code.gs +0 -13
  18. data/examples/cptflairbot3/Gemfile +0 -5
  19. data/examples/cptflairbot3/Gemfile.lock +0 -74
  20. data/examples/cptflairbot3/app.js +0 -40
  21. data/examples/cptflairbot3/casual/casualpokemontrades.htm +0 -910
  22. data/examples/cptflairbot3/casual/script.js +0 -55
  23. data/examples/cptflairbot3/casual/style.css +0 -1099
  24. data/examples/cptflairbot3/log.htm +0 -1
  25. data/examples/cptflairbot3/main.rb +0 -62
  26. data/examples/cptflairbot3/package.json +0 -6
  27. data/examples/cptflairbot3/pubsub.rb +0 -30
  28. data/examples/cptflairbot3/update_gas_hook_secret.rb +0 -4
  29. data/examples/devflairbot/Gemfile +0 -6
  30. data/examples/devflairbot/Gemfile.lock +0 -74
  31. data/examples/devflairbot/main.rb +0 -81
  32. data/examples/dut/Gemfile +0 -4
  33. data/examples/dut/Gemfile.lock +0 -24
  34. data/examples/dut/main.rb +0 -41
  35. data/examples/get_dimensions.rb +0 -212
  36. data/examples/iostroubleshooting/Gemfile +0 -5
  37. data/examples/iostroubleshooting/Gemfile.lock +0 -16
  38. data/examples/iostroubleshooting/main.rb +0 -36
  39. data/examples/johnnymarr/Gemfile +0 -3
  40. data/examples/johnnymarr/Gemfile.lock +0 -17
  41. data/examples/johnnymarr/main.rb +0 -54
  42. data/examples/johnnymarr/twitter.rb +0 -80
  43. data/examples/largeimages/Gemfile +0 -11
  44. data/examples/largeimages/Gemfile.lock +0 -105
  45. data/examples/largeimages/main.rb +0 -173
  46. data/examples/largeimagesreview/Gemfile +0 -4
  47. data/examples/largeimagesreview/Gemfile.lock +0 -15
  48. data/examples/largeimagesreview/main.rb +0 -43
  49. data/examples/mlgtv/Gemfile +0 -4
  50. data/examples/mlgtv/Gemfile.lock +0 -23
  51. data/examples/mlgtv/channels.txt +0 -127
  52. data/examples/mlgtv/main.rb +0 -160
  53. data/examples/net_http_utils.rb +0 -148
  54. data/examples/oneplus/Gemfile +0 -5
  55. data/examples/oneplus/Gemfile.lock +0 -26
  56. data/examples/oneplus/main.rb +0 -43
  57. data/examples/realtimeww2/.bashrc +0 -1
  58. data/examples/realtimeww2/Gemfile +0 -3
  59. data/examples/realtimeww2/Gemfile.lock +0 -17
  60. data/examples/realtimeww2/main.rb +0 -129
  61. data/examples/sexypizza/Gemfile +0 -3
  62. data/examples/sexypizza/Gemfile.lock +0 -15
  63. data/examples/sexypizza/main.rb +0 -33
  64. data/examples/unisa/Gemfile +0 -4
  65. data/examples/unisa/Gemfile.lock +0 -24
  66. data/examples/unisa/main.rb +0 -41
  67. data/examples/wallpaper/Gemfile +0 -5
  68. data/examples/wallpaper/Gemfile.lock +0 -34
  69. data/examples/wallpaper/main.rb +0 -27
  70. data/examples/yayornay/Gemfile +0 -3
  71. data/examples/yayornay/Gemfile.lock +0 -15
  72. data/examples/yayornay/main.rb +0 -33
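
Aside from `data/lib/reddit_bot.rb`, the gemspec, checksums, and metadata, every change in this release is the removal of repository files (README, Rakefile, Gemfiles, and the whole `examples/` tree) from the packaged gem. The removed examples shown below all share one pattern, so as a reading aid here is a minimal sketch of it — assuming a `secrets.yaml` holding the bot's Reddit credentials; the subreddit name is illustrative:

require "reddit_bot"
require "yaml"

# The pattern every removed example follows: build a bot from secrets.yaml,
# then poll Reddit's JSON API in a throttled loop via Bot#json.
bot = RedditBot::Bot.new YAML.load_file "secrets.yaml"

loop do
  # same call shape as in the removed examples: bot.json(:get, path)
  bot.json(:get, "/r/somesubreddit/new")["data"]["children"].each do |post|
    p post["data"].values_at("id", "title", "url")
  end
  sleep 300  # all of the removed bots sleep between iterations
end
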
data/examples/mlgtv/main.rb
@@ -1,160 +0,0 @@
- require_relative File.join "../boilerplate"
-
- BOT = RedditBot::Bot.new YAML.load_file "secrets.yaml"
-
- SUBREDDIT = "codcompetitive"
-
- loop do
-   Hearthbeat.beat "u_OpTicNaDeBoT_r_CoDCompetitive", 70 unless Gem::Platform.local.os == "darwin"
-   catch :loop do
-
-     text = " Live Streams\n\n" + [].tap do |list|
-
-       throw :loop unless statuses = JSON.parse( begin
-         NetHTTPUtils.request_data("http://streamapi.majorleaguegaming.com/service/streams/all")[/\{.+\}/m]
-       rescue NetHTTPUtils::Error => e
-         fail unless e.code == 408
-         puts 408
-         sleep 60
-         retry
-       end )["data"]["items"]
-       games = JSON.parse(
-         NetHTTPUtils.request_data("http://www.majorleaguegaming.com/api/games/all")[/\{.+\}/m]
-       )["data"]["items"]
-       begin
-         JSON.parse begin
-           NetHTTPUtils.request_data("http://www.majorleaguegaming.com/api/channels/all?fields=name,url,tags,stream_name,game_id")
-         rescue NetHTTPUtils::Error => e
-           fail unless e.code == 404
-           puts 404
-           sleep 60
-           retry
-         end
-       rescue JSON::ParserError
-         puts "JSON::ParserError"
-         sleep 60
-         retry
-       end["data"]["items"].each do |item1|
-         next unless item1["tags"].include? "COD Pro League"
-         status = statuses.find{ |item2| item1["stream_name"] == item2["stream_name"] }
-         next unless status && status["status"] > 0
-         game = games.find{ |game| game["id"] == item1["game_id"] }
-         list << "* [](#mlg) [](##{
-           ["?", "live", "replay"][status["status"]]
-         }) #{
-           "[](##{ {
-             "Call of Duty: Modern Warfare 2" => "codmw2",
-             "Call of Duty: Modern Warfare 3" => "codmw3",
-             "Call of Duty: Black Ops" => "codbo12",
-             "Call of Duty: Black Ops II" => "codbo2",
-             "Call of Duty: Black Ops III" => "codbo3",
-             "Call of Duty: Advanced Warfare" => "codaw",
-             "Call of Duty: Ghosts" => "codghosts2",
-             "Call of Duty: Infinite Warfare" => "codiw",
-           }[game["name"]] }) " if game
-         }[**#{
-           item1["name"]
-         }**](#{
-           item1["url"]
-         })"
-       end
-
-       # to update access_token:
-       # 0. see 'client_id' here https://www.twitch.tv/settings/connections and 'client_secret' from local ./readme file
-       # 1. get 'code' by visiting in browser: https://api.twitch.tv/kraken/oauth2/authorize?response_type=code&client_id=*******&redirect_uri=http://www.example.com/unused/redirect/uri&scope=channel_read channel_feed_read
-       # 2. NetHTTPUtils.request_data("https://api.twitch.tv/kraken/oauth2/token", :post, form: {client_id: "*******", client_secret: "*****", grant_type: "authorization_code", redirect_uri: "http://www.example.com/unused/redirect/uri", code: "*******"})
-       twitch = lambda do |url|
-         max = 1000
-         data_key = "top"
-         next_key = "_links"
-
-         request = lambda do |url, acc|
-           uri = URI.parse url
-           query = Hash[URI.decode_www_form uri.query || ""]
-           # query.merge!({ "limit" => max }) if max
-           uri.query = URI.encode_www_form query.merge( {
-             "access_token" => File.read("twitch.token").strip,
-             "client_id" => File.read("client.id").strip,
-           } )
-           json = JSON.parse NetHTTPUtils.request_data uri.to_s
-           unless json[data_key]
-             pp json
-             fail
-           end
-           acc = acc + json[data_key]
-           next acc.take max if max && max <= acc.size
-           request[json[next_key]["next"], acc]
-         end
-
-         request[url, []]
-       end
-       # ? absent on twitch ? "Call of Duty: Modern Warfare 2" => "codmw2"
-       # t = twitch["https://api.twitch.tv/kraken/games/top?limit=100"].map{ |hash| fail hash.keys.to_s unless hash.keys == %w{ game viewers channels }; hash.values.first["name"] }
-       # pp t.grep("/call of duty/i")
-       # pp t.grep("/warfare/i")
-       # ? absent in css ? "Call of Duty: United Offensive"
-       {
-         "Call of Duty: Infinite Warfare" => "codiw",
-         "Call of Duty: Modern Warfare Remastered" => "cod4",
-         "Call of Duty 4: Modern Warfare" => "cod4",
-         "Call of Duty: Modern Warfare 3" => "codmw3",
-         "Call of Duty: Black Ops" => "codbo12",
-         "Call of Duty: Black Ops II" => "codbo2",
-         "Call of Duty: Black Ops III" => "codbo3",
-         "Call of Duty: Advanced Warfare" => "codaw",
-         "Call of Duty: Ghosts" => "codghosts2",
-         "Call of Duty: World at War" => "codwaw",
-         "Call of Duty: WWII" => "codwwii",
-         "Modern Warfare 2" => "codmw2",
-       }.each do |game, css|
-         (begin
-           require "cgi"
-           begin
-             t = NetHTTPUtils.get_response "https://api.twitch.tv/kraken/streams?game=#{CGI::escape game}&access_token=#{File.read("twitch.token").strip}&client_id=#{File.read("client.id").strip}&channel=#{File.read("channels.txt").split.join ?,}"
-           end while t.code == 500
-           JSON.parse t.body
-         rescue JSON::ParserError
-           puts "JSON::ParserError"
-           sleep 60
-           retry
-         end["streams"] || []).each do |channel|
-           list << "* [](#twitch) [](#live) #{
-             "[](##{css}) "
-           }[**#{
-             channel["channel"]["display_name"]
-           }**](#{
-             channel["channel"]["url"]
-           })"
-         end
-       end
-
-     end.join(" \n") + "\n"
-
-     settings = BOT.json(:get, "/r/#{SUBREDDIT}/about/edit")["data"]
-     # https://github.com/praw-dev/praw/blob/c45e5f6ca0c5cd9968b51301989eb82740f8dc85/praw/__init__.py#L1592
-     settings.store "sr", settings.delete("subreddit_id")
-     settings.store "lang", settings.delete("language")
-     settings.store "link_type", settings.delete("content_options")
-     settings.store "type", settings.delete("subreddit_type")
-     settings.store "header-title", settings.delete("header_hover_text") || ""
-     settings["domain"] ||= ""
-     settings["submit_link_label"] ||= ""
-     settings["submit_text_label"] ||= ""
-     settings["allow_top"] = settings["allow_top"]
-     settings.delete "default_set"
-
-     prefix, postfix = CGI.unescapeHTML(settings["description"]).split(/(?<=\n#####)\s*Live Streams.+?(?=\n#+)/im)
-     unless postfix
-       puts "!!! can't parse sidebar !!!"
-       throw :loop
-     end
-     next puts "nothing to change" if prefix + text + postfix == CGI.unescapeHTML(settings["description"])
-
-     puts "updating sidebar..."
-     settings["description"] = prefix + text + postfix
-     _ = BOT.json :post, "/api/site_admin", settings.to_a
-     fail _.inspect if _ != {"json"=>{"errors"=>[]}} && !(_["json"]["errors"].map(&:first) - ["BAD_CAPTCHA"]).empty?
-
-   end
-   sleep 300
- end
data/examples/net_http_utils.rb
@@ -1,148 +0,0 @@
- # TODO deprecate in favor of the gem nethttputils
-
- require "net/http"
- require "openssl"
-
- require "logger"
-
-
- module NetHTTPUtils
-   class << self
-
-     attr_accessor :logger
-
-     # private?
-     def get_response url, mtd = :get, form: {}, header: [], auth: nil, timeout: 30, patch_request: nil, &block
-       # form = Hash[form.map{ |k, v| [k.to_s, v] }]
-       uri = URI.parse url
-       cookies = {}
-       prepare_request = lambda do |uri|
-         case mtd
-         when :get ; Net::HTTP::Get
-         when :post ; Net::HTTP::Post
-         when :put ; Net::HTTP::Put
-         when :delete ; Net::HTTP::Delete
-         else ; raise "unknown method #{mtd}"
-         end.new(uri).tap do |request| # somehow Get eats even raw url, not URI object
-           patch_request.call uri, form, request if patch_request
-           request.basic_auth *auth if auth
-           header.each{ |k, v| request[k] = v }
-           request["cookie"] = [*request["cookie"], cookies.map{ |k, v| "#{k}=#{v}" }].join "; " unless cookies.empty?
-           request.set_form_data form unless form.empty?
-           stack = caller.reverse.map do |level|
-             /((?:[^\/:]+\/)?[^\/:]+):([^:]+)/.match(level).captures
-           end.chunk(&:first).map do |file, group|
-             "#{file}:#{group.map(&:last).chunk{|_|_}.map(&:first).join(",")}"
-           end
-           logger.info request.path
-           logger.debug request.each_header.to_a.to_s
-           logger.debug stack.join " -> "
-           logger.debug request
-         end
-       end
-       request = prepare_request[uri]
-       start_http = lambda do |uri|
-         begin
-           Net::HTTP.start(
-             uri.host, uri.port,
-             use_ssl: uri.scheme == "https",
-             verify_mode: OpenSSL::SSL::VERIFY_NONE,
-             # read_timeout: 5,
-           ).tap do |http|
-             http.read_timeout = timeout #if timeout
-             http.open_timeout = timeout #if timeout
-             http.set_debug_output STDERR if logger.level == Logger::DEBUG # use `logger.debug?`?
-           end
-         rescue Errno::ECONNREFUSED => e
-           e.message.concat " to #{uri}" # puts "#{e} to #{uri}"
-           raise e
-         rescue Errno::EHOSTUNREACH, Errno::ENETUNREACH, Errno::ECONNRESET, SocketError, OpenSSL::SSL::SSLError => e
-           logger.warn "retrying in 5 seconds because of #{e.class}"
-           sleep 5
-           retry
-         rescue Errno::ETIMEDOUT
-           logger.warn "ETIMEDOUT, retrying in 5 minutes"
-           sleep 300
-           retry
-         end
-       end
-       http = start_http[uri]
-       do_request = lambda do |request|
-         response = begin
-           http.request request, &block
-         rescue Errno::ECONNRESET, Errno::ECONNREFUSED, Net::ReadTimeout, Net::OpenTimeout, Zlib::BufError, OpenSSL::SSL::SSLError => e
-           logger.error "retrying in 30 seconds because of #{e.class} at: #{request.uri}"
-           sleep 30
-           retry
-         end
-         response.to_hash.fetch("set-cookie", []).each{ |c| k, v = c.split(?=); cookies[k] = v[/[^;]+/] }
-         case response.code
-         when /\A3\d\d$/
-           logger.info "redirect: #{response["location"]}"
-           new_uri = URI.join(request.uri, response["location"])
-           new_host = new_uri.host
-           if http.address != new_host ||
-              http.port != new_uri.port ||
-              http.use_ssl? != (new_uri.scheme == "https")
-             logger.debug "changing host from '#{http.address}' to '#{new_host}'"
-             http = start_http[new_uri]
-           end
-           do_request.call prepare_request[new_uri]
-         when "404"
-           logger.error "404 at #{request.method} #{request.uri} with body: #{
-             response.body.is_a?(Net::ReadAdapter) ? "impossible to reread Net::ReadAdapter -- check the IO you've used in block form" : response.body.tap do |body|
-               body.replace body.strip.gsub(/<[^>]*>/, "") if body["<html>"]
-             end.inspect
-           }"
-           response
-         when /\A50\d$/
-           logger.error "#{response.code} at #{request.method} #{request.uri} with body: #{response.body.inspect}"
-           response
-         else
-           logger.info "code #{response.code} at #{request.method} #{request.uri}#{
-             " and so #{url}" if request.uri.to_s != url
-           } from #{
-             [__FILE__, caller.map{ |i| i[/\d+/] }].join ?:
-           } with body: #{
-             response.body.tap do |body|
-               body.replace body.strip.gsub(/<[^>]*>/, "") if body["<html>"]
-             end.inspect
-           }" unless response.code.start_with? "20"
-           response
-         end
-       end
-       do_request[request].tap do |response|
-         cookies.each{ |k, v| response.add_field "Set-Cookie", "#{k}=#{v};" }
-         logger.debug response.to_hash
-       end
-     end
-
-     def request_data *args
-       response = get_response *args
-       throw :"404" if "404" == response.code
-       throw :"500" if "500" == response.code
-       response.body
-     end
-
-   end
-   self.logger = Logger.new STDOUT
-   self.logger.level = ENV["LOGLEVEL_#{name}"] ? Logger.const_get(ENV["LOGLEVEL_#{name}"]) : Logger::WARN
-   self.logger.formatter = lambda do |severity, datetime, progname, msg|
-     "#{severity.to_s[0]} #{datetime.strftime "%y%m%d %H%M%S"} : #{name} : #{msg}\n"
-   end
- end
-
-
- if $0 == __FILE__
-   print "self testing... "
-
-   fail unless NetHTTPUtils.request_data("http://httpstat.us/200") == "200 OK"
-   fail unless NetHTTPUtils.get_response("http://httpstat.us/404").body == "404 Not Found"
-   catch(:"404"){ fail NetHTTPUtils.request_data "http://httpstat.us/404" }
-   # TODO raise?
-   fail unless NetHTTPUtils.request_data("http://httpstat.us/400") == "400 Bad Request"
-   fail unless NetHTTPUtils.get_response("http://httpstat.us/500").body == "500 Internal Server Error"
-   catch(:"500"){ fail NetHTTPUtils.request_data "http://httpstat.us/500" }
-
-   puts "OK #{__FILE__}"
- end
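
The file above carries its own deprecation note: this vendored `NetHTTPUtils` was superseded by the `nethttputils` gem, which the remaining examples in this diff depend on. A minimal sketch of the same calls through the gem, assuming its module interface matches this vendored copy (the realtimeww2 example further down uses exactly these call shapes); httpbin.org is just an illustrative endpoint:

require "nethttputils"

# GET: request_data returns the response body as a String
puts NetHTTPUtils.request_data "https://httpbin.org/get"

# POST with HTTP basic auth and form fields, the same shape as the
# Twitter token request in realtimeww2/main.rb below
puts NetHTTPUtils.request_data "https://httpbin.org/post", :post,
  auth: ["user", "password"],
  form: {grant_type: :client_credentials}
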
data/examples/oneplus/Gemfile
@@ -1,5 +0,0 @@
- source "https://rubygems.org"
-
- gem "reddit_bot", "~>1.1.5"
- gem "fastimage", "1.7.0"
- gem "net_http_utils", ?0, gist: "97549ceb58d21e1fcbc0e6cdaf92fce8"
data/examples/oneplus/Gemfile.lock
@@ -1,26 +0,0 @@
- GIT
-   remote: https://gist.github.com/97549ceb58d21e1fcbc0e6cdaf92fce8.git
-   revision: 54c47ab3f9665f3e35a8e94fdd267e3743c736a9
-   specs:
-     net_http_utils (0)
-
- GEM
-   remote: https://rubygems.org/
-   specs:
-     addressable (2.4.0)
-     fastimage (1.7.0)
-       addressable (~> 2.3, >= 2.3.5)
-     json (2.0.2)
-     reddit_bot (1.1.8)
-       json
-
- PLATFORMS
-   ruby
-
- DEPENDENCIES
-   fastimage (= 1.7.0)
-   net_http_utils (= 0)!
-   reddit_bot (~> 1.1.5)
-
- BUNDLED WITH
-    1.13.1
data/examples/oneplus/main.rb
@@ -1,43 +0,0 @@
- require_relative "../boilerplate"
-
- BOT = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), ignore_captcha: true
- SUBREDDIT = "oneplus"
-
- if Gem::Platform.local.os == "darwin"
-   require_relative "../../../../dimensioner/get_dimensions"
- else
-   require_relative "#{Dir.home}/get_dimensions"
- end
-
- checked = []
- loop do
-   Hearthbeat.beat "u_oneplus_mod_r_oneplus", 310 unless Gem::Platform.local.os == "darwin"
-   puts "LOOP #{Time.now}"
-
-   BOT.json(:get, "/r/#{SUBREDDIT}/new")["data"]["children"].each do |post|
-     id, url, title, subreddit = post["data"].values_at(*%w{ id url title subreddit })
-     next if checked.include? id
-     checked.push id
-     redd_it = "https://redd.it/#{id}"
-     next puts "skipped #{url} from #{redd_it}" if :skipped == _ = GetDimensions::get_dimensions(url)
-     next puts "unable #{url} from #{redd_it}" unless _
-     width, height, * = _
-     result = ([1080, 1920] != resolution = [width, height])
-     puts "#{result} #{id} [#{resolution}] #{title} #{url}"
-     next if result
-
-     ### delete
-     BOT.json :post, "/api/remove",
-       id: "t3_#{id}",
-       spam: false
-     ### modmail
-     BOT.json :post, "/api/compose",
-       subject: "possible screenshot detected",
-       text: "please, investigate: #{redd_it}",
-       to: "/r/#{SUBREDDIT}"
-
-   end or puts "/r/#{SUBREDDIT} seems to be 403-ed"
-
-   puts "END LOOP #{Time.now}"
-   sleep 300
- end
data/examples/realtimeww2/.bashrc
@@ -1 +0,0 @@
- echo "TEST=_ bundle exec ruby main.rb"
data/examples/realtimeww2/Gemfile
@@ -1,3 +0,0 @@
- source "https://rubygems.org"
-
- gem "reddit_bot"
data/examples/realtimeww2/Gemfile.lock
@@ -1,17 +0,0 @@
- GEM
-   remote: https://rubygems.org/
-   specs:
-     json (2.2.0)
-     nethttputils (0.3.2.6)
-     reddit_bot (1.7.1)
-       json
-       nethttputils (~> 0.3.2.0)
-
- PLATFORMS
-   ruby
-
- DEPENDENCIES
-   reddit_bot
-
- BUNDLED WITH
-    2.0.1
data/examples/realtimeww2/main.rb
@@ -1,129 +0,0 @@
- require_relative "../boilerplate"
-
- require "nethttputils"
- TWITTER_ACCESS_TOKEN = JSON.load(
-   NetHTTPUtils.request_data "https://api.twitter.com/oauth2/token", :post,
-     auth: File.read("twitter.token").split,
-     form: {grant_type: :client_credentials}
- )["access_token"]
-
- SUBREDDIT = "RealTimeWW2"
- BOT = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), subreddit: SUBREDDIT
- TWITTER = "RealTimeWWII"
-
- tweet2titleNtext = lambda do |tweet|
-   pp tweet if ENV["TEST"]
-   text = ""
-   contains_media = false
-   up = ->s{ s.split.map{ |w| "^#{w}" }.join " " }
-   if tweet["extended_entities"] && !tweet["extended_entities"]["media"].empty?
-     contains_media = true
-     tweet["extended_entities"]["media"].each_with_index do |media, i|
-       text.concat "* [Image #{i + 1}](#{media["media_url_https"]})\n\n"
-     end
-   end
-   if !tweet["entities"]["urls"].empty?
-     contains_media = true
-     tweet["entities"]["urls"].each_with_index do |url, i|
-       text.concat "* [Link #{i + 1}](#{url["expanded_url"]})\n\n"
-     end
-   end
-   text.concat "^- #{
-     up[tweet["user"]["name"]]
-   } [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [#{
-     up[Date.parse(tweet["created_at"]).strftime "%B %-d, %Y"]
-   }](https://twitter.com/#{TWITTER}/status/#{tweet["id"]})"
-   require "cgi"
-   [CGI::unescapeHTML(tweet["full_text"]).sub(/( https:\/\/t\.co\/[0-9a-zA-Z]{10})*\z/, ""), text, contains_media]
- end
- [
-   [905764294687633408, true, "The Polish government & military high command is now evacuating Warsaw for Brest, 120 miles east: German armies are too close to the capital", "* [Image 1](https://pbs.twimg.com/media/DJHq71BXYAA6KJ0.jpg)\n\n" "^- ^WW2 ^Tweets ^from ^1942 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^""September ^7, ^2017](https://twitter.com/#{TWITTER}/status/905764294687633408)"],
-   [915534673471733760, true, "In east Poland (now Soviet Ukraine) industry & farms to be collectivised, political parties banned, aristocrats & capitalists \"re-educated\".", "* [Image 1](https://pbs.twimg.com/media/DLSh2J9W4AACcOG.jpg)\n\n* [Image 2](https://pbs.twimg.com/media/DLSh4sKX0AEBaXq.jpg)\n\n^- ^WW2 ^Tweets ^from ^1942 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^4, ^2017](https://twitter.com/#{TWITTER}/status/915534673471733760)"],
-   [915208866408824832, true, "For 1st time, RAF planes dropping propaganda leaflets on Berlin itself, entitled \"Germans: these are your leaders!\"", "* [Image 1](https://pbs.twimg.com/media/DLN5jJ-XkAEUz9M.jpg)\n\n* [Link 1](https://www.psywar.org/product_1939EH158.php)\n\n" "^- ^WW2 ^Tweets ^from ^1942 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^3, ^2017](https://twitter.com/#{TWITTER}/status/915208866408824832)"],
-   [914577848891006978, true, "\"In Poland, Russia pursued a cold policy of selfinterest. But clearly necessary for Russia… against Nazi menace.\"", "* [Link 1](https://www.youtube.com/watch?v=ygmP5A3n2JA)\n\n" "^- ^WW2 ^Tweets ^from ^1942 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "October ^1, ^2017](https://twitter.com/#{TWITTER}/status/914577848891006978)"],
-   [926581977372942336, false, "Finland rejects Soviet demand to surrender land near Leningrad & give Red Navy base in Hanko; Soviets now claim Finns' manner \"warlike\".", "^- ^WW2 ^Tweets ^from ^1942 [^\\(@#{TWITTER}\\)](https://twitter.com/#{TWITTER}) ^| [^" "November ^3, ^2017](https://twitter.com/#{TWITTER}/status/926581977372942336)"],
- ].each do |id, contains_media_, title_, text_|
-   title, text, contains_media = tweet2titleNtext[ JSON.load NetHTTPUtils.request_data(
-     "https://api.twitter.com/1.1/statuses/show.json",
-     form: { id: id, tweet_mode: "extended" },
-     header: { Authorization: "Bearer #{TWITTER_ACCESS_TOKEN}" },
-   ) ]
-   unless contains_media_ == contains_media
-     puts "expected: #{contains_media_}"
-     puts "got: #{contains_media}"
-     abort "CONTAINS_MEDIA ERROR"
-   end
-   unless title_ == title
-     puts "expected:\n#{title_.inspect}"
-     puts "got:\n#{title.inspect}"
-     abort "TITLE FORMATTING ERROR"
-   end
-   unless text_ == text
-     puts "expected:\n#{text_.inspect}"
-     puts "got:\n#{text.inspect}"
-     abort "TEXT FORMATTING ERROR"
-   end
-   if ENV["TEST_POST"]
-     pp BOT.json :post, "/api/submit", {
-       sr: "RealTimeWW2_TEST",
-       kind: "self",
-       title: title,
-       text: text,
-     }.tap{ |h| h.merge!({ flair_id: BOT.json(:get, "/r/RealTimeWW2_TEST/api/link_flair").find{ |flair|
-       flair["text"] == "Contains Media"
-     }["id"] }) if contains_media }
-   end
- end
- abort "OK" if ENV["TEST"]
-
- loop do
-   id = BOT.new_posts.find do |post|
-     /\(https:\/\/twitter\.com\/#{TWITTER}\/status\/(\d{18,})\)/i =~ post["selftext"] and break $1
-   end.to_i
-   fail "no tweets found in subreddit" if id.zero? unless %w{ RealTimeWW2_TEST }.include? SUBREDDIT
-
-   fail unless flair = BOT.json(:get, "/r/#{SUBREDDIT}/api/link_flair").find do |flair|
-     flair["text"] == "Contains Media"
-   end
-
-   timeout = 1
-   JSON.load( begin
-     NetHTTPUtils.request_data(
-       "https://api.twitter.com/1.1/statuses/user_timeline.json",
-       form: { screen_name: TWITTER, count: 200, tweet_mode: "extended" },
-       header: { Authorization: "Bearer #{TWITTER_ACCESS_TOKEN}" }
-     ) do |res|
-       next unless res.key? "x-rate-limit-remaining"
-       remaining = res.fetch("x-rate-limit-remaining").to_i
-       next if 100 < remaining
-       t = (res.fetch("x-rate-limit-reset").to_i - Time.now.to_i + 1).fdiv remaining
-       puts "sleep #{t}"
-       sleep t
-     end
-   rescue NetHTTPUtils::Error => e
-     fail unless [500, 503].include? e.code
-     sleep timeout
-     timeout *= 2
-     retry
-   end ).reverse_each do |tweet|
-     next if tweet["id"] <= id
-     # next unless tweet["id"] == 905724018996772865 # two media files
-     title, text, contains_media = tweet2titleNtext[tweet]
-     result = BOT.json :post, "/api/submit", {
-       sr: SUBREDDIT,
-       kind: "self",
-       title: title,
-       text: text,
-     }.tap{ |h| h.merge!({ flair_id: flair["id"] }) if contains_media }
-     pp result
-     if result["json"]["errors"].empty?
-       abort if ENV["ONCE"]
-       next
-     end
-     fail unless result["json"]["errors"].map(&:first) == ["ALREADY_SUB"]
-     puts "ALREADY_SUB error for #{tweet["id"]}"
-   end
-
-   puts "END LOOP #{Time.now}"
-   sleep 300
- end
data/examples/sexypizza/Gemfile
@@ -1,3 +0,0 @@
- source "https://rubygems.org"
-
- gem "reddit_bot", "~>1.3.0"
data/examples/sexypizza/Gemfile.lock
@@ -1,15 +0,0 @@
- GEM
-   remote: https://rubygems.org/
-   specs:
-     json (2.1.0)
-     reddit_bot (1.3.1)
-       json
-
- PLATFORMS
-   ruby
-
- DEPENDENCIES
-   reddit_bot (~> 1.3.0)
-
- BUNDLED WITH
-    1.17.1
data/examples/sexypizza/main.rb
@@ -1,33 +0,0 @@
- require_relative "../boilerplate"
-
- BOT = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), ignore_captcha: true
- SUBREDDIT = "sexypizza"
-
- loop do
-   puts "LOOP #{Time.now}"
-
-   flairs = BOT.json(:get, "/r/#{SUBREDDIT}/api/flairlist", {limit: 1000})["users"]
-
-   text = \
-     "**Vote with your flair!**\n\n" +
-     "Type of pizza | Number of lovers\n" +
-     "--------------|-----------------\n" +
-     flairs.
-       group_by{ |flair| flair["flair_text"] }.
-       sort_by{ |_, group| -group.size }.
-       map{ |flair, group| "#{flair} | #{group.size}" }.
-       join("\n")
-
-   if text != BOT.json(:get, "/r/#{SUBREDDIT}/wiki/toppings")["data"]["content_md"]
-     puts "editing wiki page '/r/#{SUBREDDIT}/wiki/toppings'"
-     pp text
-     p BOT.json :post,
-       "/r/#{SUBREDDIT}/api/wiki/edit",
-       page: "toppings",
-       content: text
-   else
-     puts "nothing to change"
-   end
-
-   sleep 3600
- end
data/examples/unisa/Gemfile
@@ -1,4 +0,0 @@
- source "https://rubygems.org"
- ruby ">=2.2"
-
- gem "reddit_bot"#, git: "git@github.com:nakilon/reddit_bot.git"
data/examples/unisa/Gemfile.lock
@@ -1,24 +0,0 @@
- GEM
-   remote: https://rubygems.org/
-   specs:
-     addressable (2.7.0)
-       public_suffix (>= 2.0.2, < 5.0)
-     json (2.3.1)
-     nethttputils (0.4.0.0)
-       addressable
-     public_suffix (4.0.5)
-     reddit_bot (1.7.6)
-       json
-       nethttputils (~> 0.4.0.0)
-
- PLATFORMS
-   ruby
-
- DEPENDENCIES
-   reddit_bot
-
- RUBY VERSION
-    ruby 2.3.8p459
-
- BUNDLED WITH
-    2.0.2
data/examples/unisa/main.rb
@@ -1,41 +0,0 @@
- require "reddit_bot"
- subreddit = "unisa".freeze
- bot = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), subreddit: subreddit
- RedditBot::Twitter.init_twitter "unisa"
-
- loop do
-   bot.json(:get, "/message/unread")["data"]["children"].each do |msg|
-     next unless %w{ nakilon technomod }.include? msg["data"]["author"]
-     abort "ordered to die" if %w{ die die } == msg["data"].values_at("subject", "body")
-   end
-
-   id = bot.new_posts.flat_map do |post|
-     post["selftext"].scan(/\(https:\/\/twitter\.com\/#{RedditBot::Twitter::TWITTER_ACCOUNT}\/status\/(\d{18,})\)/i).flatten.map(&:to_i).max
-   end.find(&:itself)
-   abort "no tweets found in subreddit" if id.zero? unless ENV["FIRST_RUN"]
-   abort "flair isn't available" unless flair = bot.json(:get, "/r/#{subreddit}/api/link_flair").find{ |flair| flair["text"] == "Twitter" }
-
-   timeline = RedditBot::Twitter.user_timeline
-   timeline.replace timeline.take 2 if ENV["FIRST_RUN"] # against 200 posts long flood
-   timeline.reverse_each do |tweet|
-     next if tweet["id"] <= id
-     title, text, _ = RedditBot::Twitter.tweet2titleNtext tweet
-     result = bot.json :post, "/api/submit", {
-       sr: subreddit,
-       kind: "self",
-       title: title,
-       text: text,
-       flair_id: flair["id"],
-     }
-     p result
-     if result["json"]["errors"].empty?
-       abort "OK" if ENV["ONCE"]
-       next
-     end
-     fail unless result["json"]["errors"].map(&:first) == ["ALREADY_SUB"]
-     puts "ALREADY_SUB error for #{tweet["id"]}"
-   end
-
-   puts "END LOOP #{Time.now}"
-   sleep 300
- end
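
A note on the two Twitter cross-posters in this diff: `realtimeww2/main.rb` inlines the OAuth token request, timeline paging, and the `tweet2titleNtext` formatting lambda, while `unisa/main.rb` delegates all of that to the gem's `RedditBot::Twitter` helpers. Condensed from the unisa example above (its own subreddit name; flair and error handling omitted):

require "reddit_bot"
require "yaml"

subreddit = "unisa"
bot = RedditBot::Bot.new YAML.load(File.read "secrets.yaml"), subreddit: subreddit
RedditBot::Twitter.init_twitter "unisa"  # sets RedditBot::Twitter::TWITTER_ACCOUNT used above

RedditBot::Twitter.user_timeline.reverse_each do |tweet|
  # the helper returns [title, text, contains_media], like the inline lambda in realtimeww2
  title, text, _ = RedditBot::Twitter.tweet2titleNtext tweet
  p bot.json :post, "/api/submit",
    sr: subreddit, kind: "self", title: title, text: text
end
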