ex_twitter 0.1.1 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 5a6285b02440ee496486c9d354b6021886a2fd96
- data.tar.gz: 0e1face89fa2dcdfe61f66f225ce53860e0bf527
+ metadata.gz: fae8a4c6b259764373fb52ca806eb66f8c38a472
+ data.tar.gz: b2c3d986eb8ed42504f00ae2841b8dab8e302436
  SHA512:
- metadata.gz: 1eb8f2e80cbfd46564f03af107ca23b7c3eea9aba4fe4b673ab346f87bd323dc0459453437df59bd89f4f65307c36c53c5660c9195da46b0d062407460e446d9
- data.tar.gz: 125f18acc4477cc0b8a1013c276004b1b87a75f88bd52fb6d5f28a1194fe0519a3c7c7bd9de01c193c8b1802bca5fc656e1e6fa0b222893032c9cc3a8c64142e
+ metadata.gz: b7c77240b80287e4d73935813b8185658eeac95b7ccaa93540a72ac01ec3b2cac751c12098cda953ff4b2bd0fc8cbcbb8d8f87f078be941845b6982d1e02fdaf
+ data.tar.gz: 28a1dc726814e7e503b6d179945ef214b3b025e6cfe0650c56e54855f0f46c057d11e68a10b4e30ef2edd74aeb07c5fb4d6c6f5b69af2e0ce3fff101d19578ce
data/README.md CHANGED
@@ -50,11 +50,3 @@ end
  ```
  client.user_timeline
  ```
-
- ## Memo
-
- ```
- be rake build
- be rake release
- ```
-
data/ex_twitter.gemspec CHANGED
@@ -21,5 +21,5 @@ Gem::Specification.new do |spec|
  spec.require_paths = %w[lib]
  spec.summary = spec.description
  spec.test_files = Dir.glob('spec/**/*')
- spec.version = '0.1.1'
+ spec.version = '0.2.0'
  end
@@ -0,0 +1,194 @@
+ require 'active_support'
+ require 'active_support/cache'
+ require 'active_support/core_ext/string'
+
+ require 'ex_twitter/log_subscriber'
+ require 'ex_twitter/utils'
+ require 'ex_twitter/existing_api'
+ require 'ex_twitter/new_api'
+
+ require 'twitter'
+ require 'hashie'
+ require 'parallel'
+
+ module ExTwitter
+   class Client < Twitter::REST::Client
+     def initialize(options = {})
+       @cache = ActiveSupport::Cache::FileStore.new(File.join('tmp', 'api_cache'))
+       @call_count = 0
+
+       ExTwitter::LogSubscriber.attach_to :ex_twitter
+       ExTwitter::LogSubscriber.attach_to :active_support
+       @@logger = @logger =
+         if options[:logger]
+           options.delete(:logger)
+         else
+           Dir.mkdir('log') unless File.exists?('log')
+           Logger.new('log/ex_twitter.log')
+         end
+
+       super
+     end
+
+     def self.logger
+       @@logger
+     end
+
+     attr_accessor :call_count
+     attr_reader :cache, :authenticated_user, :logger
+
+     INDENT = 4
+
+     include ExTwitter::Utils
+
+     alias :old_friendship? :friendship?
+     alias :old_user? :user?
+     alias :old_user :user
+     alias :old_friend_ids :friend_ids
+     alias :old_follower_ids :follower_ids
+     alias :old_friends :friends
+     alias :old_followers :followers
+     alias :old_users :users
+     alias :old_home_timeline :home_timeline
+     alias :old_user_timeline :user_timeline
+     alias :old_mentions_timeline :mentions_timeline
+     alias :old_favorites :favorites
+     alias :old_search :search
+
+     include ExTwitter::ExistingApi
+     include ExTwitter::NewApi
+
+     def usage_stats_wday_series_data(times)
+       wday_count = times.each_with_object((0..6).map { |n| [n, 0] }.to_h) do |time, memo|
+         memo[time.wday] += 1
+       end
+       wday_count.map { |k, v| [I18n.t('date.abbr_day_names')[k], v] }.map do |key, value|
+         {name: key, y: value, drilldown: key}
+       end
+     end
+
+     def usage_stats_wday_drilldown_series(times)
+       hour_count =
+         (0..6).each_with_object((0..6).map { |n| [n, nil] }.to_h) do |wday, wday_memo|
+           wday_memo[wday] =
+             times.select { |t| t.wday == wday }.map { |t| t.hour }.each_with_object((0..23).map { |n| [n, 0] }.to_h) do |hour, hour_memo|
+               hour_memo[hour] += 1
+             end
+         end
+       hour_count.map { |k, v| [I18n.t('date.abbr_day_names')[k], v] }.map do |key, value|
+         {name: key, id: key, data: value.to_a.map{|a| [a[0].to_s, a[1]] }}
+       end
+     end
+
+     def usage_stats_hour_series_data(times)
+       hour_count = times.each_with_object((0..23).map { |n| [n, 0] }.to_h) do |time, memo|
+         memo[time.hour] += 1
+       end
+       hour_count.map do |key, value|
+         {name: key.to_s, y: value, drilldown: key.to_s}
+       end
+     end
+
+     def usage_stats_hour_drilldown_series(times)
+       wday_count =
+         (0..23).each_with_object((0..23).map { |n| [n, nil] }.to_h) do |hour, hour_memo|
+           hour_memo[hour] =
+             times.select { |t| t.hour == hour }.map { |t| t.wday }.each_with_object((0..6).map { |n| [n, 0] }.to_h) do |wday, wday_memo|
+               wday_memo[wday] += 1
+             end
+         end
+       wday_count.map do |key, value|
+         {name: key.to_s, id: key.to_s, data: value.to_a.map{|a| [I18n.t('date.abbr_day_names')[a[0]], a[1]] }}
+       end
+     end
+
+     def twitter_addiction_series(times)
+       five_mins = 5.minutes
+       wday_expended_seconds =
+         (0..6).each_with_object((0..6).map { |n| [n, nil] }.to_h) do |wday, wday_memo|
+           target_times = times.select { |t| t.wday == wday }
+           wday_memo[wday] = target_times.empty? ? nil : target_times.each_cons(2).map {|a, b| (a - b) < five_mins ? a - b : five_mins }.sum
+         end
+       days = times.map{|t| t.to_date.to_s(:long) }.uniq.size
+       weeks = (days > 7) ? days / 7.0 : 1.0
+       wday_expended_seconds.map { |k, v| [I18n.t('date.abbr_day_names')[k], (v.nil? ? nil : v / weeks / 60)] }.map do |key, value|
+         {name: key, y: value}
+       end
+     end
+
+     def usage_stats(user, options = {})
+       n_days_ago = options.has_key?(:days) ? options[:days].days.ago : 100.years.ago
+       tweets = options.has_key?(:tweets) ? options.delete(:tweets) : user_timeline(user)
+       times =
+         # TODO Use user specific time zone
+         tweets.map { |t| ActiveSupport::TimeZone['Tokyo'].parse(t.created_at.to_s) }.
+           select { |t| t > n_days_ago }
+       [
+         usage_stats_wday_series_data(times),
+         usage_stats_wday_drilldown_series(times),
+         usage_stats_hour_series_data(times),
+         usage_stats_hour_drilldown_series(times),
+         twitter_addiction_series(times)
+       ]
+     end
+
+
+     def calc_scores_from_users(users, options)
+       min = options.has_key?(:min) ? options[:min] : 0
+       max = options.has_key?(:max) ? options[:max] : 1000
+       users.each_with_object(Hash.new(0)) { |u, memo| memo[u.id] += 1 }.
+         select { |_k, v| min <= v && v <= max }.
+         sort_by { |_, v| -v }.to_h
+     end
+
+     def calc_scores_from_tweets(tweets, options = {})
+       calc_scores_from_users(tweets.map { |t| t.user }, options)
+     end
+
+     def select_favoriting_from_favs(favs, options = {})
+       return [] if favs.empty?
+       uids = calc_scores_from_tweets(favs)
+       result = uids.map { |uid, score| f = favs.
+         find { |f| f.user.id.to_i == uid.to_i }; Array.new(score, f) }.flatten.map { |f| f.user }
+       (options.has_key?(:uniq) && !options[:uniq]) ? result : result.uniq { |u| u.id }
+     end
+
+     def favoriting(user, options = {})
+       favs = options.has_key?(:favorites) ? options.delete(:favorites) : favorites(user, options)
+       select_favoriting_from_favs(favs, options)
+     rescue => e
+       logger.warn "#{__method__} #{user.inspect} #{e.class} #{e.message}"
+       raise e
+     end
+
+     def favorited_by(user)
+     end
+
+     def close_friends(_uid, options = {})
+       min = options.has_key?(:min) ? options[:min] : 0
+       max = options.has_key?(:max) ? options[:max] : 1000
+       uid_i = _uid.to_i
+       _replying = options.has_key?(:replying) ? options.delete(:replying) : replying(uid_i, options)
+       _replied = options.has_key?(:replied) ? options.delete(:replied) : replied(uid_i, options)
+       _favoriting = options.has_key?(:favoriting) ? options.delete(:favoriting) : favoriting(uid_i, options)
+
+       min_max = {min: min, max: max}
+       _users = _replying + _replied + _favoriting
+       return [] if _users.empty?
+
+       scores = calc_scores_from_users(_users, min_max)
+       replying_scores = calc_scores_from_users(_replying, min_max)
+       replied_scores = calc_scores_from_users(_replied, min_max)
+       favoriting_scores = calc_scores_from_users(_favoriting, min_max)
+
+       scores.keys.map { |uid| _users.find { |u| u.id.to_i == uid.to_i } }.
+         map do |u|
+           u[:score] = scores[u.id]
+           u[:replying_score] = replying_scores[u.id]
+           u[:replied_score] = replied_scores[u.id]
+           u[:favoriting_score] = favoriting_scores[u.id]
+           u
+         end
+     end
+   end
+ end
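
A minimal usage sketch of the new `ExTwitter::Client` added in this release; the credentials, the `'twitter'` screen name, the numeric ID `12345`, and the option values below are illustrative placeholders, not part of the gem.

```
require 'ex_twitter'

# Credentials are placeholders; the client accepts the same options as
# Twitter::REST::Client, plus an optional :logger.
client = ExTwitter::Client.new(
  consumer_key:        'YOUR_CONSUMER_KEY',
  consumer_secret:     'YOUR_CONSUMER_SECRET',
  access_token:        'YOUR_ACCESS_TOKEN',
  access_token_secret: 'YOUR_ACCESS_TOKEN_SECRET'
)

# usage_stats returns five chart-ready series: weekday, weekday drilldown,
# hour, hour drilldown, and the "addiction" series, limited here to 30 days.
wday, wday_drill, hour, hour_drill, addiction = client.usage_stats('twitter', days: 30)

# close_friends ranks users by combined replying/replied/favoriting scores.
client.close_friends(12345, min: 2, max: 1000).take(10)
```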
@@ -0,0 +1,122 @@
+ module ExTwitter
+   module ExistingApi
+     def friendship?(*args)
+       options = args.extract_options!
+       fetch_cache_or_call_api(__method__, args) {
+         call_old_method("old_#{__method__}", *args, options)
+       }
+     end
+
+     def user?(*args)
+       options = args.extract_options!
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         call_old_method("old_#{__method__}", args[0], options)
+       }
+     end
+
+     def user(*args)
+       options = args.extract_options!
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         call_old_method("old_#{__method__}", args[0], options)
+       }
+     end
+
+     def friend_ids(*args)
+       options = {count: 5000, cursor: -1}.merge(args.extract_options!)
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_cursor("old_#{__method__}", *args, options)
+       }
+     end
+
+     def follower_ids(*args)
+       options = {count: 5000, cursor: -1}.merge(args.extract_options!)
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_cursor("old_#{__method__}", *args, options)
+       }
+     end
+
+     # specify reduce: false to use the tweets for inactive_*
+     def friends(*args)
+       options = {count: 200, include_user_entities: true, cursor: -1}.merge(args.extract_options!)
+       options[:reduce] = false unless options.has_key?(:reduce)
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_cursor("old_#{__method__}", *args, options)
+       }
+     end
+
+     # specify reduce: false to use the tweets for inactive_*
+     def followers(*args)
+       options = {count: 200, include_user_entities: true, cursor: -1}.merge(args.extract_options!)
+       options[:reduce] = false unless options.has_key?(:reduce)
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_cursor("old_#{__method__}", *args, options)
+       }
+     end
+
+     # uses compact, not sort and uniq
+     # specify reduce: false to use the tweets for inactive_*
+     def users(*args)
+       options = args.extract_options!
+       options[:reduce] = false
+       users_per_workers = args.first.compact.each_slice(100).to_a
+       processed_users = []
+
+       Parallel.each_with_index(users_per_workers, in_threads: [users_per_workers.size, 10].min) do |users_per_worker, i|
+         _users = fetch_cache_or_call_api(__method__, users_per_worker, options) {
+           call_old_method("old_#{__method__}", users_per_worker, options)
+         }
+
+         processed_users << {i: i, users: _users}
+       end
+
+       processed_users.sort_by{|p| p[:i] }.map{|p| p[:users] }.flatten.compact
+     rescue => e # debug
+       logger.warn "#{__method__}: #{args.inspect} #{e.class} #{e.message}"
+       raise e
+     end
+
+     def home_timeline(*args)
+       options = {count: 200, include_rts: true, call_limit: 3}.merge(args.extract_options!)
+       fetch_cache_or_call_api(__method__, user.screen_name, options) {
+         collect_with_max_id("old_#{__method__}", options)
+       }
+     end
+
+     def user_timeline(*args)
+       options = {count: 200, include_rts: true, call_limit: 3}.merge(args.extract_options!)
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_max_id("old_#{__method__}", *args, options)
+       }
+     end
+
+     def mentions_timeline(*args)
+       options = {count: 200, include_rts: true, call_limit: 1}.merge(args.extract_options!)
+       fetch_cache_or_call_api(__method__, user.screen_name, options) {
+         collect_with_max_id("old_#{__method__}", options)
+       }
+     end
+
+     def favorites(*args)
+       options = {count: 100, call_count: 1}.merge(args.extract_options!)
+       args[0] = verify_credentials(skip_status: true).id if args.empty?
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_max_id("old_#{__method__}", *args, options)
+       }
+     end
+
+     def search(*args)
+       options = {count: 100, result_type: :recent, call_limit: 1}.merge(args.extract_options!)
+       options[:reduce] = false
+       fetch_cache_or_call_api(__method__, args[0], options) {
+         collect_with_max_id("old_#{__method__}", *args, options) { |response| response.attrs[:statuses] }
+       }
+     end
+   end
+ end
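
The wrappers above keep the original twitter gem method names while adding paging (`collect_with_cursor` / `collect_with_max_id`) and the file-backed cache from `ExTwitter::Utils` (not shown in this diff), so a repeated identical call should be served from `tmp/api_cache` rather than the API. A small sketch under that assumption, reusing the `client` from the previous example:

```
# The second call should come from the cache rather than the Twitter API.
first  = client.friend_ids('twitter')
second = client.friend_ids('twitter')

# friends/followers accept reduce: false so the inactive_* helpers can
# inspect each user's latest status.
followers = client.followers('twitter', reduce: false)

# search pages through recent results up to the configured call_limit.
tweets = client.search('#ruby')
```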
@@ -0,0 +1,79 @@
+ require 'active_support'
+ require 'active_support/core_ext'
+
+
+ module ExTwitter
+   class LogSubscriber < ActiveSupport::LogSubscriber
+
+     def initialize
+       super
+       @odd = false
+     end
+
+     def cache_any(event)
+       return unless logger.debug?
+
+       payload = event.payload
+       name = "#{payload.delete(:name)} (#{event.duration.round(1)}ms)"
+       name = colorize_payload_name(name, payload[:name], AS: true)
+       debug { "#{name}: #{(payload.inspect)}" }
+     end
+
+     %w(read write fetch_hit generate delete exist?).each do |operation|
+       class_eval <<-METHOD, __FILE__, __LINE__ + 1
+         def cache_#{operation}(event)
+           event.payload[:name] = '#{operation}'
+           cache_any(event)
+         end
+       METHOD
+     end
+
+     def call(event)
+       return unless logger.debug?
+
+       payload = event.payload
+       name = "#{payload.delete(:operation)} (#{event.duration.round(1)}ms)"
+
+       name = colorize_payload_name(name, payload[:name])
+       # sql = color(sql, sql_color(sql), true)
+
+       key = payload.delete(:key)
+       debug { "#{name}: #{key} #{(payload.inspect)}" }
+     end
+
+     private
+
+     def colorize_payload_name(name, payload_name, options = {})
+       if options[:AS]
+         color(name, MAGENTA, true)
+       else
+         color(name, CYAN, true)
+       end
+     end
+
+     def sql_color(sql)
+       case sql
+       when /\A\s*rollback/mi
+         RED
+       when /select .*for update/mi, /\A\s*lock/mi
+         WHITE
+       when /\A\s*select/i
+         BLUE
+       when /\A\s*insert/i
+         GREEN
+       when /\A\s*update/i
+         YELLOW
+       when /\A\s*delete/i
+         RED
+       when /transaction\s*\Z/i
+         CYAN
+       else
+         MAGENTA
+       end
+     end
+
+     def logger
+       ExTwitter::Client.logger
+     end
+   end
+ end
@@ -0,0 +1,238 @@
+ module ExTwitter
+   module NewApi
+     def friends_parallelly(*args)
+       options = {super_operation: __method__}.merge(args.extract_options!)
+       _friend_ids = friend_ids(*(args + [options]))
+       users(_friend_ids.map { |id| id.to_i }, options)
+     end
+
+     def followers_parallelly(*args)
+       options = {super_operation: __method__}.merge(args.extract_options!)
+       _follower_ids = follower_ids(*(args + [options]))
+       users(_follower_ids.map { |id| id.to_i }, options)
+     end
+
+     def _fetch_parallelly(signatures) # [{method: :friends, args: ['ts_3156', ...]}, {...}]
+       result = Array.new(signatures.size)
+
+       Parallel.each_with_index(signatures, in_threads: result.size) do |signature, i|
+         result[i] = send(signature[:method], *signature[:args])
+       end
+
+       result
+     end
+
+     def friends_and_followers(*args)
+       _fetch_parallelly(
+         [
+           {method: :friends_parallelly, args: args},
+           {method: :followers_parallelly, args: args}])
+     end
+
+     def friends_followers_and_statuses(*args)
+       _fetch_parallelly(
+         [
+           {method: :friends_parallelly, args: args},
+           {method: :followers_parallelly, args: args},
+           {method: :user_timeline, args: args}])
+     end
+
+     def one_sided_following(me)
+       if uid_or_screen_name?(me)
+         # TODO use friends_and_followers
+         friends_parallelly(me).to_a - followers_parallelly(me).to_a
+       elsif me.respond_to?(:friends) && me.respond_to?(:followers)
+         me.friends.to_a - me.followers.to_a
+       else
+         raise
+       end
+     end
+
+     def one_sided_followers(me)
+       if uid_or_screen_name?(me)
+         # TODO use friends_and_followers
+         followers_parallelly(me).to_a - friends_parallelly(me).to_a
+       elsif me.respond_to?(:friends) && me.respond_to?(:followers)
+         me.followers.to_a - me.friends.to_a
+       else
+         raise
+       end
+     end
+
+     def mutual_friends(me)
+       if uid_or_screen_name?(me)
+         # TODO use friends_and_followers
+         friends_parallelly(me).to_a & followers_parallelly(me).to_a
+       elsif me.respond_to?(:friends) && me.respond_to?(:followers)
+         me.friends.to_a & me.followers.to_a
+       else
+         raise
+       end
+     end
+
+     def common_friends(me, you)
+       if uid_or_screen_name?(me) && uid_or_screen_name?(you)
+         friends_parallelly(me).to_a & friends_parallelly(you).to_a
+       elsif me.respond_to?(:friends) && you.respond_to?(:friends)
+         me.friends.to_a & you.friends.to_a
+       else
+         raise
+       end
+     end
+
+     def common_followers(me, you)
+       if uid_or_screen_name?(me) && uid_or_screen_name?(you)
+         followers_parallelly(me).to_a & followers_parallelly(you).to_a
+       elsif me.respond_to?(:followers) && you.respond_to?(:followers)
+         me.followers.to_a & you.followers.to_a
+       else
+         raise
+       end
+     end
+
+     def removing(pre_me, cur_me)
+       if uid_or_screen_name?(pre_me) && uid_or_screen_name?(cur_me)
+         friends_parallelly(pre_me).to_a - friends_parallelly(cur_me).to_a
+       elsif pre_me.respond_to?(:friends) && cur_me.respond_to?(:friends)
+         pre_me.friends.to_a - cur_me.friends.to_a
+       else
+         raise
+       end
+     end
+
+     def removed(pre_me, cur_me)
+       if uid_or_screen_name?(pre_me) && uid_or_screen_name?(cur_me)
+         followers_parallelly(pre_me).to_a - followers_parallelly(cur_me).to_a
+       elsif pre_me.respond_to?(:followers) && cur_me.respond_to?(:followers)
+         pre_me.followers.to_a - cur_me.followers.to_a
+       else
+         raise
+       end
+     end
+
+     def _extract_screen_names(tweets, options = {})
+       result = tweets.map do |t|
+         $1 if t.text =~ /^(?:\.)?@(\w+)( |\W)/ # include statuses starting with .
+       end.compact
+       (options.has_key?(:uniq) && !options[:uniq]) ? result : result.uniq
+     end
+
+     # users the specified user is replying to
+     # in_reply_to_user_id and in_reply_to_status_id are not used, in order to distinguish mentions from replies
+     def replying(*args)
+       options = args.extract_options!
+       tweets =
+         if args.empty?
+           user_timeline(user.screen_name, options)
+         elsif uid_or_screen_name?(args[0])
+           user_timeline(args[0], options)
+         else
+           raise
+         end
+       screen_names = _extract_screen_names(tweets, options)
+       users(screen_names, {super_operation: __method__}.merge(options))
+     rescue Twitter::Error::NotFound => e
+       e.message == 'No user matches for specified terms.' ? [] : (raise e)
+     rescue => e
+       logger.warn "#{__method__} #{args.inspect} #{e.class} #{e.message}"
+       raise e
+     end
+
+     def _extract_uids(tweets, options)
+       result = tweets.map do |t|
+         t.user.id.to_i if t.text =~ /^(?:\.)?@(\w+)( |\W)/ # include statuses starting with .
+       end.compact
+       (options.has_key?(:uniq) && !options[:uniq]) ? result : result.uniq
+     end
+
+     def _extract_users(tweets, uids, options = {})
+       uids.map { |u| tweets.find { |t| t.user.id.to_i == u.to_i } }.map { |t| t.user }
+     end
+
+     # users that have replied to the specified user
+     # when the user is logged in, it is better to call mentions_timeline
+     def replied(*args)
+       options = args.extract_options!
+
+       if args.empty? || (uid_or_screen_name?(args[0]) && authenticating_user?(args[0]))
+         mentions_timeline.uniq { |m| m.user.id }.map { |m| m.user }
+       else
+         searched_result = search('@' + user(args[0]).screen_name, options)
+         uids = _extract_uids(searched_result, options)
+         _extract_users(searched_result, uids, options)
+       end
+     end
+
+     def _extract_inactive_users(users, options = {})
+       authorized = options.delete(:authorized)
+       two_weeks_ago = 2.weeks.ago.to_i
+       users.select do |u|
+         if authorized
+           (Time.parse(u.status.created_at).to_i < two_weeks_ago) rescue false
+         else
+           false
+         end
+       end
+     end
+
+     def inactive_friends(user = nil)
+       if user.blank?
+         _extract_inactive_users(friends_parallelly, authorized: true)
+       elsif uid_or_screen_name?(user)
+         authorized = authenticating_user?(user) || authorized_user?(user)
+         _extract_inactive_users(friends_parallelly(user), authorized: authorized)
+       elsif user.respond_to?(:friends)
+         authorized = authenticating_user?(user.uid.to_i) || authorized_user?(user.uid.to_i)
+         _extract_inactive_users(user.friends, authorized: authorized)
+       else
+         raise
+       end
+     end
+
+     def inactive_followers(user = nil)
+       if user.blank?
+         _extract_inactive_users(followers_parallelly, authorized: true)
+       elsif uid_or_screen_name?(user)
+         authorized = authenticating_user?(user) || authorized_user?(user)
+         _extract_inactive_users(followers_parallelly(user), authorized: authorized)
+       elsif user.respond_to?(:followers)
+         authorized = authenticating_user?(user.uid.to_i) || authorized_user?(user.uid.to_i)
+         _extract_inactive_users(user.followers, authorized: authorized)
+       else
+         raise
+       end
+     end
+
+     def clusters_belong_to(text)
+       return [] if text.blank?
+
+       exclude_words = JSON.parse(File.read(Rails.configuration.x.constants['cluster_bad_words_path']))
+       special_words = JSON.parse(File.read(Rails.configuration.x.constants['cluster_good_words_path']))
+
+       # Record how often each cluster-specific word appears
+       cluster_word_counter =
+         special_words.map { |sw| [sw, text.scan(sw)] }
+           .delete_if { |item| item[1].empty? }
+           .each_with_object(Hash.new(1)) { |item, memo| memo[item[0]] = item[1].size }
+
+       # Find runs of a single character class: kanji runs, hiragana runs, katakana runs, and so on
+       text.scan(/[一-龠〆ヵヶ々]+|[ぁ-んー~]+|[ァ-ヴー~]+|[a-zA-Z0-9]+|[、。!!??]+/).
+
+         # Strip characters that tend to repeat
+         map { |w| w.remove /[?!?!。、w]|(ー{2,})/ }.
+
+         # Drop words that are too short, hiragana-only words, and excluded words
+         delete_if { |w| w.length <= 1 || (w.length <= 2 && w =~ /^[ぁ-んー~]+$/) || exclude_words.include?(w) }.
+
+         # Record occurrence counts
+         each { |w| cluster_word_counter[w] += 1 }
+
+       # Keep only words that appear several times, sorted by frequency
+       cluster_word_counter.select { |_, v| v > 3 }.sort_by { |_, v| -v }.to_h
+     end
+
+     def clusters_assigned_to
+       raise NotImplementedError.new
+     end
+   end
+ end
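
A hedged sketch of the higher-level helpers in `ExTwitter::NewApi`; `'ts_3156'` is the placeholder screen name already used in the comments above, and these methods rely on helpers such as `uid_or_screen_name?` from `ExTwitter::Utils`, which is not shown in this diff.

```
# Relationship set arithmetic built on the parallel friend/follower fetches.
only_i_follow  = client.one_sided_following('ts_3156')
only_follow_me = client.one_sided_followers('ts_3156')
mutuals        = client.mutual_friends('ts_3156')

# Interaction helpers: who the account replies to, and who replies to it.
replying = client.replying('ts_3156')
replied  = client.replied('ts_3156')

# Friends with no tweet in the last two weeks.
idle = client.inactive_friends('ts_3156')
```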