inbound_api 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 69510628f49862bdc8b1e3421ef97bcd331aeec7
+   data.tar.gz: 782e9ae0da4fd3e20120bcfcfc959f8c54110416
+ SHA512:
+   metadata.gz: d59354f19840e714e3d4713b8d068e836a50421d7ce0ece8d5494e0ca5abc36f5e72377f78dc733a042c3d5c63417455d2f33b1a87244a7e1ca082de70553bd8
+   data.tar.gz: 152b751fc4320ffe65fc5041051e667c42b1d697af7c3e373f70f1effb8909764361693e2cf9738a4bee2399fad31dc9e97cf84d820c3d47b6a3c94f1148c0d3
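
These checksums cover the metadata.gz and data.tar.gz entries packed inside the released .gem archive. A minimal verification sketch using Ruby's standard Digest library and the RubyGems tar reader; the local file name inbound_api-1.0.3.gem is an assumed download location:

    require 'digest'
    require 'rubygems/package'

    # Assumed local path to the downloaded gem archive.
    gem_path = "inbound_api-1.0.3.gem"

    # Compare the digests of the inner entries against the published values above.
    Gem::Package::TarReader.new(File.open(gem_path, "rb")) do |tar|
      tar.each do |entry|
        next unless ["metadata.gz", "data.tar.gz"].include?(entry.full_name)
        body = entry.read
        puts "#{entry.full_name} SHA1:   #{Digest::SHA1.hexdigest(body)}"
        puts "#{entry.full_name} SHA512: #{Digest::SHA512.hexdigest(body)}"
      end
    end
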
lib/helper/fetch_followers.rb ADDED
@@ -0,0 +1,33 @@
+ class FetchFollowers < InboundAPI
+
+   def fetch_followers username
+
+     url = "#{@member_root_url}#{username}#{@followers_url}"
+     begin
+       page = @scraper.get(url)
+
+       begin
+         followers = []
+         elements = page.search(".user-details a")
+         elements.each do |element|
+           followers.push(element["href"].gsub(@member_root_url,""))
+         end
+       rescue
+         followers = nil
+       end
+
+
+       return {
+         "response" => "Success",
+         "followers" => followers
+       }
+     rescue
+       return {
+         "response" => "Failure"
+       }
+
+     end
+   end
+ end
+
+ require 'mechanize'
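
Given the InboundAPI base class that appears later in this diff, FetchFollowers is not meant to be instantiated directly; the facade supplies the Mechanize agent and URL fragments. A hedged usage sketch, with a placeholder username:

    require 'inbound_api'

    api = InboundAPI.new
    result = api.fetch_followers("some-username")   # placeholder username

    if result["response"] == "Success"
      # "followers" is an array of relative profile paths scraped from the
      # .user-details links, or nil if the selector matched nothing.
      puts result["followers"]
    end
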
lib/helper/fetch_following.rb ADDED
@@ -0,0 +1,33 @@
+ class FetchFollowing < InboundAPI
+
+   def fetch_following username
+
+     url = "#{@member_root_url}#{username}#{@following_url}"
+     begin
+       page = @scraper.get(url)
+
+       begin
+         following = []
+         elements = page.search(".user-details a")
+         elements.each do |element|
+           following.push(element["href"].gsub(@member_root_url,""))
+         end
+       rescue
+         following = nil
+       end
+
+
+       return {
+         "response" => "Success",
+         "following" => following
+       }
+     rescue
+       return {
+         "response" => "Failure"
+       }
+
+     end
+   end
+ end
+
+ require 'mechanize'
lib/helper/fetch_user_data.rb ADDED
@@ -0,0 +1,136 @@
+ class FetchUserData < InboundAPI
+
+   def fetch_user_data username
+
+     url = "#{@member_root_url}#{username}"
+     begin
+       page = @scraper.get(url)
+
+       userid = page.search(".toggle-follow-user")[0]["data-user-id"]
+
+       begin
+         fullname = page.search('.member-details h1')[0].children.text.to_s
+       rescue
+         fullname = nil
+       end
+
+       begin
+         karma = page.search('.user-stats .number')[0].text.to_i
+       rescue
+         karma = nil
+       end
+
+       begin
+         followers = page.search('.user-stats .number')[1].text.to_i
+       rescue
+         followers = nil
+       end
+
+       begin
+         following = page.search('.user-stats .number')[2].text.to_i
+       rescue
+         following = nil
+       end
+
+       begin
+         company = page.search("div.member-banner-details p:nth-of-type(1) a")[0].parent.text.strip.chomp.gsub("\n"," ").gsub("\t"," ")
+       rescue
+         company = nil
+       end
+
+       begin
+         company_link = page.search("div.member-banner-details p:nth-of-type(1) a")[0]["href"]
+       rescue
+         company_link = nil
+       end
+
+       begin
+         image = page.search(".member-banner .avatar img")[0]["src"]
+       rescue
+         image = nil
+       end
+
+       begin
+         location = page.search("div.member-banner-details p:nth-of-type(2)")[0].text.strip
+       rescue
+         location = nil
+       end
+
+       begin
+         tw_link = page.search(".member .twitter")[0]["href"]
+       rescue
+         tw_link = nil
+       end
+
+       begin
+         in_link = page.search(".member .linkedin")[0]["href"]
+       rescue
+         in_link = nil
+       end
+
+       begin
+         fb_link = page.search(".member .facebook")[0]["href"]
+       rescue
+         fb_link = nil
+       end
+
+       begin
+         gplus_link = page.search(".member .google-plus")[0]["href"]
+       rescue
+         gplus_link = nil
+       end
+
+       begin
+         web_link = page.search('.fa-link')[0].parent["href"]
+       rescue
+         web_link = nil
+       end
+
+       begin
+         badge_wrapper , badges = page.search('.badges li') , []
+         badge_wrapper.each do |badge|
+           badges.push(badge.search('.karma_title').text.strip)
+         end
+         number_badges = (badges.count == 0) ? nil : badges.count
+         badges = number_badges.nil? ? nil : badges
+
+       rescue
+         badges = nil
+         number_badges = nil
+       end
+       begin
+         recent_activity = page.search('.activity-list-submitted').first.text[2..-1]
+       rescue
+         recent_activity = nil
+       end
+
+       return {
+         "response" => "Success",
+         "userid" => userid,
+         "name" => fullname,
+         "image_url" => image,
+         "location" => location,
+         "karma" => karma,
+         "number_followers" => followers,
+         "number_following" => following,
+         "work" => company,
+         "company_link" => company_link,
+         "twitter_link" => tw_link,
+         "facebook_link" => fb_link,
+         "linkedin_link" => in_link,
+         "googleplus_link" => gplus_link,
+         "my_link" => web_link,
+         "number_badges" => number_badges,
+         "badges" => badges,
+         "recent_activity" => recent_activity
+       }
+     rescue
+       return {
+         "response" => "Failure"
+       }
+
+     end
+   end
+ end
+
+ require 'mechanize'
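
The same facade pattern applies here; every profile field other than the user id is wrapped in its own begin/rescue, so a missing element yields nil rather than an error. A hedged sketch of reading the returned hash, with a placeholder username:

    require 'inbound_api'

    api = InboundAPI.new
    profile = api.fetch_user_data("some-username")   # placeholder username

    if profile["response"] == "Success"
      puts profile["name"]
      puts profile["karma"]
      puts profile["number_followers"]
      puts profile["badges"].inspect   # nil when no badges were found
    end
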
lib/helper/follow_user.rb ADDED
@@ -0,0 +1,12 @@
+ class FollowUser < InboundAPI
+   def follow_user userid , scraper
+     begin
+       scraper.get("#{@follow_user_url}#{userid}")
+       return {"response" => "Success"}
+     rescue
+       return {"response" => "Failure"}
+     end
+   end
+ end
+
+ require 'mechanize'
lib/helper/login.rb ADDED
@@ -0,0 +1,24 @@
+ class Login < InboundAPI
+
+   def login credentials={}
+
+     begin
+       @scraper.post(@login_post_url, {
+         email: credentials["email"],
+         password: credentials["password"]
+       })
+       @scraper.get("https://inbound.org/")
+       if @scraper.get(@login_url).search(".modal-content .form-login").count == 0
+         return {"response" => "Success" , "agent" => @scraper}
+       else
+         return {"response" => "Failure"}
+       end
+     rescue
+       return {"response" => "Failure"}
+     end
+
+   end
+ end
+
+ require 'json'
+ require 'mechanize'
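
Authentication posts the credentials to @login_post_url and then judges success by checking that the login page no longer renders the .form-login modal. A hedged sketch through the facade, with placeholder credentials:

    require 'inbound_api'

    api = InboundAPI.new
    status = api.login("user@example.com", "secret-password")   # placeholder credentials

    puts status["response"]   # "Success" or "Failure"
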
lib/helper/search_users.rb ADDED
@@ -0,0 +1,53 @@
+ class SearchUsers < InboundAPI
+
+   def get_url_from_string string , query_url
+     if string != ""
+       string=string.split(" ").join("+")
+       query_url = query_url + "query=" + string + "&"
+     end
+     return query_url
+   end
+
+   def get_url_from_list source_list , search_list , query_url
+     source_list.each do |s|
+       search_list.each do |l|
+         if s["name"] == l || s["slug"] == l
+           query_url = query_url + "skills%5B%5D=" + s["id"] + "&"
+         end
+       end
+     end
+     return query_url
+   end
+
+   def search_users_page scraper , query_url
+     page = scraper.get(query_url)
+     object = JSON.parse(page.body)
+     usernames = object["data"].map {|o| o["username"]}
+     usernames.keep_if {|u| !u.nil? }
+     return usernames
+   end
+
+   def search_users scraper , query_url , pages = 1
+     begin
+       list = []
+       n_pages = JSON.parse(scraper.get(query_url).body)["meta"]["pagination"]["total_pages"]
+       if pages < 1 || pages > n_pages
+         pages = n_pages
+       end
+       for i in (1..pages)
+         list = list + search_users_page(scraper,"#{query_url}page=#{i.to_s}")
+       end
+       return {
+         "response" => "Success",
+         "usernames" => list
+       }
+     rescue Exception => e
+       return {
+         "response" => "Error #{e}"
+       }
+     end
+   end
+ end
+
+ require 'mechanize'
+ require 'json'
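
The facade's search_users builds the query URL from a free-text query, a location string, and badge/skill names that are resolved to ids against the lists fetched from api.inbound.org at initialization, then pages through the JSON results. A hedged sketch in which the query term and skill name are placeholders:

    require 'inbound_api'

    api = InboundAPI.new
    # query "growth", no location filter, no badge filter, one skill name, 2 pages
    result = api.search_users("growth", "", [], ["SEO"], 2)   # placeholder filters

    if result["response"] == "Success"
      puts result["usernames"]
    end
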
lib/helper/unfollow_user.rb ADDED
@@ -0,0 +1,12 @@
+ class UnfollowUser < InboundAPI
+   def unfollow_user userid , scraper
+     begin
+       scraper.get("#{@unfollow_user_url}#{userid}")
+       return {"response" => "Success"}
+     rescue
+       return {"response" => "Failure"}
+     end
+   end
+ end
+
+ require 'mechanize'
lib/inbound_api.rb ADDED
@@ -0,0 +1,96 @@
+ class InboundAPI
+   def initialize
+     @scraper = Mechanize.new do |a|
+       a.follow_meta_refresh = true
+       a.user_agent_alias = "Mac Safari 4"
+       a.verify_mode = OpenSSL::SSL::VERIFY_NONE
+     end
+     @root_url = "https://inbound.org/"
+     @search_members_api_url = "https://api.inbound.org/users?order=trending&"
+     @member_root_url = "https://inbound.org/in/"
+     @search_posts_url = ""
+     @followers_url = "/followers/"
+     @following_url = "/following/"
+     @login_post_url = "https://inbound.org/authenticate/check"
+     @login_url = "https://inbound.org/login"
+     @follow_user_url = "https://inbound.org/members/follow?follow=1&user_id="
+     @unfollow_user_url = "https://inbound.org/members/follow?follow=0&user_id="
+     @badges = JSON.parse(@scraper.get("https://api.inbound.org/badges").body)["data"]
+     @skills = JSON.parse(@scraper.get("https://api.inbound.org/skills").body)["data"]
+   end
+
+   def login email , password
+     client = Login.new()
+     output = client.login({"email" => email, "password" => password})
+     @scraper = (output["response"] == "Success") ? output["agent"] : @scraper
+     sleep 3
+     return {"response" => output["response"]}
+   end
+
+   def get_all_badges
+     return @badges
+   end
+
+   def get_all_skills
+     return @skills
+   end
+
+   def fetch_user_data username=""
+     client = FetchUserData.new()
+     data = client.fetch_user_data(username)
+     sleep 3
+     return data
+   end
+
+   def fetch_followers username=""
+     client = FetchFollowers.new()
+     data = client.fetch_followers(username)
+     sleep 3
+     return data
+   end
+
+   def fetch_following username=""
+     client = FetchFollowing.new()
+     data = client.fetch_following(username)
+     sleep 3
+     return data
+   end
+
+   def search_users query="" , location="" , badges=[] , skills=[] , pages = 1
+     client = SearchUsers.new()
+     query_url = @search_members_api_url
+     query_url = client.get_url_from_string(query,query_url)
+     query_url = client.get_url_from_string(location,query_url)
+     query_url = client.get_url_from_list(@badges,badges,query_url)
+     query_url = client.get_url_from_list(@skills,skills,query_url)
+     data = client.search_users(@scraper,query_url,pages)
+     sleep 3
+     return data
+   end
+
+   def follow_user username=""
+     client = FollowUser.new()
+     userid = fetch_user_data(username)[:userid]
+     sleep 3
+     return client.follow_user(userid,@scraper)
+   end
+
+   def unfollow_user username=""
+     client = UnfollowUser.new()
+     userid = fetch_user_data(username)[:userid]
+     sleep 3
+     return client.unfollow_user(userid,@scraper)
+   end
+ end
+
+
+ # Require other helper files too, and also add them to .gemspec file
+ require_relative 'helper/login'
+ require_relative 'helper/fetch_user_data'
+ require_relative 'helper/fetch_followers'
+ require_relative 'helper/fetch_following'
+ require_relative 'helper/search_users'
+ require_relative 'helper/follow_user'
+ require_relative 'helper/unfollow_user'
+ require 'mechanize'
+ require 'json'
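
Putting the pieces together, a typical session against the facade might look like the sketch below; each public method sleeps for three seconds to throttle scraping, and the credentials and usernames are placeholders:

    require 'inbound_api'

    api = InboundAPI.new                               # also fetches badge and skill lists
    api.login("user@example.com", "secret-password")   # placeholder credentials

    profile   = api.fetch_user_data("some-username")   # placeholder username
    followers = api.fetch_followers("some-username")
    search    = api.search_users("content marketing")

    api.follow_user("some-username")
    api.unfollow_user("some-username")
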
metadata ADDED
@@ -0,0 +1,94 @@
+ --- !ruby/object:Gem::Specification
+ name: inbound_api
+ version: !ruby/object:Gem::Version
+   version: 1.0.3
+ platform: ruby
+ authors:
+ - Athitya Kumar
+ - ''
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2017-01-12 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: mechanize
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 2.7.5
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 2.7.4
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 2.7.5
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 2.7.4
+ - !ruby/object:Gem::Dependency
+   name: json
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 2.0.3
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 2.0.1
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 2.0.3
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 2.0.1
+ description: A Ruby Gem API for Inbound.org platform implemented with custom web scraping.
+   Have a look at the documentation, to get started.
+ email:
+ - athityakumar@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/helper/fetch_followers.rb
+ - lib/helper/fetch_following.rb
+ - lib/helper/fetch_user_data.rb
+ - lib/helper/follow_user.rb
+ - lib/helper/login.rb
+ - lib/helper/search_users.rb
+ - lib/helper/unfollow_user.rb
+ - lib/inbound_api.rb
+ homepage: http://rubygems.org/gems/inbound_api
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.4
+ signing_key:
+ specification_version: 4
+ summary: A Ruby Gem API for Inbound.org platform implemented with custom web scraping.
+ test_files: []
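
Per the metadata above, the gem declares mechanize and json as runtime dependencies and installs from RubyGems in the usual way, for example via a Gemfile:

    # Gemfile
    source "https://rubygems.org"

    gem "inbound_api", "1.0.3"
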