gh-akerl 0.15.1

This diff shows the content of publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 66c0fe8b3672ab41b6013da9e62d1a04ae026d90402bcaf2843ed08ed3ad1807
+   data.tar.gz: b7514eaaff845c0683a109d4a496221b2f7ca1d2c3125794ef66909334f45779
+ SHA512:
+   metadata.gz: bdac11fcf07dedc0e5ab0212a23d11928ce86bbba6fb639cd0d44d0f31d47fcf5b9d15c55228c5e73c8c9a23fb8bfd00613518a55ba2fdbefde40983cd596c8b
+   data.tar.gz: bae91bdbfbc140dbeee491fe040765d6fca71fe9d7739f4f2b03fbd45ff001a8c7416d6aa458dfc5d4947f13ce2af9d740dcc21270ce66c97029360817c83e1d
data/LICENSE ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2013 Konstantin Haase
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
data/lib/gh/cache.rb ADDED
@@ -0,0 +1,65 @@
+ require 'gh'
+ require 'thread'
+
+ module GH
+   # Public: This class caches responses.
+   class Cache < Wrapper
+     # Public: Get/set cache to use. Compatible with Rails/ActiveSupport cache.
+     attr_accessor :cache
+
+     # Internal: Simple in-memory cache basically implementing a copying GC.
+     class SimpleCache
+       # Internal: Initializes a new SimpleCache.
+       #
+       # size - Number of objects to hold in cache.
+       def initialize(size = 2048)
+         @old, @new, @size, @mutex = {}, {}, size/2, Mutex.new
+       end
+
+       # Internal: Tries to fetch a value from the cache and if it doesn't exist, generates it from the
+       # block given.
+       def fetch(key)
+         @mutex.synchronize { @old, @new = @new, {} if @new.size > @size } if @new.size > @size
+         @new[key] ||= @old[key] || yield
+       end
+
+       # Internal: ...
+       def clear
+         @mutex.synchronize { @old, @new = {}, {} }
+       end
+     end
+
+     # Internal: Initializes a new Cache instance.
+     def setup(*)
+       #self.cache ||= Rails.cache if defined? Rails.cache and defined? RAILS_CACHE
+       #self.cache ||= ActiveSupport::Cache.lookup_store if defined? ActiveSupport::Cache.lookup_store
+       self.cache ||= SimpleCache.new
+       super
+     end
+
+     # Public: ...
+     def reset
+       super
+       clear_partial or clear_all
+     end
+
+     private
+
+     def fetch_resource(key)
+       cache.fetch(prefixed(key)) { super }
+     end
+
+     def clear_partial
+       return false unless cache.respond_to? :delete_matched
+       pattern = "^" << Regexp.escape(prefixed(""))
+       cache.delete_matched Regexp.new(pattern)
+       true
+     rescue NotImplementedError
+       false
+     end
+
+     def clear_all
+       cache.clear
+     end
+   end
+ end
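A minimal usage sketch of the SimpleCache shown above, assuming the gem is installed so that `require 'gh/cache'` loads this file. The two-generation design means a key fetched once is served from memory on the next call without running the block:

    require 'gh/cache'   # assumes the gem is on the load path

    calls = 0
    cache = GH::Cache::SimpleCache.new(4)            # roughly two entries per generation

    cache.fetch('users/rkh') { calls += 1; 'body' }  # miss: block runs, result stored in @new
    cache.fetch('users/rkh') { calls += 1; 'body' }  # hit: served from @new, block skipped
    calls # => 1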
data/lib/gh/case.rb ADDED
@@ -0,0 +1,8 @@
+ require 'gh'
+
+ module GH
+   module Case
+     def respond_to(method) proc { |o| o.respond_to? method } end
+     private :respond_to
+   end
+ end
data/lib/gh/custom_limit.rb ADDED
@@ -0,0 +1,27 @@
+ module GH
+   # Adds Client info so even unauthenticated requests can use a custom request limit
+   class CustomLimit < Wrapper
+     attr_accessor :client_id, :client_secret
+
+     def setup(backend, options)
+       @client_id = options[:client_id]
+       @client_secret = options[:client_secret]
+       super
+     end
+
+     def full_url(key)
+       return super unless client_id
+
+       url = super
+       params = url.query_values || {}
+
+       unless params.include? 'client_id'
+         params['client_id'] = client_id
+         params['client_secret'] = client_secret
+       end
+
+       url.query_values = params
+       url
+     end
+   end
+ end
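The query rewriting that full_url performs can be sketched with Addressable alone, which gh already uses elsewhere (see NestedResources below); the credentials here are hypothetical:

    require 'addressable/uri'

    url    = Addressable::URI.parse('https://api.github.com/users/rkh')
    params = url.query_values || {}
    params['client_id']     ||= 'my-client-id'      # hypothetical OAuth app credentials
    params['client_secret'] ||= 'my-client-secret'
    url.query_values = params
    url.to_s  # => ".../users/rkh?client_id=my-client-id&client_secret=my-client-secret"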
data/lib/gh/error.rb ADDED
@@ -0,0 +1,64 @@
+ require 'gh'
+
+ module GH
+   class Error < StandardError
+     attr_reader :info
+
+     def initialize(error = nil, payload = nil, info = {})
+       info = info.merge(error.info) if error.respond_to? :info and Hash === error.info
+       error = error.error while error.respond_to? :error
+       @info = info.merge(:error => error, :payload => payload)
+
+       if error
+         set_backtrace error.backtrace if error.respond_to? :backtrace
+         if error.respond_to? :response and error.response
+           case response = error.response
+           when Hash
+             @info[:response_status] = response[:status]
+             @info[:response_headers] = response[:headers]
+             @info[:response_body] = response[:body]
+           when Faraday::Response
+             @info[:response_status] = response.status
+             @info[:response_headers] = response.headers
+             @info[:response_body] = response.body
+           else
+             @info[:response] = response
+           end
+         end
+       end
+     end
+
+     def payload
+       info[:payload]
+     end
+
+     def error
+       info[:error]
+     end
+
+     def message
+       "GH request failed\n" + info.map { |k,v| entry(k,v) }.join("\n")
+     end
+
+     private
+
+     def entry(key, value)
+       value = "#{value.class}: #{value.message}" if Exception === value
+       value = value.inspect unless String === value
+       value.gsub!(/"Basic .+"|(client_(?:id|secret)=)[^&\s]+/, '\1[removed]')
+       (key.to_s + ": ").ljust(20) + value
+     end
+   end
+
+   class TokenInvalid < Error
+   end
+
+   def self.Error(conditions)
+     Module.new do
+       define_singleton_method(:===) do |exception|
+         return false unless Error === exception and not exception.info.nil?
+         conditions.all? { |k,v| v === exception.info[k]}
+       end
+     end
+   end
+ end
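The GH::Error(conditions) helper above returns an anonymous Module whose === matches against the exception's info hash, so it can be used directly in a rescue clause. A small sketch, assuming the gem is installed:

    require 'gh'

    begin
      # wrap any exception together with the info the wrappers collected
      raise GH::Error.new(RuntimeError.new('boom'), nil, :response_status => 404)
    rescue GH::Error(:response_status => 404) => error
      error.info[:response_status]  # => 404
      error.message                 # multi-line report; #entry scrubs credentials from the values
    end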
data/lib/gh/instrumentation.rb ADDED
@@ -0,0 +1,36 @@
+ require 'gh'
+
+ module GH
+   # Public: This class caches responses.
+   class Instrumentation < Wrapper
+     # Public: Get/set instrumenter to use. Compatible with ActiveSupport::Notification and Travis::EventLogger.
+     attr_accessor :instrumenter
+
+     def setup(backend, options)
+       self.instrumenter ||= Travis::EventLogger.method(:notify) if defined? Travis::EventLogger
+       self.instrumenter ||= ActiveSupport::Notifications.method(:instrument) if defined? ActiveSupport::Notifications
+       super
+     end
+
+     def http(verb, url, *)
+       instrument(:http, :verb => verb, :url => url) { super }
+     end
+
+     def load(data)
+       instrument(:load, :data => data) { super }
+     end
+
+     def [](key)
+       instrument(:access, :key => key) { super }
+     end
+
+     private
+
+     def instrument(type, payload = {})
+       return yield unless instrumenter
+       result = nil
+       instrumenter.call("#{type}.gh", payload.merge(:gh => frontend)) { result = yield }
+       return result
+     end
+   end
+ end
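When ActiveSupport is loaded, the wrapper above publishes http.gh, load.gh, and access.gh notifications. A sketch of a subscriber, assuming activesupport is available:

    require 'active_support/notifications'

    ActiveSupport::Notifications.subscribe('http.gh') do |name, start, finish, id, payload|
      # payload carries :verb, :url and the gh frontend under :gh
      puts "#{payload[:verb].to_s.upcase} #{payload[:url]} (#{finish - start}s)"
    end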
data/lib/gh/lazy_loader.rb ADDED
@@ -0,0 +1,26 @@
+ require 'gh'
+
+ module GH
+   # Public: ...
+   class LazyLoader < Wrapper
+     wraps GH::Normalizer
+     double_dispatch
+
+     def modify_hash(hash, loaded = false)
+       hash = super(hash)
+       link = hash['_links']['self'] unless loaded or hash['_links'].nil?
+       setup_lazy_loading(hash, link['href']) if link
+       hash
+     rescue Exception => error
+       raise Error.new(error, hash)
+     end
+
+     private
+
+     def lazy_load(hash, key, link)
+       modify_hash(backend[link].data, true)
+     rescue Exception => error
+       raise Error.new(error, hash)
+     end
+   end
+ end
data/lib/gh/link_follower.rb ADDED
@@ -0,0 +1,23 @@
+ module GH
+   class LinkFollower < Wrapper
+     wraps GH::Normalizer
+     double_dispatch
+
+     def modify_hash(hash)
+       hash = super
+       setup_lazy_loading(hash) if hash['_links']
+       hash
+     rescue Exception => error
+       raise Error.new(error, hash)
+     end
+
+     private
+
+     def lazy_load(hash, key)
+       link = hash['_links'][key]
+       { key => self[link['href']] } if link
+     rescue Exception => error
+       raise Error.new(error, hash)
+     end
+   end
+ end
data/lib/gh/merge_commit.rb ADDED
@@ -0,0 +1,86 @@
+ require 'gh'
+ require 'timeout'
+
+ module GH
+   # Public: ...
+   class MergeCommit < Wrapper
+     wraps GH::Normalizer
+     double_dispatch
+
+     def setup(backend, options)
+       @ssl = options[:ssl]
+       super
+     end
+
+     def modify_hash(hash)
+       setup_lazy_loading(super)
+     rescue Exception => error
+       raise Error.new(error, hash)
+     end
+
+     private
+
+     def lazy_load(hash, key)
+       return unless key =~ /^(merge|head|base)_commit$/ and hash.include? 'mergeable'
+       return unless has_merge_commit?(hash)
+       fields = pull_request_refs(hash)
+       fields['base_commit'] ||= commit_for hash, hash['base']
+       fields['head_commit'] ||= commit_for hash, hash['head']
+       fields
+     rescue Exception => error
+       raise Error.new(error, hash)
+     end
+
+     def commit_for(from, hash)
+       { 'sha' => hash['sha'], 'ref' => hash['ref'],
+         '_links' => { 'self' => { 'href' => git_url_for(from, hash['sha']) } } }
+     end
+
+     def git_url_for(hash, commitish)
+       hash['_links']['self']['href'].gsub(%r{/pulls/(\d+)$}, "/git/#{commitish}")
+     end
+
+     def pull_request_refs(hash)
+       link = git_url_for(hash, 'refs/pull/\1')
+       commits = self[link].map do |data|
+         ref = data['ref']
+         name = ref.split('/').last + "_commit"
+         object = data['object'].merge 'ref' => ref
+         [name, object]
+       end
+       Hash[commits]
+     end
+
+     def has_merge_commit?(hash)
+       force_merge_commit(hash)
+       hash['mergeable']
+     end
+
+     def github_done_checking?(hash)
+       case hash['mergeable_state']
+       when 'checking' then false
+       when 'unknown' then hash['merged']
+       when 'clean', 'dirty', 'unstable', 'stable', 'blocked', 'behind' then true
+       else fail "unknown mergeable_state #{hash['mergeable_state'].inspect} for #{url(hash)}"
+       end
+     end
+
+     def force_merge_commit(hash)
+       Timeout.timeout(180) do
+         update(hash) until github_done_checking? hash
+       end
+     rescue TimeoutError
+       status = hash['mergeable_state'].inspect
+       raise TimeoutError, "gave up waiting for github to check the merge status (current status is #{status})"
+     end
+
+     def update(hash)
+       hash.merge! backend[url(hash)]
+       sleep 0.5
+     end
+
+     def url(hash)
+       hash['_links']['self']['href']
+     end
+   end
+ end
data/lib/gh/nested_resources.rb ADDED
@@ -0,0 +1,49 @@
+ require 'gh'
+
+ module GH
+   # Public: ...
+   class NestedResources < Wrapper
+     wraps GH::Normalizer
+     double_dispatch
+
+     def modify_hash(hash, loaded = false)
+       hash = super(hash)
+       link = hash['_links']['self'] unless loaded or hash['_links'].nil?
+       set_links hash, Addressable::URI.parse(link['href']) if link
+       hash
+     end
+
+     def add(hash, link, name, path = name)
+       hash["_links"][name] ||= { "href" => nested(link, path) }
+     end
+
+     def nested(link, path)
+       new_link = link.dup
+       if path.start_with? '/'
+         new_link.path = path
+       else
+         new_link.path += path
+       end
+       new_link
+     end
+
+     def set_links(hash, link)
+       case link.path
+       when '/gists'
+         add hash, link, 'public'
+         add hash, link, 'starred'
+       when %r{^/repos/[^/]+/[^/]+$}
+         add hash, link, 'commits', 'git/commits'
+         add hash, link, 'refs', 'git/refs'
+         add hash, link, 'tags', 'git/tags'
+         add hash, link, 'issues'
+       when %r{^/repos/[^/]+/[^/]+/issues/\d+$}
+         add hash, link, 'comments'
+         add hash, link, 'events'
+       when '/user'
+         add hash, link, 'gists', '/gists'
+         add hash, link, 'issues', '/issues'
+       end
+     end
+   end
+ end
data/lib/gh/normalizer.rb ADDED
@@ -0,0 +1,111 @@
+ require 'gh'
+ require 'time'
+
+ module GH
+   # Public: A Wrapper class that deals with normalizing Github responses.
+   class Normalizer < Wrapper
+     def generate_response(key, response)
+       result = super
+       links(result)['self'] ||= { 'href' => frontend.full_url(key).to_s } if result.respond_to? :to_hash
+       result
+     end
+
+     private
+
+     double_dispatch
+
+     def links(hash)
+       hash = hash.data if hash.respond_to? :data
+       hash["_links"] ||= {}
+     end
+
+     def set_link(hash, type, href)
+       links(hash)[type] = {"href" => href}
+     end
+
+     def modify_response(response)
+       response = response.dup
+       response.data = modify response.data
+       response
+     end
+
+     def modify_hash(hash)
+       corrected = {}
+       corrected.default_proc = hash.default_proc if hash.default_proc
+
+       hash.each_pair do |key, value|
+         key = modify_key(key, value)
+         next if modify_url(corrected, key, value)
+         next if modify_time(corrected, key, value)
+         corrected[key] = modify(value)
+       end
+
+       modify_user(corrected)
+       corrected
+     end
+
+     TIME_KEYS = %w[date timestamp committed_at created_at merged_at closed_at datetime time]
+     TIME_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\S*$/
+
+     def modify_time(hash, key, value)
+       return unless TIME_KEYS.include? key or TIME_PATTERN === value
+       should_be = key == 'timestamp' ? 'date' : key
+       raise ArgumentError if RUBY_VERSION < "1.9" and value == "" # TODO: remove this line. duh.
+       time = Time.at(value) rescue Time.parse(value.to_s)
+       hash[should_be] = time.utc.xmlschema if time
+     rescue ArgumentError, TypeError
+       hash[should_be] = value
+     end
+
+     def modify_user(hash)
+       hash['owner'] ||= hash.delete('user') if hash['created_at'] and hash['user']
+       hash['author'] ||= hash.delete('user') if hash['committed_at'] and hash['user']
+
+       hash['committer'] ||= hash['author'] if hash['author']
+       hash['author'] ||= hash['committer'] if hash['committer']
+
+       modify_user_fields hash['owner']
+       modify_user_fields hash['user']
+     end
+
+     def modify_user_fields(hash)
+       return unless Hash === hash
+       hash['login'] = hash.delete('name') if hash['name']
+       set_link hash, 'self', "users/#{hash['login']}" unless links(hash).include? 'self'
+     end
+
+     def modify_url(hash, key, value)
+       case key
+       when "blog"
+         set_link(hash, key, value)
+       when "url"
+         type = value.to_s.start_with?(api_host.to_s) ? "self" : "html"
+         set_link(hash, type, value)
+       when /^(.+)_url$/
+         set_link(hash, $1, value)
+       when "config"
+         hash[key] = value
+       end
+     end
+
+     def modify_key(key, value = nil)
+       case key
+       when 'gravatar_url' then 'avatar_url'
+       when 'org' then 'organization'
+       when 'orgs' then 'organizations'
+       when 'username' then 'login'
+       when 'repo' then 'repository'
+       when 'repos' then modify_key('repositories', value)
+       when /^repos?_(.*)$/ then "repository_#{$1}"
+       when /^(.*)_repo$/ then "#{$1}_repository"
+       when /^(.*)_repos$/ then "#{$1}_repositories"
+       when 'commit', 'commit_id', 'id' then value =~ /^\w{40}$/ ? 'sha' : key
+       when 'comments' then Numeric === value ? 'comment_count' : key
+       when 'forks' then Numeric === value ? 'fork_count' : key
+       when 'repositories' then Numeric === value ? 'repository_count' : key
+       when /^(.*)s_count$/ then "#{$1}_count"
+       else key
+       end
+     end
+   end
+ end
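A sketch of the key renaming modify_key performs. Constructing the wrapper with no arguments assumes the gem's default backend setup, and send is used only because the method is private:

    require 'gh'

    normalizer = GH::Normalizer.new                  # assumes the default backend suffices here
    normalizer.send(:modify_key, 'org')              # => "organization"
    normalizer.send(:modify_key, 'gravatar_url')     # => "avatar_url"
    normalizer.send(:modify_key, 'comments', 3)      # => "comment_count"
    normalizer.send(:modify_key, 'comments', [])     # => "comments"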
data/lib/gh/pagination.rb ADDED
@@ -0,0 +1,57 @@
+ module GH
+   class Pagination < Wrapper
+     class Paginated
+       include Enumerable
+
+       def initialize(page, url, gh)
+         @page, @next_url, @gh = page, url, gh
+       end
+
+       def each(&block)
+         return enum_for(:each) unless block
+         @page.each(&block)
+         next_page.each(&block)
+       end
+
+       def inspect
+         "[#{first.inspect}, ...]"
+       end
+
+       def [](value)
+         raise TypeError, "index has to be an Integer, got #{value.class}" unless value.is_a? Integer
+         return @page[value] if value < @page.size
+         next_page[value - @page.size]
+       end
+
+       def to_ary
+         to_a # replace with better implementation (use in_parallel)
+       end
+
+       def headers
+         @page.headers
+       end
+
+       private
+
+       def next_page
+         @next_page ||= @gh[@next_url]
+       end
+     end
+
+     wraps GH::Normalizer
+     double_dispatch
+
+     def fetch_resource(key)
+       url = frontend.full_url(key)
+       params = url.query_values || {}
+       params['per_page'] ||= 100
+       url.query_values = params
+       super url.request_uri
+     end
+
+     def modify_response(response)
+       return response unless response.respond_to? :to_ary and response.headers['link'] =~ /<([^>]+)>;\s*rel=\"next\"/
+       Paginated.new(response, $1, self)
+     end
+   end
+ end
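In practice pagination is transparent: the wrapper asks for 100 items per page and Paginated lazily follows the rel="next" Link header. A sketch against the top-level GH facade, assuming network access and that the example path returns a paginated list:

    require 'gh'

    issues = GH['repos/rkh/gh/issues']            # example path; any list endpoint works
    issues.each { |issue| puts issue['title'] }   # later pages are fetched on demand
    issues[150]                                   # indexing also crosses page boundaries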
data/lib/gh/parallel.rb ADDED
@@ -0,0 +1,59 @@
+ require 'gh'
+ require 'thread'
+ require 'backports/basic_object' unless defined? BasicObject
+
+ module GH
+   # Public: ...
+   class Parallel < Wrapper
+     attr_accessor :parallelize
+
+     class Dummy < BasicObject
+       attr_accessor :__delegate__
+       def method_missing(*args)
+         ::Kernel.raise ::RuntimeError, "response not yet loaded" if __delegate__.nil?
+         __delegate__.__send__(*args)
+       end
+     end
+
+     def setup(*)
+       @parallelize = true if @parallelize.nil?
+       @in_parallel = false
+       @mutex = Mutex.new
+       @queue = []
+       super
+     end
+
+     def generate_response(key, response)
+       return super unless in_parallel?
+       dummy = Dummy.new
+       @mutex.synchronize { @queue << [dummy, key, response] }
+       dummy
+     end
+
+     def in_parallel
+       return yield if in_parallel? or not @parallelize
+       was, @in_parallel = @in_parallel, true
+       result = nil
+       connection.in_parallel { result = yield }
+       @mutex.synchronize do
+         @queue.each { |dummy, key, response| dummy.__delegate__ = backend.generate_response(key, response) }
+         @queue.clear
+       end
+       result
+     ensure
+       @in_parallel = was unless was.nil?
+     end
+
+     def in_parallel?
+       @in_parallel
+     end
+
+     def connection
+       @connection ||= begin
+         layer = backend
+         layer = layer.backend until layer.respond_to? :connection
+         layer.connection
+       end
+     end
+   end
+ end
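A sketch of the parallel mode, assuming the top-level GH facade forwards in_parallel to this wrapper (as the gem's README suggests): inside the block each lookup returns a Dummy placeholder, and the real responses are filled in once the block returns.

    require 'gh'

    a = b = nil
    GH.in_parallel do
      a = GH['users/rkh']            # Dummy placeholders while inside the block
      b = GH['users/svenfuchs']
    end
    a['name']                        # real data after the block has returned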