deploy_log 0.2.7 → 0.3.0
- checksums.yaml +4 -4
- data/bin/deploy_log +4 -0
- data/lib/deploy_log.rb +2 -0
- data/lib/deploy_log/cache.rb +50 -0
- data/lib/deploy_log/github/api.rb +48 -0
- data/lib/deploy_log/github/helper.rb +52 -95
- data/lib/deploy_log/version.rb +1 -1
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2a41d5f5eec9fe419b26fb29d1b274bd8b4fb18e896258a2b61b193d8e7f9cb0
+  data.tar.gz: 063f3f25f4b06e99dc1c2158ee34b6e839a7d79884f9ef7ea7e7713b9638c277
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 70506d200f36f6c8d1edfdc5cde156a74d957103f38db0c17fb24fe909658cdbba42a6246f51fcc2efd412afc7df386bdf59224c0065f5be6332faaf13183827
+  data.tar.gz: 54959d81e9786443f1d8ce20089f2fc8d66f8fac709e362244c1365ca5e67ce0c170c2fa50e72278e26be48bdd474029ed4470b0d37b08d719ddfed61fd61da4
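The SHA256 and SHA512 sums above cover the metadata.gz and data.tar.gz entries packed inside the .gem archive. A minimal verification sketch in Ruby, assuming you have already unpacked those two entries from a downloaded deploy_log-0.3.0.gem into the current directory (the local paths are assumptions; the expected values are the SHA256 entries above):

    require 'digest'

    # Expected SHA256 values from checksums.yaml for 0.3.0
    expected = {
      'metadata.gz' => '2a41d5f5eec9fe419b26fb29d1b274bd8b4fb18e896258a2b61b193d8e7f9cb0',
      'data.tar.gz' => '063f3f25f4b06e99dc1c2158ee34b6e839a7d79884f9ef7ea7e7713b9638c277'
    }

    expected.each do |entry, sha|
      actual = Digest::SHA256.file(entry).hexdigest   # recompute over the unpacked entry
      puts format('%-12s %s', entry, actual == sha ? 'OK' : 'MISMATCH')
    end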
data/bin/deploy_log
CHANGED
@@ -26,6 +26,10 @@ OptionParser.new do |opt|
     branch = br
   end
 
+  opt.on('-u', '--user=USER', 'Name of the Github user you want to search by') do |br|
+    branch = br
+  end
+
   opt.on('-w', '--week=WEEK', 'Get PRs merged during week number X') do |w|
     week = w
   end
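Note that the new -u/--user switch stores its value in branch, exactly like the -b handler above it, so the user filter never reaches a variable of its own; this reads like a copy-paste slip. A hedged sketch of what the option presumably intends (the user local and its later use are assumptions, not part of this release):

    # user, like branch and week, would need to be initialised before the OptionParser block
    opt.on('-u', '--user=USER', 'Name of the Github user you want to search by') do |u|
      user = u   # hypothetical: keep the author filter separate from the branch filter
    end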
data/lib/deploy_log.rb
CHANGED
data/lib/deploy_log/cache.rb
ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'fileutils'
+
+module DeployLog
+  class Cache
+    attr_reader :filename
+
+    class FileNotFound < StandardError; end
+
+    def initialize(fmt, options = {})
+      fmt ||= 'deploy_%s.log'
+      dir = options[:dir] || '/tmp'
+
+      @repo = options[:repo]
+      @file_name_template = "#{dir}/#{fmt}"
+    end
+
+    def create(*args)
+      hash = Digest::MD5.hexdigest(@repo + args.join('|'))
+      path = FileUtils.touch format(@file_name_template, hash)
+
+      @filename = path.first
+    end
+
+    def exists?
+      File.exist?(@filename) && !File.size(@filename).zero?
+    end
+
+    def contents
+      raise FileNotFound unless exists?
+
+      File.read(@filename)
+    end
+
+    def write_object(pool, message)
+      File.open(@filename, 'w+') do |file|
+        pool.each do |pr|
+          line = yield(pr)
+
+          file.write(line)
+        end
+
+        file.write "============================================================\n"
+        file.write "#{message}\n"
+        file.write "============================================================\n"
+      end
+    end
+  end
+end
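DeployLog::Cache hashes the repo name plus the create arguments with MD5 to key a log file under /tmp (or options[:dir]), treats an empty file as a cache miss, and write_object streams one formatted line per item followed by a framed summary message. A minimal usage sketch, assuming the gem is installed and noting that cache.rb itself only requires fileutils, so Digest must be loaded by a caller:

    require 'digest'            # Cache#create calls Digest::MD5 but cache.rb does not require 'digest' itself
    require 'deploy_log/cache'  # assumed require path for lib/deploy_log/cache.rb

    PR = Struct.new(:title)     # stand-ins; the gem passes Octokit pull request resources here
    pool = [PR.new('Fix login redirect'), PR.new('Add request caching')]

    cache = DeployLog::Cache.new('github-deploys-%s.log', repo: 'someuser/somerepo')
    cache.create('2019-06-01', '2019-06-14')    # touches /tmp/github-deploys-<md5>.log and sets #filename

    unless cache.exists?                        # a zero-byte file counts as a miss
      cache.write_object(pool, "#{pool.size} PR(s) merged") { |pr| "#{pr.title}\n" }
    end

    puts cache.contents                         # raises Cache::FileNotFound if the file is still empty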
data/lib/deploy_log/github/api.rb
ADDED
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'octokit'
+
+module DeployLog
+  module Github
+    attr_reader :repo
+
+    class Api
+      def initialize(repo)
+        @client = Octokit::Client.new(login: ENV['GITHUB_USER'], password: ENV['GITHUB_TOKEN'])
+        @client.auto_paginate = true
+
+        @repo = repo
+      end
+
+      def pull_requests(options = {})
+        default_opts = {
+          state: :closed,
+          per_page: 500,
+          sort: 'long-running'
+        }
+
+        begin
+          @client.pull_requests(@repo, default_opts.merge(options))
+        rescue Octokit::NotFound => e
+          Notify.error e.message
+        end
+      end
+
+      def pull_request(id)
+        begin
+          @client.pull_request(@repo, id)
+        rescue Octokit::NotFound => e
+          Notify.error e.message
+        end
+      end
+
+      def commits_for(id)
+        begin
+          @client.pull_request_commits(@repo, id)
+        rescue Octokit::NotFound => e
+          Notify.error e.message
+        end
+      end
+    end
+  end
+end
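Github::Api wraps an Octokit::Client built from the GITHUB_USER and GITHUB_TOKEN environment variables (the token supplied as the basic-auth password), turns on auto_paginate, and rescues Octokit::NotFound by delegating to a Notify.error helper defined elsewhere in the gem. A hedged sketch of driving it directly; the repo name and credentials are placeholders:

    require 'deploy_log/github/api'   # assumed require path for lib/deploy_log/github/api.rb

    ENV['GITHUB_USER']  ||= 'someuser'                # read by Api#initialize
    ENV['GITHUB_TOKEN'] ||= 'personal-access-token'

    api = DeployLog::Github::Api.new('someuser/somerepo')

    # Closed pull requests that were actually merged; auto_paginate fetches every page
    merged = api.pull_requests.select(&:merged_at)
    merged.each { |pr| puts "##{pr.number} #{pr.title}" }

    # Commits on one of them
    api.commits_for(merged.first.number).each { |c| puts c.commit.message } if merged.any?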
data/lib/deploy_log/github/helper.rb
CHANGED
@@ -1,132 +1,89 @@
 # frozen_string_literal: true
 
-require 'octokit'
 require 'fileutils'
 
 module DeployLog
   module Github
-    class FileNotFound < StandardError; end
-
     class Helper
       LINE_FORMAT = "%s (%s)\n - Created by %s\n - Branch: %s\n - Merged by %s on %s\n - Changes: %s\n -- %s\n\n"
 
       def initialize(user_repo)
-        @
-        @
+        @api = Api.new(user_repo)
+        @cache = DeployLog::Cache.new('github-deploys-%s.log', repo: user_repo)
       end
 
-      def pulls_in_timeframe(date_start
-
-        return
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          f.write(
-            sprintf(
-              LINE_FORMAT,
-              pr.title,
-              pr.html_url,
-              pr.user.login,
-              pr.head.ref,
-              user_who_merged(pr.number),
-              formatted_time(pr.merged_at, true),
-              pr.diff_url,
-              committers_for(pr.number).join("\n -- ")
-            )
-          )
-        end
-
-        f.write("============================================================\n#{prs_covered} PR(s) merged from #{date_start} to #{date_end}\n============================================================\n")
+      def pulls_in_timeframe(date_start, date_end)
+        @cache.create(date_start, date_end)
+        return @cache.contents if @cache.exists?
+
+        pool = timeframe_pool(date_start, date_end)
+        message = "#{pool.size} PR(s) merged from #{date_start} to #{date_end}"
+
+        @cache.write_object(pool, message) do |item|
+          format(LINE_FORMAT,
+                 item.title,
+                 item.html_url,
+                 item.user.login,
+                 item.head.ref,
+                 user_who_merged(item.number),
+                 formatted_time(item.merged_at, true),
+                 item.diff_url,
+                 committers_for(item.number).join("\n -- ")
+          )
         end
 
-
-
-        cat(cache_path)
+        @cache.contents
       end
 
       def search_pulls_by(value, field = :title)
-
-        return
-
-
-
-
+        @cache.create(field, value)
+        return @cache.contents if @cache.exists?
+
+        pool = search_pool(field, value)
+        message = "#{pool.size} PR(s) matched"
+
+        @cache.write_object(pool, message) do |item|
+          format(LINE_FORMAT,
+                 item.title,
+                 item.html_url,
+                 item.user.login,
+                 item.head.ref,
+                 user_who_merged(item.number),
+                 formatted_time(item.merged_at, true),
+                 item.diff_url,
+                 committers_for(item.number).join("\n -- ")
           )
-        prs_covered = 0
-
-        File.open(cache_path, 'w+') do |f|
-          list.each do |pr|
-            next unless nested_hash_value(pr, field).match?(/#{value}\b/)
-
-            prs_covered += 1
-
-            f.write(
-              sprintf(
-                LINE_FORMAT,
-                pr.title,
-                pr.html_url,
-                pr.user.login,
-                pr.head.ref,
-                user_who_merged(pr.number),
-                formatted_time(pr.merged_at, true),
-                pr.diff_url,
-                committers_for(pr.number).join("\n -- ")
-              )
-            )
-          end
-
-          f.write("============================================================\n#{prs_covered} PR(s) matched\n============================================================\n")
         end
 
-
-
-        cat(cache_path)
+        @cache.contents
       end
 
       private
 
+      def timeframe_pool(date_start, date_end)
+        pool = @api.pull_requests
+        pool.select! { |pr| (date_start..date_end).cover?(pr.merged_at) }
+        pool
+      end
+
+      def search_pool(field, value)
+        pool = @api.pull_requests(state: :all, per_page: 100)
+        pool.select! { |pr| nested_hash_value(pr, field).match?(/#{value}\b/) }
+        pool
+      end
+
       def user_who_merged(num)
-        pr = @
+        pr = @api.pull_request(num)
        pr.merged_by.login
       end
 
       def committers_for(num)
-        commits = @
+        commits = @api.commits_for(num)
         commits.map do |c|
           "#{c.author.login} committed '#{c.commit.message}' at #{formatted_time(c.commit.committer.date, true)}"
         end
       end
 
-      def cache(*args)
-        hash = Digest::MD5.hexdigest(@repo_location + args.join('|'))
-        path = FileUtils.touch "/tmp/github-deploys-#{hash}.log"
-
-        path.first
-      end
-
-      def should_show_cache(cache_file_path)
-        File.exist?(cache_file_path) && !File.size(cache_file_path).zero?
-      end
-
-      def cat(path)
-        raise FileNotFound unless should_show_cache(path)
-
-        File.read(path)
-      end
-
       def formatted_time(time, use_local_time = false)
         time = Time.now if time.nil?
         time = time.localtime if use_local_time
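With this refactor the Helper no longer talks to Octokit or the filesystem directly: timeframe_pool and search_pool build the PR pool through Github::Api, each entry is rendered with LINE_FORMAT, and the result is persisted and replayed through DeployLog::Cache. Because timeframe_pool filters with (date_start..date_end).cover?(pr.merged_at), the endpoints need to be comparable with the Time values Octokit returns. A hedged calling sketch; the repo name and window are placeholders, and the top-level require is assumed to load these classes:

    require 'time'
    require 'deploy_log'   # assumed entry point that loads Cache, Github::Api and Github::Helper

    helper = DeployLog::Github::Helper.new('someuser/somerepo')

    window_start = Time.parse('2019-06-01 00:00:00 UTC')
    window_end   = Time.parse('2019-06-14 23:59:59 UTC')

    # First call hits the GitHub API and writes /tmp/github-deploys-<md5>.log; repeat calls replay the cache
    puts helper.pulls_in_timeframe(window_start, window_end)

    # Word-boundary regex match against the PR title (or another nested field)
    puts helper.search_pulls_by('hotfix', :title)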
data/lib/deploy_log/version.rb
CHANGED
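The version.rb change is the 0.2.7 → 0.3.0 bump itself. Assuming the conventional VERSION constant of a Bundler-generated gem, confirming the installed release looks like:

    require 'deploy_log/version'   # assumed to define DeployLog::VERSION in the usual way
    puts DeployLog::VERSION        # => "0.3.0" for this release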
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: deploy_log
 version: !ruby/object:Gem::Version
-  version: 0.2.7
+  version: 0.3.0
 platform: ruby
 authors:
 - Ryan Priebe
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-06-
+date: 2019-06-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -161,7 +161,9 @@ files:
 - docs/js/searcher.js.gz
 - docs/table_of_contents.html
 - lib/deploy_log.rb
+- lib/deploy_log/cache.rb
 - lib/deploy_log/calendar.rb
+- lib/deploy_log/github/api.rb
 - lib/deploy_log/github/deploys.rb
 - lib/deploy_log/github/helper.rb
 - lib/deploy_log/version.rb