github_changelog_generator 1.15.2 → 1.16.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. checksums.yaml +4 -4
  2. data/README.md +8 -6
  3. data/Rakefile +1 -1
  4. data/lib/github_changelog_generator/argv_parser.rb +225 -0
  5. data/lib/github_changelog_generator/generator/entry.rb +10 -10
  6. data/lib/github_changelog_generator/generator/generator.rb +41 -19
  7. data/lib/github_changelog_generator/generator/generator_fetcher.rb +5 -9
  8. data/lib/github_changelog_generator/generator/generator_processor.rb +23 -20
  9. data/lib/github_changelog_generator/generator/generator_tags.rb +15 -9
  10. data/lib/github_changelog_generator/generator/section.rb +27 -7
  11. data/lib/github_changelog_generator/helper.rb +1 -1
  12. data/lib/github_changelog_generator/octo_fetcher.rb +196 -149
  13. data/lib/github_changelog_generator/options.rb +4 -0
  14. data/lib/github_changelog_generator/parser.rb +70 -248
  15. data/lib/github_changelog_generator/parser_file.rb +29 -14
  16. data/lib/github_changelog_generator/reader.rb +4 -2
  17. data/lib/github_changelog_generator/ssl_certs/cacert.pem +851 -1756
  18. data/lib/github_changelog_generator/task.rb +3 -2
  19. data/lib/github_changelog_generator/version.rb +1 -1
  20. data/man/git-generate-changelog.1 +46 -34
  21. data/man/git-generate-changelog.1.html +39 -31
  22. data/man/git-generate-changelog.html +19 -19
  23. data/man/git-generate-changelog.md +39 -31
  24. data/spec/files/config_example +5 -0
  25. data/spec/spec_helper.rb +1 -1
  26. data/spec/unit/generator/entry_spec.rb +37 -31
  27. data/spec/unit/generator/generator_processor_spec.rb +99 -44
  28. data/spec/unit/generator/generator_spec.rb +47 -0
  29. data/spec/unit/generator/generator_tags_spec.rb +46 -3
  30. data/spec/unit/generator/section_spec.rb +34 -0
  31. data/spec/unit/octo_fetcher_spec.rb +45 -5
  32. data/spec/unit/parser_spec.rb +50 -0
  33. data/spec/unit/reader_spec.rb +9 -0
  34. metadata +44 -23
@@ -69,6 +69,9 @@ module GitHubChangelogGenerator
       # leave issues without milestones
       if issue["milestone"].nil?
         true
+      # remove issues of open milestones if option is set
+      elsif issue["milestone"]["state"] == "open"
+        @options[:issues_of_open_milestones]
       else
         # check, that this milestone in tag list:
         @filtered_tags.find { |tag| tag["name"] == issue["milestone"]["title"] }.nil?
@@ -130,21 +133,19 @@ module GitHubChangelogGenerator
     end

     def tag_older_new_tag?(newer_tag_time, time)
-      tag_in_range_new = if newer_tag_time.nil?
-                           true
-                         else
-                           time <= newer_tag_time
-                         end
-      tag_in_range_new
+      if newer_tag_time.nil?
+        true
+      else
+        time <= newer_tag_time
+      end
     end

     def tag_newer_old_tag?(older_tag_time, time)
-      tag_in_range_old = if older_tag_time.nil?
-                           true
-                         else
-                           time > older_tag_time
-                         end
-      tag_in_range_old
+      if older_tag_time.nil?
+        true
+      else
+        time > older_tag_time
+      end
     end

     # Include issues with labels, specified in :include_labels
@@ -152,22 +153,24 @@ module GitHubChangelogGenerator
     # @return [Array] filtered array of issues
     def include_issues_by_labels(issues)
       filtered_issues = filter_by_include_labels(issues)
-      filtered_issues = filter_wo_labels(filtered_issues)
-      filtered_issues
+      filter_wo_labels(filtered_issues)
     end

-    # @param [Array] issues Issues & PRs to filter when without labels
+    # @param [Array] items Issues & PRs to filter when without labels
     # @return [Array] Issues & PRs without labels or empty array if
     # add_issues_wo_labels or add_pr_wo_labels are false
-    def filter_wo_labels(issues)
-      if (!issues.empty? && issues.first.key?("pull_requests") && options[:add_pr_wo_labels]) || options[:add_issues_wo_labels]
-        issues
-      else
-        issues.select { |issue| issue["labels"].map { |l| l["name"] }.any? }
+    def filter_wo_labels(items)
+      if items.any? && items.first.key?("pull_request")
+        return items if options[:add_pr_wo_labels]
+      elsif options[:add_issues_wo_labels]
+        return items
       end
+      # The default is to filter items without labels
+      items.select { |item| item["labels"].map { |l| l["name"] }.any? }
     end

     # @todo Document this
+    # @param [Object] issues
     def filter_by_include_labels(issues)
       if options[:include_labels].nil?
         issues
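
The rewritten filter_wo_labels above first checks whether the items are pull requests (the "pull_request" key) before falling back to label filtering. A minimal sketch of that fallback, using hypothetical issue hashes:

    # Hypothetical data; only the "labels" shape matters here.
    labeled   = { "labels" => [{ "name" => "bug" }] }
    unlabeled = { "labels" => [] }

    # When neither add_pr_wo_labels nor add_issues_wo_labels applies,
    # only items that carry at least one label survive the select.
    [labeled, unlabeled].select { |item| item["labels"].map { |l| l["name"] }.any? }
    # => [{"labels"=>[{"name"=>"bug"}]}]
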
@@ -11,15 +11,10 @@ module GitHubChangelogGenerator
       fetch_tags_dates(all_tags) # Creates a Hash @tag_times_hash
       all_sorted_tags = sort_tags_by_date(all_tags)

-      @sorted_tags = filter_excluded_tags(all_sorted_tags)
+      @sorted_tags = filter_included_tags(all_sorted_tags)
+      @sorted_tags = filter_excluded_tags(@sorted_tags)
       @filtered_tags = get_filtered_tags(@sorted_tags)
-
-      # Because we need to properly create compare links, we need a sorted list
-      # of all filtered tags (including the excluded ones). We'll exclude those
-      # tags from section headers inside the mapping function.
-      section_tags = get_filtered_tags(all_sorted_tags)
-
-      @tag_section_mapping = build_tag_section_mapping(section_tags, @filtered_tags)
+      @tag_section_mapping = build_tag_section_mapping(@filtered_tags, @filtered_tags)

       @filtered_tags
     end
@@ -83,7 +78,7 @@ module GitHubChangelogGenerator
     # @return [Array] link, name and time of the tag
     def detect_link_tag_time(newer_tag)
       # if tag is nil - set current time
-      newer_tag_time = newer_tag.nil? ? Time.new : get_time_of_tag(newer_tag)
+      newer_tag_time = newer_tag.nil? ? Time.new.getutc : get_time_of_tag(newer_tag)

       # if it's future release tag - set this value
       if newer_tag.nil? && options[:future_release]
@@ -161,6 +156,17 @@ module GitHubChangelogGenerator
       filtered_tags
     end

+    # @param [Array] all_tags all tags
+    # @return [Array] filtered tags according to :include_tags_regex option
+    def filter_included_tags(all_tags)
+      if options[:include_tags_regex]
+        regex = Regexp.new(options[:include_tags_regex])
+        all_tags.select { |tag| regex =~ tag["name"] }
+      else
+        all_tags
+      end
+    end
+
     # @param [Array] all_tags all tags
     # @return [Array] filtered tags according :exclude_tags or :exclude_tags_regex option
     def filter_excluded_tags(all_tags)
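
The new filter_included_tags mirrors the existing exclude filter but keeps only matching tags. A small illustration with made-up tag names and the :include_tags_regex option:

    tags  = [{ "name" => "v1.0.0" }, { "name" => "v1.1.0" }, { "name" => "nightly-2021-05-01" }]
    regex = Regexp.new("^v1")   # value of options[:include_tags_regex]
    tags.select { |tag| regex =~ tag["name"] }
    # => [{"name"=>"v1.0.0"}, {"name"=>"v1.1.0"}]
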
@@ -7,7 +7,23 @@ module GitHubChangelogGenerator
   #
   # @see GitHubChangelogGenerator::Entry
   class Section
-    attr_accessor :name, :prefix, :issues, :labels, :body_only
+    # @return [String]
+    attr_accessor :name
+
+    # @return [String] a merge prefix, or an issue prefix
+    attr_reader :prefix
+
+    # @return [Array<Hash>]
+    attr_reader :issues
+
+    # @return [Array<String>]
+    attr_reader :labels
+
+    # @return [Boolean]
+    attr_reader :body_only
+
+    # @return [Options]
+    attr_reader :options

     def initialize(opts = {})
       @name = opts[:name]
@@ -16,11 +32,12 @@ module GitHubChangelogGenerator
       @issues = opts[:issues] || []
       @options = opts[:options] || Options.new({})
       @body_only = opts[:body_only] || false
+      @entry = Entry.new(options)
     end

     # Returns the content of a section.
     #
-    # @return [String] Generate section content
+    # @return [String] Generated section content
     def generate_content
       content = ""

@@ -49,7 +66,7 @@ module GitHubChangelogGenerator
       encapsulated_title = encapsulate_string issue["title"]

       title_with_number = "#{encapsulated_title} [\\##{issue['number']}](#{issue['html_url']})"
-      title_with_number = "#{title_with_number}#{line_labels_for(issue)}" if @options[:issue_line_labels].present?
+      title_with_number = "#{title_with_number}#{@entry.line_labels_for(issue)}" if @options[:issue_line_labels].present?
       line = issue_line_with_user(title_with_number, issue)
       issue_line_with_body(line, issue)
     end
@@ -60,16 +77,16 @@ module GitHubChangelogGenerator

       # get issue body till first line break
       body_paragraph = body_till_first_break(issue["body"])
-      # remove spaces from begining and end of the string
+      # remove spaces from beginning of the string
       body_paragraph.rstrip!
       # encapsulate to md
-      encapsulated_body = "\s\s\n" + encapsulate_string(body_paragraph)
+      encapsulated_body = " \n#{encapsulate_string(body_paragraph)}"

       "**#{line}** #{encapsulated_body}"
     end

     def body_till_first_break(body)
-      body.split(/\n/).first
+      body.split(/\n/, 2).first
     end

     def issue_line_with_user(line, issue)
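
The added limit argument in body_till_first_break only changes how much work split does; the result is still the first paragraph. For example:

    "First line\nsecond line\nthird line".split(/\n/, 2).first
    # => "First line"  (split stops after one cut instead of splitting the whole body)
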
@@ -95,7 +112,10 @@ module GitHubChangelogGenerator
       string = string.gsub('\\', '\\\\')

       ENCAPSULATED_CHARACTERS.each do |char|
-        string = string.gsub(char, "\\#{char}")
+        # Only replace char with escaped version if it isn't inside backticks (markdown inline code).
+        # This relies on each opening '`' being closed (ie an even number in total).
+        # A char is *outside* backticks if there is an even number of backticks following it.
+        string = string.gsub(%r{#{Regexp.escape(char)}(?=([^`]*`[^`]*`)*[^`]*$)}, "\\#{char}")
       end

       string
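
The lookahead added to encapsulate_string only escapes characters followed by an even number of backticks, i.e. characters outside inline code spans. A reduced check of that pattern (illustrative only):

    pattern = %r{\*(?=([^`]*`[^`]*`)*[^`]*$)}
    "escape *this* but not `*args*`".scan(pattern).size
    # => 2 -- only the two asterisks outside the backticks match
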
@@ -14,7 +14,7 @@ module GitHubChangelogGenerator
     @log ||= if test?
                Logger.new(nil) # don't show any logs when running tests
              else
-               Logger.new(STDOUT)
+               Logger.new($stdout)
              end
     @log.formatter = proc do |severity, _datetime, _progname, msg|
       string = "#{msg}\n"
@@ -1,7 +1,12 @@
 # frozen_string_literal: true

 require "tmpdir"
-require "retriable"
+require "set"
+require "async"
+require "async/barrier"
+require "async/semaphore"
+require "async/http/faraday"
+
 module GitHubChangelogGenerator
   # A Fetcher responsible for all requests to GitHub and all basic manipulation with related data
   # (such as filtering, validating, e.t.c)
@@ -9,8 +14,8 @@ module GitHubChangelogGenerator
   # Example:
   # fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)
   class OctoFetcher
-    PER_PAGE_NUMBER = 100
-    MAX_THREAD_NUMBER = 10
+    PER_PAGE_NUMBER = 100
+    MAXIMUM_CONNECTIONS = 50
     MAX_FORBIDDEN_RETRIES = 100
     CHANGELOG_GITHUB_TOKEN = "CHANGELOG_GITHUB_TOKEN"
     GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: Can't finish operation: GitHub API rate limit exceeded, changelog may be " \
@@ -31,47 +36,58 @@ module GitHubChangelogGenerator
       @project = @options[:project]
       @since = @options[:since]
       @http_cache = @options[:http_cache]
-      @cache_file = nil
-      @cache_log = nil
       @commits = []
-      @compares = {}
-      prepare_cache
-      configure_octokit_ssl
-      @client = Octokit::Client.new(github_options)
+      @branches = nil
+      @graph = nil
+      @client = nil
+      @commits_in_tag_cache = {}
+    end
+
+    def middleware
+      Faraday::RackBuilder.new do |builder|
+        if @http_cache
+          cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
+          cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }
+
+          builder.use(
+            Faraday::HttpCache,
+            serializer: Marshal,
+            store: ActiveSupport::Cache::FileStore.new(cache_file),
+            logger: Logger.new(cache_log),
+            shared_cache: false
+          )
+        end
+
+        builder.use Octokit::Response::RaiseError
+        builder.adapter :async_http
+      end
     end

-    def prepare_cache
-      return unless @http_cache
+    def connection_options
+      ca_file = @options[:ssl_ca_file] || ENV["SSL_CA_FILE"] || File.expand_path("ssl_certs/cacert.pem", __dir__)

-      @cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
-      @cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }
-      init_cache
+      Octokit.connection_options.merge({ ssl: { ca_file: ca_file } })
     end

-    def github_options
-      result = {}
-      github_token = fetch_github_token
-      result[:access_token] = github_token if github_token
-      endpoint = @options[:github_endpoint]
-      result[:api_endpoint] = endpoint if endpoint
-      result
-    end
+    def client_options
+      options = {
+        middleware: middleware,
+        connection_options: connection_options
+      }

-    def configure_octokit_ssl
-      ca_file = @options[:ssl_ca_file] || ENV["SSL_CA_FILE"] || File.expand_path("ssl_certs/cacert.pem", __dir__)
-      Octokit.connection_options = { ssl: { ca_file: ca_file } }
-    end
+      if (github_token = fetch_github_token)
+        options[:access_token] = github_token
+      end

-    def init_cache
-      Octokit.middleware = Faraday::RackBuilder.new do |builder|
-        builder.use(Faraday::HttpCache, serializer: Marshal,
-                    store: ActiveSupport::Cache::FileStore.new(@cache_file),
-                    logger: Logger.new(@cache_log),
-                    shared_cache: false)
-        builder.use Octokit::Response::RaiseError
-        builder.adapter Faraday.default_adapter
-        # builder.response :logger
+      if (endpoint = @options[:github_endpoint])
+        options[:api_endpoint] = endpoint
       end
+
+      options
+    end
+
+    def client
+      @client ||= Octokit::Client.new(client_options)
     end

     DEFAULT_REQUEST_OPTIONS = { per_page: PER_PAGE_NUMBER }
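
The fetcher now builds its Octokit client lazily from an explicit Faraday stack instead of mutating Octokit's global configuration. A generic sketch of the same pattern (not the gem's code; the token variable is illustrative):

    require "octokit"
    require "async/http/faraday"

    stack = Faraday::RackBuilder.new do |builder|
      builder.use Octokit::Response::RaiseError
      builder.adapter :async_http   # non-blocking HTTP when run inside an Async reactor
    end

    client = Octokit::Client.new(middleware: stack, access_token: ENV["CHANGELOG_GITHUB_TOKEN"])
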
@@ -88,6 +104,9 @@ module GitHubChangelogGenerator
     # Returns the number of pages for a API call
     #
     # @return [Integer] number of pages for this API call in total
+    # @param [Object] request_options
+    # @param [Object] method
+    # @param [Object] client
     def calculate_pages(client, method, request_options)
       # Makes the first API call so that we can call last_response
       check_github_response do
@@ -107,11 +126,11 @@ module GitHubChangelogGenerator
     #
     # @return [Array <Hash>] array of tags in repo
     def github_fetch_tags
-      tags = []
-      page_i = 0
-      count_pages = calculate_pages(@client, "tags", {})
+      tags = []
+      page_i = 0
+      count_pages = calculate_pages(client, "tags", {})

-      iterate_pages(@client, "tags") do |new_tags|
+      iterate_pages(client, "tags") do |new_tags|
         page_i += PER_PAGE_NUMBER
         print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
         tags.concat(new_tags)
@@ -142,9 +161,9 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       print "Fetching closed issues...\r" if @options[:verbose]
       issues = []
       page_i = 0
-      count_pages = calculate_pages(@client, "issues", closed_pr_options)
+      count_pages = calculate_pages(client, "issues", closed_pr_options)

-      iterate_pages(@client, "issues", closed_pr_options) do |new_issues|
+      iterate_pages(client, "issues", **closed_pr_options) do |new_issues|
         page_i += PER_PAGE_NUMBER
         print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
         issues.concat(new_issues)
@@ -165,10 +184,10 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       pull_requests = []
       options = { state: "closed" }

-      page_i = 0
-      count_pages = calculate_pages(@client, "pull_requests", options)
+      page_i = 0
+      count_pages = calculate_pages(client, "pull_requests", options)

-      iterate_pages(@client, "pull_requests", options) do |new_pr|
+      iterate_pages(client, "pull_requests", **options) do |new_pr|
         page_i += PER_PAGE_NUMBER
         log_string = "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}"
         print_in_same_line(log_string)
@@ -185,16 +204,20 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     # @param [Array] issues
     # @return [Void]
     def fetch_events_async(issues)
-      i = 0
-      threads = []
+      i = 0
       # Add accept option explicitly for disabling the warning of preview API.
       preview = { accept: Octokit::Preview::PREVIEW_TYPES[:project_card_events] }

-      issues.each_slice(MAX_THREAD_NUMBER) do |issues_slice|
-        issues_slice.each do |issue|
-          threads << Thread.new do
+      barrier = Async::Barrier.new
+      semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)
+
+      Sync do
+        client = self.client
+
+        issues.each do |issue|
+          semaphore.async do
             issue["events"] = []
-            iterate_pages(@client, "issue_events", issue["number"], preview) do |new_event|
+            iterate_pages(client, "issue_events", issue["number"], **preview) do |new_event|
               issue["events"].concat(new_event)
             end
             issue["events"] = issue["events"].map { |event| stringify_keys_deep(event.to_hash) }
@@ -202,12 +225,12 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
             i += 1
           end
         end
-        threads.each(&:join)
-        threads = []
-      end

-      # to clear line from prev print
-      print_empty_line
+        barrier.wait
+
+        # to clear line from prev print
+        print_empty_line
+      end

       Helper.log.info "Fetching events for issues and PR: #{i}"
     end
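
fetch_events_async (and fetch_comments_async below) replace the old thread slices with an Async barrier plus a semaphore that caps concurrent requests at MAXIMUM_CONNECTIONS. A stripped-down sketch of that pattern, outside the gem:

    require "async"
    require "async/barrier"
    require "async/semaphore"

    Sync do
      barrier   = Async::Barrier.new
      semaphore = Async::Semaphore.new(10, parent: barrier)   # at most 10 tasks in flight

      (1..100).each do |i|
        semaphore.async do
          # each unit of work runs in its own fiber-based task
        end
      end

      barrier.wait   # block here until every task has finished
    end
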
@@ -217,21 +240,25 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     # @param [Array] prs The array of PRs.
     # @return [Void] No return; PRs are updated in-place.
     def fetch_comments_async(prs)
-      threads = []
+      barrier = Async::Barrier.new
+      semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)

-      prs.each_slice(MAX_THREAD_NUMBER) do |prs_slice|
-        prs_slice.each do |pr|
-          threads << Thread.new do
+      Sync do
+        client = self.client
+
+        prs.each do |pr|
+          semaphore.async do
             pr["comments"] = []
-            iterate_pages(@client, "issue_comments", pr["number"]) do |new_comment|
+            iterate_pages(client, "issue_comments", pr["number"]) do |new_comment|
               pr["comments"].concat(new_comment)
             end
             pr["comments"] = pr["comments"].map { |comment| stringify_keys_deep(comment.to_hash) }
           end
         end
-        threads.each(&:join)
-        threads = []
+
+        barrier.wait
       end
+
       nil
     end

@@ -247,21 +274,6 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       commit_data["commit"]["committer"]["date"]
     end

-    # Fetch and cache comparison between two github refs
-    #
-    # @param [String] older The older sha/tag/branch.
-    # @param [String] newer The newer sha/tag/branch.
-    # @return [Hash] Github api response for comparison.
-    def fetch_compare(older, newer)
-      unless @compares["#{older}...#{newer}"]
-        compare_data = check_github_response { @client.compare(user_project, older, newer || "HEAD") }
-        raise StandardError, "Sha #{older} and sha #{newer} are not related; please file a github-changelog-generator issues and describe how to replicate this issue." if compare_data["status"] == "diverged"
-
-        @compares["#{older}...#{newer}"] = stringify_keys_deep(compare_data.to_hash)
-      end
-      @compares["#{older}...#{newer}"]
-    end
-
     # Fetch commit for specified event
     #
     # @param [String] commit_id the SHA of a commit to fetch
@@ -273,9 +285,11 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       if found
         stringify_keys_deep(found.to_hash)
       else
+        client = self.client
+
         # cache miss; don't add to @commits because unsure of order.
         check_github_response do
-          commit = @client.commit(user_project, commit_id)
+          commit = client.commit(user_project, commit_id)
           commit = stringify_keys_deep(commit.to_hash)
           commit
         end
@@ -287,8 +301,25 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     # @return [Array] Commits in a repo.
     def commits
       if @commits.empty?
-        iterate_pages(@client, "commits") do |new_commits|
-          @commits.concat(new_commits)
+        Sync do
+          barrier = Async::Barrier.new
+          semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)
+
+          if (since_commit = @options[:since_commit])
+            iterate_pages(client, "commits_since", since_commit, parent: semaphore) do |new_commits|
+              @commits.concat(new_commits)
+            end
+          else
+            iterate_pages(client, "commits", parent: semaphore) do |new_commits|
+              @commits.concat(new_commits)
+            end
+          end
+
+          barrier.wait
+
+          @commits.sort! do |b, a|
+            a[:commit][:author][:date] <=> b[:commit][:author][:date]
+          end
         end
       end
       @commits
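
Note the swapped block parameters in the sort!: comparing a <=> b with the arguments reversed orders commits newest-first by author date. For instance:

    commits = [
      { commit: { author: { date: "2021-01-01T00:00:00Z" } } },
      { commit: { author: { date: "2021-06-01T00:00:00Z" } } }
    ]
    commits.sort { |b, a| a[:commit][:author][:date] <=> b[:commit][:author][:date] }.first
    # => the 2021-06-01 commit
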
@@ -303,42 +334,63 @@ Make sure, that you push tags to remote repo via 'git push --tags'"

     # @return [String] Default branch of the repo
     def default_branch
-      @default_branch ||= @client.repository(user_project)[:default_branch]
+      @default_branch ||= client.repository(user_project)[:default_branch]
+    end
+
+    # @param [String] name
+    # @return [Array<String>]
+    def commits_in_branch(name)
+      @branches ||= client.branches(user_project).map { |branch| [branch[:name], branch] }.to_h
+
+      if (branch = @branches[name])
+        commits_in_tag(branch[:commit][:sha])
+      else
+        []
+      end
     end

     # Fetch all SHAs occurring in or before a given tag and add them to
     # "shas_in_tag"
     #
     # @param [Array] tags The array of tags.
-    # @return [Nil] No return; tags are updated in-place.
-    def fetch_tag_shas_async(tags)
-      i = 0
-      threads = []
-      print_in_same_line("Fetching SHAs for tags: #{i}/#{tags.count}\r") if @options[:verbose]
-
-      tags.each_slice(MAX_THREAD_NUMBER) do |tags_slice|
-        tags_slice.each do |tag|
-          threads << Thread.new do
-            # Use oldest commit because comparing two arbitrary tags may be diverged
-            commits_in_tag = fetch_compare(oldest_commit["sha"], tag["name"])
-            tag["shas_in_tag"] = commits_in_tag["commits"].collect { |commit| commit["sha"] }
-            print_in_same_line("Fetching SHAs for tags: #{i + 1}/#{tags.count}") if @options[:verbose]
-            i += 1
+    # @return void
+    def fetch_tag_shas(tags)
+      # Reverse the tags array to gain max benefit from the @commits_in_tag_cache
+      tags.reverse_each do |tag|
+        tag["shas_in_tag"] = commits_in_tag(tag["commit"]["sha"])
+      end
+    end
+
+    private
+
+    # @param [Set] shas
+    # @param [Object] sha
+    def commits_in_tag(sha, shas = Set.new)
+      # Reduce multiple runs for the same tag
+      return @commits_in_tag_cache[sha] if @commits_in_tag_cache.key?(sha)
+
+      @graph ||= commits.map { |commit| [commit[:sha], commit] }.to_h
+      return shas unless (current = @graph[sha])
+
+      queue = [current]
+      while queue.any?
+        commit = queue.shift
+        # If we've already processed this sha, just grab it's parents from the cache
+        if @commits_in_tag_cache.key?(commit[:sha])
+          shas.merge(@commits_in_tag_cache[commit[:sha]])
+        else
+          shas.add(commit[:sha])
+          commit[:parents].each do |p|
+            queue.push(@graph[p[:sha]]) unless shas.include?(p[:sha])
           end
         end
-        threads.each(&:join)
-        threads = []
       end

-      # to clear line from prev print
-      print_empty_line
-
-      Helper.log.info "Fetching SHAs for tags: #{i}"
-      nil
+      @commits_in_tag_cache[sha] = shas
+      shas
     end

-    private
-
+    # @param [Object] indata
     def stringify_keys_deep(indata)
       case indata
       when Array
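
commits_in_tag replaces the old compare-API calls with a local breadth-first walk over the already-fetched commit graph, memoised per SHA. A self-contained illustration of the traversal on a tiny hypothetical graph:

    require "set"

    graph = {
      "c3" => { sha: "c3", parents: [{ sha: "c2" }] },
      "c2" => { sha: "c2", parents: [{ sha: "c1" }] },
      "c1" => { sha: "c1", parents: [] }
    }

    shas  = Set.new
    queue = [graph["c3"]]
    while (commit = queue.shift)
      shas.add(commit[:sha])
      commit[:parents].each do |parent|
        queue.push(graph[parent[:sha]]) unless shas.include?(parent[:sha])
      end
    end
    shas.to_a # => ["c3", "c2", "c1"] -- every sha reachable from the tag's commit
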
@@ -362,45 +414,62 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @param [Octokit::Client] client
     # @param [String] method (eg. 'tags')
+    # @param [Array] arguments
+    # @param [Async::Semaphore] parent
     #
     # @yield [Sawyer::Resource] An OctoKit-provided response (which can be empty)
     #
     # @return [void]
-    def iterate_pages(client, method, *args)
-      args << DEFAULT_REQUEST_OPTIONS.merge(extract_request_args(args))
+    # @param [Hash] options
+    def iterate_pages(client, method, *arguments, parent: nil, **options)
+      options = DEFAULT_REQUEST_OPTIONS.merge(options)

-      check_github_response { client.send(method, user_project, *args) }
+      check_github_response { client.send(method, user_project, *arguments, **options) }
       last_response = client.last_response.tap do |response|
         raise(MovedPermanentlyError, response.data[:url]) if response.status == 301
       end

       yield(last_response.data)

-      until (next_one = last_response.rels[:next]).nil?
-        last_response = check_github_response { next_one.get }
-        yield(last_response.data)
-      end
-    end
-
-    def extract_request_args(args)
-      if args.size == 1 && args.first.is_a?(Hash)
-        args.delete_at(0)
-      elsif args.size > 1 && args.last.is_a?(Hash)
-        args.delete_at(args.length - 1)
-      else
-        {}
+      if parent.nil?
+        # The snail visits one leaf at a time:
+        until (next_one = last_response.rels[:next]).nil?
+          last_response = check_github_response { next_one.get }
+          yield(last_response.data)
+        end
+      elsif (last = last_response.rels[:last])
+        # OR we bring out the gatling gun:
+        parameters = querystring_as_hash(last.href)
+        last_page = Integer(parameters["page"])
+
+        (2..last_page).each do |page|
+          parent.async do
+            data = check_github_response { client.send(method, user_project, *arguments, page: page, **options) }
+            yield data
+          end
+        end
       end
     end

     # This is wrapper with rescue block
     #
     # @return [Object] returns exactly the same, what you put in the block, but wrap it with begin-rescue block
+    # @param [Proc] block
     def check_github_response
-      Retriable.retriable(retry_options) do
-        yield
-      end
+      yield
     rescue MovedPermanentlyError => e
       fail_with_message(e, "The repository has moved, update your configuration")
+    rescue Octokit::TooManyRequests => e
+      resets_in = client.rate_limit.resets_in
+      Helper.log.error("#{e.class} #{e.message}; sleeping for #{resets_in}s...")
+
+      if (task = Async::Task.current?)
+        task.sleep(resets_in)
+      else
+        sleep(resets_in)
+      end
+
+      retry
     rescue Octokit::Forbidden => e
       fail_with_message(e, "Exceeded retry limit")
     rescue Octokit::Unauthorized => e
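
check_github_response now handles Octokit::TooManyRequests itself instead of delegating to Retriable: it sleeps until the rate limit resets (using the task's sleep when inside the Async reactor) and retries. A hedged sketch of that shape with a generic Octokit client:

    begin
      client.issues("owner/repo")
    rescue Octokit::TooManyRequests
      wait = client.rate_limit.resets_in
      if (task = Async::Task.current?)
        task.sleep(wait)   # yield the fiber instead of blocking the reactor
      else
        sleep(wait)
      end
      retry
    end
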
@@ -408,36 +477,14 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     end

     # Presents the exception, and the aborts with the message.
+    # @param [Object] message
+    # @param [Object] error
     def fail_with_message(error, message)
       Helper.log.error("#{error.class}: #{error.message}")
       sys_abort(message)
     end

-    # Exponential backoff
-    def retry_options
-      {
-        on: [Octokit::Forbidden],
-        tries: MAX_FORBIDDEN_RETRIES,
-        base_interval: sleep_base_interval,
-        multiplier: 1.0,
-        rand_factor: 0.0,
-        on_retry: retry_callback
-      }
-    end
-
-    def sleep_base_interval
-      1.0
-    end
-
-    def retry_callback
-      proc do |exception, try, elapsed_time, next_interval|
-        Helper.log.warn("RETRY - #{exception.class}: '#{exception.message}'")
-        Helper.log.warn("#{try} tries in #{elapsed_time} seconds and #{next_interval} seconds until the next try")
-        Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
-        Helper.log.warn @client.rate_limit
-      end
-    end
-
+    # @param [Object] msg
     def sys_abort(msg)
       abort(msg)
     end
@@ -446,7 +493,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @param [String] log_string
     def print_in_same_line(log_string)
-      print log_string + "\r"
+      print "#{log_string}\r"
     end

     # Print long line with spaces on same line to clear prev message