github_changelog_generator 1.15.0.pre.alpha → 1.16.0

Files changed (63)
  1. checksums.yaml +5 -5
  2. data/LICENSE +1 -1
  3. data/README.md +332 -275
  4. data/Rakefile +3 -4
  5. data/bin/git-generate-changelog +1 -1
  6. data/lib/github_changelog_generator.rb +10 -6
  7. data/lib/github_changelog_generator/generator/entry.rb +218 -0
  8. data/lib/github_changelog_generator/generator/generator.rb +126 -104
  9. data/lib/github_changelog_generator/generator/generator_fetcher.rb +139 -23
  10. data/lib/github_changelog_generator/generator/generator_processor.rb +59 -27
  11. data/lib/github_changelog_generator/generator/generator_tags.rb +26 -22
  12. data/lib/github_changelog_generator/generator/section.rb +124 -0
  13. data/lib/github_changelog_generator/helper.rb +1 -1
  14. data/lib/github_changelog_generator/octo_fetcher.rb +261 -130
  15. data/lib/github_changelog_generator/options.rb +74 -1
  16. data/lib/github_changelog_generator/parser.rb +120 -176
  17. data/lib/github_changelog_generator/parser_file.rb +8 -3
  18. data/lib/github_changelog_generator/reader.rb +2 -2
  19. data/lib/github_changelog_generator/task.rb +5 -6
  20. data/lib/github_changelog_generator/version.rb +1 -1
  21. data/man/git-generate-changelog.1 +144 -45
  22. data/man/git-generate-changelog.1.html +157 -84
  23. data/man/git-generate-changelog.html +19 -7
  24. data/man/git-generate-changelog.md +151 -84
  25. data/spec/files/github-changelog-generator.md +114 -114
  26. data/spec/{install-gem-in-bundler.gemfile → install_gem_in_bundler.gemfile} +2 -0
  27. data/spec/spec_helper.rb +2 -6
  28. data/spec/unit/generator/entry_spec.rb +766 -0
  29. data/spec/unit/generator/generator_processor_spec.rb +103 -41
  30. data/spec/unit/generator/generator_spec.rb +47 -0
  31. data/spec/unit/generator/generator_tags_spec.rb +56 -24
  32. data/spec/unit/generator/section_spec.rb +34 -0
  33. data/spec/unit/octo_fetcher_spec.rb +247 -197
  34. data/spec/unit/options_spec.rb +28 -4
  35. data/spec/unit/parse_file_spec.rb +2 -2
  36. data/spec/unit/parser_spec.rb +0 -79
  37. data/spec/unit/reader_spec.rb +4 -4
  38. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_commits/when_API_is_valid/returns_commits.json +1 -0
  39. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_commits_before/when_API_is_valid/returns_commits.json +1 -1
  40. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_issues_and_pr/when_API_call_is_valid.json +1 -1
  41. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_issues_and_pr/when_API_call_is_valid/returns_issue_with_proper_key/values.json +1 -1
  42. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_issues_and_pr/when_API_call_is_valid/returns_issues.json +1 -1
  43. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_issues_and_pr/when_API_call_is_valid/returns_issues_with_labels.json +1 -1
  44. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_issues_and_pr/when_API_call_is_valid/returns_pull_request_with_proper_key/values.json +1 -1
  45. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_issues_and_pr/when_API_call_is_valid/returns_pull_requests_with_labels.json +1 -1
  46. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_pull_requests/when_API_call_is_valid.json +1 -1
  47. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_pull_requests/when_API_call_is_valid/returns_correct_pull_request_keys.json +1 -1
  48. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_closed_pull_requests/when_API_call_is_valid/returns_pull_requests.json +1 -1
  49. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_commit/when_API_call_is_valid.json +1 -1
  50. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_commit/when_API_call_is_valid/returns_commit.json +1 -1
  51. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_date_of_tag/when_API_call_is_valid.json +1 -1
  52. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_date_of_tag/when_API_call_is_valid/returns_date.json +1 -1
  53. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_events_async/when_API_call_is_valid.json +1 -1
  54. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_fetch_events_async/when_API_call_is_valid/populates_issues.json +1 -1
  55. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_github_fetch_tags/when_API_call_is_valid.json +1 -1
  56. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_github_fetch_tags/when_API_call_is_valid/should_return_tags.json +1 -1
  57. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_github_fetch_tags/when_API_call_is_valid/should_return_tags_count.json +1 -1
  58. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_github_fetch_tags/when_wrong_token_provided.json +1 -1
  59. data/spec/vcr/GitHubChangelogGenerator_OctoFetcher/_github_fetch_tags/when_wrong_token_provided/should_raise_Unauthorized_error.json +1 -1
  60. metadata +71 -38
  61. data/bin/ghclgen +0 -5
  62. data/lib/github_changelog_generator/generator/generator_generation.rb +0 -180
  63. data/spec/unit/generator/generator_generation_spec.rb +0 -17
data/lib/github_changelog_generator/generator/section.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+module GitHubChangelogGenerator
+  # This class generates the content for a single section of a changelog entry.
+  # It turns the tagged issues and PRs into a well-formatted list of changes to
+  # be later incorporated into a changelog entry.
+  #
+  # @see GitHubChangelogGenerator::Entry
+  class Section
+    # @return [String]
+    attr_accessor :name
+
+    # @return [String] a merge prefix, or an issue prefix
+    attr_reader :prefix
+
+    # @return [Array<Hash>]
+    attr_reader :issues
+
+    # @return [Array<String>]
+    attr_reader :labels
+
+    # @return [Boolean]
+    attr_reader :body_only
+
+    # @return [Options]
+    attr_reader :options
+
+    def initialize(opts = {})
+      @name = opts[:name]
+      @prefix = opts[:prefix]
+      @labels = opts[:labels] || []
+      @issues = opts[:issues] || []
+      @options = opts[:options] || Options.new({})
+      @body_only = opts[:body_only] || false
+      @entry = Entry.new(options)
+    end
+
+    # Returns the content of a section.
+    #
+    # @return [String] Generated section content
+    def generate_content
+      content = ""
+
+      if @issues.any?
+        content += "#{@prefix}\n\n" unless @options[:simple_list] || @prefix.blank?
+        @issues.each do |issue|
+          merge_string = get_string_for_issue(issue)
+          content += "- " unless @body_only
+          content += "#{merge_string}\n"
+        end
+        content += "\n"
+      end
+      content
+    end
+
+    private
+
+    # Parse issue and generate single line formatted issue line.
+    #
+    # Example output:
+    # - Add coveralls integration [\#223](https://github.com/github-changelog-generator/github-changelog-generator/pull/223) (@github-changelog-generator)
+    #
+    # @param [Hash] issue Fetched issue from GitHub
+    # @return [String] Markdown-formatted single issue
+    def get_string_for_issue(issue)
+      encapsulated_title = encapsulate_string issue["title"]
+
+      title_with_number = "#{encapsulated_title} [\\##{issue['number']}](#{issue['html_url']})"
+      title_with_number = "#{title_with_number}#{@entry.line_labels_for(issue)}" if @options[:issue_line_labels].present?
+      line = issue_line_with_user(title_with_number, issue)
+      issue_line_with_body(line, issue)
+    end
+
+    def issue_line_with_body(line, issue)
+      return issue["body"] if @body_only && issue["body"].present?
+      return line if !@options[:issue_line_body] || issue["body"].blank?
+
+      # get issue body till first line break
+      body_paragraph = body_till_first_break(issue["body"])
+      # remove spaces from beginning of the string
+      body_paragraph.rstrip!
+      # encapsulate to md
+      encapsulated_body = " \n#{encapsulate_string(body_paragraph)}"
+
+      "**#{line}** #{encapsulated_body}"
+    end
+
+    def body_till_first_break(body)
+      body.split(/\n/, 2).first
+    end
+
+    def issue_line_with_user(line, issue)
+      return line if !@options[:author] || issue["pull_request"].nil?
+
+      user = issue["user"]
+      return "#{line} ({Null user})" unless user
+
+      if @options[:usernames_as_github_logins]
+        "#{line} (@#{user['login']})"
+      else
+        "#{line} ([#{user['login']}](#{user['html_url']}))"
+      end
+    end
+
+    ENCAPSULATED_CHARACTERS = %w(< > * _ \( \) [ ] #)
+
+    # Encapsulate characters to make Markdown look as expected.
+    #
+    # @param [String] string
+    # @return [String] encapsulated input string
+    def encapsulate_string(string)
+      string = string.gsub('\\', '\\\\')
+
+      ENCAPSULATED_CHARACTERS.each do |char|
+        # Only replace char with escaped version if it isn't inside backticks (markdown inline code).
+        # This relies on each opening '`' being closed (ie an even number in total).
+        # A char is *outside* backticks if there is an even number of backticks following it.
+        string = string.gsub(%r{#{Regexp.escape(char)}(?=([^`]*`[^`]*`)*[^`]*$)}, "\\#{char}")
+      end
+
+      string
+    end
+  end
+end
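For orientation (this example is not part of the diff), a minimal sketch of how the new Section class is driven, assuming the gem is loaded; the issue hash, repository URL, and title are hypothetical stand-ins shaped like a GitHub API issue payload, and default Options are used:

    require "github_changelog_generator"

    section = GitHubChangelogGenerator::Section.new(
      prefix: "**Fixed bugs:**",
      issues: [{
        "title"    => "Fix crash when a tag has no commits",        # hypothetical issue
        "number"   => 123,
        "html_url" => "https://github.com/example/repo/issues/123"  # hypothetical URL
      }]
    )

    # Renders the prefix followed by one "- ..." bullet per issue, roughly:
    #   **Fixed bugs:**
    #
    #   - Fix crash when a tag has no commits [\#123](https://github.com/example/repo/issues/123)
    puts section.generate_content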
data/lib/github_changelog_generator/helper.rb
@@ -14,7 +14,7 @@ module GitHubChangelogGenerator
       @log ||= if test?
                  Logger.new(nil) # don't show any logs when running tests
                else
-                 Logger.new(STDOUT)
+                 Logger.new($stdout)
                end
       @log.formatter = proc do |severity, _datetime, _progname, msg|
         string = "#{msg}\n"
data/lib/github_changelog_generator/octo_fetcher.rb
@@ -2,6 +2,12 @@

 require "tmpdir"
 require "retriable"
+require "set"
+require "async"
+require "async/barrier"
+require "async/semaphore"
+require "async/http/faraday"
+
 module GitHubChangelogGenerator
   # A Fetcher responsible for all requests to GitHub and all basic manipulation with related data
   # (such as filtering, validating, e.t.c)
@@ -9,14 +15,14 @@ module GitHubChangelogGenerator
   # Example:
   # fetcher = GitHubChangelogGenerator::OctoFetcher.new(options)
   class OctoFetcher
-    PER_PAGE_NUMBER = 100
-    MAX_THREAD_NUMBER = 25
+    PER_PAGE_NUMBER = 100
+    MAXIMUM_CONNECTIONS = 50
     MAX_FORBIDDEN_RETRIES = 100
     CHANGELOG_GITHUB_TOKEN = "CHANGELOG_GITHUB_TOKEN"
-    GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: Can't finish operation: GitHub API rate limit exceeded, change log may be " \
+    GH_RATE_LIMIT_EXCEEDED_MSG = "Warning: Can't finish operation: GitHub API rate limit exceeded, changelog may be " \
      "missing some issues. You can limit the number of issues fetched using the `--max-issues NUM` argument."
     NO_TOKEN_PROVIDED = "Warning: No token provided (-t option) and variable $CHANGELOG_GITHUB_TOKEN was not found. " \
-      "This script can make only 50 requests to GitHub API per hour without token!"
+      "This script can make only 50 requests to GitHub API per hour without a token!"

     # @param options [Hash] Options passed in
     # @option options [String] :user GitHub username
@@ -31,44 +37,62 @@ module GitHubChangelogGenerator
       @project = @options[:project]
       @since = @options[:since]
       @http_cache = @options[:http_cache]
-      if @http_cache
-        @cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
-        @cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }
-        init_cache
-      end
-      @github_token = fetch_github_token
-
-      @request_options = { per_page: PER_PAGE_NUMBER }
-      @github_options = {}
-      @github_options[:access_token] = @github_token unless @github_token.nil?
-      @github_options[:api_endpoint] = @options[:github_endpoint] unless @options[:github_endpoint].nil?
+      @commits = []
+      @branches = nil
+      @graph = nil
+      @client = nil
+      @commits_in_tag_cache = {}
+    end

-      configure_octokit_ssl
+    def middleware
+      Faraday::RackBuilder.new do |builder|
+        if @http_cache
+          cache_file = @options.fetch(:cache_file) { File.join(Dir.tmpdir, "github-changelog-http-cache") }
+          cache_log = @options.fetch(:cache_log) { File.join(Dir.tmpdir, "github-changelog-logger.log") }
+
+          builder.use(
+            Faraday::HttpCache,
+            serializer: Marshal,
+            store: ActiveSupport::Cache::FileStore.new(cache_file),
+            logger: Logger.new(cache_log),
+            shared_cache: false
+          )
+        end

-      @client = Octokit::Client.new(@github_options)
+        builder.use Octokit::Response::RaiseError
+        builder.adapter :async_http
+      end
     end

-    def configure_octokit_ssl
-      ca_file = @options[:ssl_ca_file] || ENV["SSL_CA_FILE"] || File.expand_path("../ssl_certs/cacert.pem", __FILE__)
-      Octokit.connection_options = { ssl: { ca_file: ca_file } }
+    def connection_options
+      ca_file = @options[:ssl_ca_file] || ENV["SSL_CA_FILE"] || File.expand_path("ssl_certs/cacert.pem", __dir__)
+
+      Octokit.connection_options.merge({ ssl: { ca_file: ca_file } })
     end

-    def init_cache
-      middleware_opts = {
-        serializer: Marshal,
-        store: ActiveSupport::Cache::FileStore.new(@cache_file),
-        logger: Logger.new(@cache_log),
-        shared_cache: false
+    def client_options
+      options = {
+        middleware: middleware,
+        connection_options: connection_options
       }
-      stack = Faraday::RackBuilder.new do |builder|
-        builder.use Faraday::HttpCache, middleware_opts
-        builder.use Octokit::Response::RaiseError
-        builder.adapter Faraday.default_adapter
-        # builder.response :logger
+
+      if (github_token = fetch_github_token)
+        options[:access_token] = github_token
       end
-      Octokit.middleware = stack
+
+      if (endpoint = @options[:github_endpoint])
+        options[:api_endpoint] = endpoint
+      end
+
+      options
     end

+    def client
+      @client ||= Octokit::Client.new(client_options)
+    end
+
+    DEFAULT_REQUEST_OPTIONS = { per_page: PER_PAGE_NUMBER }
+
     # Fetch all tags from repo
     #
     # @return [Array <Hash>] array of tags
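Aside (illustrative, not code from the gem): the new per-instance client simply hands the Faraday stack and SSL options to Octokit. Roughly the same wiring, standalone and with a hypothetical token and CA path, looks like:

    require "octokit"
    require "async/http/faraday"   # registers the :async_http Faraday adapter

    stack = Faraday::RackBuilder.new do |builder|
      builder.use Octokit::Response::RaiseError   # turn 4xx/5xx responses into exceptions
      builder.adapter :async_http                 # non-blocking HTTP for the Async-based fetchers
    end

    client = Octokit::Client.new(
      access_token: ENV["CHANGELOG_GITHUB_TOKEN"],                     # optional
      middleware: stack,
      connection_options: { ssl: { ca_file: "/path/to/cacert.pem" } }  # hypothetical path
    )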
@@ -84,7 +108,7 @@ module GitHubChangelogGenerator
     def calculate_pages(client, method, request_options)
       # Makes the first API call so that we can call last_response
       check_github_response do
-        client.send(method, user_project, @request_options.merge(request_options))
+        client.send(method, user_project, DEFAULT_REQUEST_OPTIONS.merge(request_options))
       end

       last_response = client.last_response
@@ -100,11 +124,11 @@ module GitHubChangelogGenerator
     #
     # @return [Array <Hash>] array of tags in repo
     def github_fetch_tags
-      tags = []
-      page_i = 0
-      count_pages = calculate_pages(@client, "tags", {})
+      tags = []
+      page_i = 0
+      count_pages = calculate_pages(client, "tags", {})

-      iterate_pages(@client, "tags", {}) do |new_tags|
+      iterate_pages(client, "tags") do |new_tags|
         page_i += PER_PAGE_NUMBER
         print_in_same_line("Fetching tags... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
         tags.concat(new_tags)
@@ -118,8 +142,13 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
         Helper.log.info "Found #{tags.count} tags"
       end
       # tags are a Sawyer::Resource. Convert to hash
-      tags = tags.map { |h| stringify_keys_deep(h.to_hash) }
-      tags
+      tags.map { |resource| stringify_keys_deep(resource.to_hash) }
+    end
+
+    def closed_pr_options
+      @closed_pr_options ||= {
+        filter: "all", labels: nil, state: "closed"
+      }.tap { |options| options[:since] = @since if @since }
     end

     # This method fetch all closed issues and separate them to pull requests and pure issues
@@ -129,17 +158,10 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     def fetch_closed_issues_and_pr
       print "Fetching closed issues...\r" if @options[:verbose]
       issues = []
-      options = {
-        state: "closed",
-        filter: "all",
-        labels: nil
-      }
-      options[:since] = @since unless @since.nil?
-
-      page_i = 0
-      count_pages = calculate_pages(@client, "issues", options)
+      page_i = 0
+      count_pages = calculate_pages(client, "issues", closed_pr_options)

-      iterate_pages(@client, "issues", options) do |new_issues|
+      iterate_pages(client, "issues", closed_pr_options) do |new_issues|
         page_i += PER_PAGE_NUMBER
         print_in_same_line("Fetching issues... #{page_i}/#{count_pages * PER_PAGE_NUMBER}")
         issues.concat(new_issues)
@@ -148,12 +170,9 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       print_empty_line
       Helper.log.info "Received issues: #{issues.count}"

-      issues = issues.map { |h| stringify_keys_deep(h.to_hash) }
-
       # separate arrays of issues and pull requests:
-      issues.partition do |x|
-        x["pull_request"].nil?
-      end
+      issues.map { |issue| stringify_keys_deep(issue.to_hash) }
+            .partition { |issue_or_pr| issue_or_pr["pull_request"].nil? }
     end

     # Fetch all pull requests. We need them to detect :merged_at parameter
@@ -163,14 +182,10 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       pull_requests = []
       options = { state: "closed" }

-      unless @options[:release_branch].nil?
-        options[:base] = @options[:release_branch]
-      end
-
-      page_i = 0
-      count_pages = calculate_pages(@client, "pull_requests", options)
+      page_i = 0
+      count_pages = calculate_pages(client, "pull_requests", options)

-      iterate_pages(@client, "pull_requests", options) do |new_pr|
+      iterate_pages(client, "pull_requests", options) do |new_pr|
         page_i += PER_PAGE_NUMBER
         log_string = "Fetching merged dates... #{page_i}/#{count_pages * PER_PAGE_NUMBER}"
         print_in_same_line(log_string)
@@ -179,8 +194,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
       print_empty_line

       Helper.log.info "Pull Request count: #{pull_requests.count}"
-      pull_requests = pull_requests.map { |h| stringify_keys_deep(h.to_hash) }
-      pull_requests
+      pull_requests.map { |pull_request| stringify_keys_deep(pull_request.to_hash) }
     end

     # Fetch event for all issues and add them to 'events'
@@ -188,38 +202,71 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     # @param [Array] issues
     # @return [Void]
     def fetch_events_async(issues)
-      i = 0
-      threads = []
+      i = 0
+      # Add accept option explicitly for disabling the warning of preview API.
+      preview = { accept: Octokit::Preview::PREVIEW_TYPES[:project_card_events] }
+
+      barrier = Async::Barrier.new
+      semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)
+
+      Sync do
+        client = self.client

-      issues.each_slice(MAX_THREAD_NUMBER) do |issues_slice|
-        issues_slice.each do |issue|
-          threads << Thread.new do
+        issues.each do |issue|
+          semaphore.async do
             issue["events"] = []
-            iterate_pages(@client, "issue_events", issue["number"], {}) do |new_event|
+            iterate_pages(client, "issue_events", issue["number"], preview) do |new_event|
               issue["events"].concat(new_event)
             end
-            issue["events"] = issue["events"].map { |h| stringify_keys_deep(h.to_hash) }
+            issue["events"] = issue["events"].map { |event| stringify_keys_deep(event.to_hash) }
             print_in_same_line("Fetching events for issues and PR: #{i + 1}/#{issues.count}")
             i += 1
           end
         end
-        threads.each(&:join)
-        threads = []
-      end

-      # to clear line from prev print
-      print_empty_line
+        barrier.wait
+
+        # to clear line from prev print
+        print_empty_line
+      end

       Helper.log.info "Fetching events for issues and PR: #{i}"
     end

+    # Fetch comments for PRs and add them to "comments"
+    #
+    # @param [Array] prs The array of PRs.
+    # @return [Void] No return; PRs are updated in-place.
+    def fetch_comments_async(prs)
+      barrier = Async::Barrier.new
+      semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)
+
+      Sync do
+        client = self.client
+
+        prs.each do |pr|
+          semaphore.async do
+            pr["comments"] = []
+            iterate_pages(client, "issue_comments", pr["number"]) do |new_comment|
+              pr["comments"].concat(new_comment)
+            end
+            pr["comments"] = pr["comments"].map { |comment| stringify_keys_deep(comment.to_hash) }
+          end
+        end
+
+        barrier.wait
+      end
+
+      nil
+    end
+
     # Fetch tag time from repo
     #
     # @param [Hash] tag GitHub data item about a Tag
     #
     # @return [Time] time of specified tag
     def fetch_date_of_tag(tag)
-      commit_data = check_github_response { @client.commit(user_project, tag["commit"]["sha"]) }
+      commit_data = fetch_commit(tag["commit"]["sha"])
       commit_data = stringify_keys_deep(commit_data.to_hash)

       commit_data["commit"]["committer"]["date"]
@@ -227,28 +274,114 @@ Make sure, that you push tags to remote repo via 'git push --tags'"

     # Fetch commit for specified event
     #
+    # @param [String] commit_id the SHA of a commit to fetch
     # @return [Hash]
-    def fetch_commit(event)
-      check_github_response do
-        commit = @client.commit(user_project, event["commit_id"])
-        commit = stringify_keys_deep(commit.to_hash)
-        commit
+    def fetch_commit(commit_id)
+      found = commits.find do |commit|
+        commit["sha"] == commit_id
+      end
+      if found
+        stringify_keys_deep(found.to_hash)
+      else
+        client = self.client
+
+        # cache miss; don't add to @commits because unsure of order.
+        check_github_response do
+          commit = client.commit(user_project, commit_id)
+          commit = stringify_keys_deep(commit.to_hash)
+          commit
+        end
       end
     end

-    # Fetch all commits before certain point
+    # Fetch all commits
     #
-    # @return [String]
-    def commits_before(start_time)
-      commits = []
-      iterate_pages(@client, "commits_before", start_time.to_datetime.to_s) do |new_commits|
-        commits.concat(new_commits)
+    # @return [Array] Commits in a repo.
+    def commits
+      if @commits.empty?
+        Sync do
+          barrier = Async::Barrier.new
+          semaphore = Async::Semaphore.new(MAXIMUM_CONNECTIONS, parent: barrier)
+
+          if (since_commit = @options[:since_commit])
+            iterate_pages(client, "commits_since", since_commit, parent: semaphore) do |new_commits|
+              @commits.concat(new_commits)
+            end
+          else
+            iterate_pages(client, "commits", parent: semaphore) do |new_commits|
+              @commits.concat(new_commits)
+            end
+          end
+
+          barrier.wait
+
+          @commits.sort! do |b, a|
+            a[:commit][:author][:date] <=> b[:commit][:author][:date]
+          end
+        end
+      end
+      @commits
+    end
+
+    # Return the oldest commit in a repo
+    #
+    # @return [Hash] Oldest commit in the github git history.
+    def oldest_commit
+      commits.last
+    end
+
+    # @return [String] Default branch of the repo
+    def default_branch
+      @default_branch ||= client.repository(user_project)[:default_branch]
+    end
+
+    def commits_in_branch(name)
+      @branches ||= client.branches(user_project).map { |branch| [branch[:name], branch] }.to_h
+
+      if (branch = @branches[name])
+        commits_in_tag(branch[:commit][:sha])
+      end
+    end
+
+    # Fetch all SHAs occurring in or before a given tag and add them to
+    # "shas_in_tag"
+    #
+    # @param [Array] tags The array of tags.
+    # @return [Nil] No return; tags are updated in-place.
+    def fetch_tag_shas(tags)
+      # Reverse the tags array to gain max benefit from the @commits_in_tag_cache
+      tags.reverse_each do |tag|
+        tag["shas_in_tag"] = commits_in_tag(tag["commit"]["sha"])
       end
-      commits
     end

     private

+    def commits_in_tag(sha, shas = Set.new)
+      # Reduce multiple runs for the same tag
+      return @commits_in_tag_cache[sha] if @commits_in_tag_cache.key?(sha)
+
+      @graph ||= commits.map { |commit| [commit[:sha], commit] }.to_h
+      return shas unless (current = @graph[sha])
+
+      queue = [current]
+      while queue.any?
+        commit = queue.shift
+        # If we've already processed this sha, just grab it's parents from the cache
+        if @commits_in_tag_cache.key?(commit[:sha])
+          shas.merge(@commits_in_tag_cache[commit[:sha]])
+        else
+          shas.add(commit[:sha])
+          commit[:parents].each do |p|
+            queue.push(@graph[p[:sha]]) unless shas.include?(p[:sha])
+          end
+        end
+      end
+
+      @commits_in_tag_cache[sha] = shas
+      shas
+    end
+
     def stringify_keys_deep(indata)
       case indata
       when Array
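The new commits_in_tag walk is a plain breadth-first traversal of the commit graph keyed by SHA: start at the tag's commit, follow parent links, and reuse previously computed reachability sets via @commits_in_tag_cache. A self-contained toy version of the same traversal (hypothetical SHAs, plain hashes instead of API data, and no caching):

    require "set"

    # a -> b -> c, where c is the root commit; graph maps sha => { sha:, parents: }
    graph = {
      "a" => { sha: "a", parents: [{ sha: "b" }] },
      "b" => { sha: "b", parents: [{ sha: "c" }] },
      "c" => { sha: "c", parents: [] }
    }

    def shas_reachable_from(sha, graph)
      shas = Set.new
      return shas unless (current = graph[sha])

      queue = [current]
      while queue.any?
        commit = queue.shift
        shas.add(commit[:sha])
        commit[:parents].each do |parent|
          queue.push(graph[parent[:sha]]) unless shas.include?(parent[:sha])
        end
      end
      shas
    end

    p shas_reachable_from("a", graph)   # => #<Set: {"a", "b", "c"}>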
@@ -256,14 +389,15 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
           stringify_keys_deep(value)
         end
       when Hash
-        indata.each_with_object({}) do |(k, v), output|
-          output[k.to_s] = stringify_keys_deep(v)
+        indata.each_with_object({}) do |(key, value), output|
+          output[key.to_s] = stringify_keys_deep(value)
         end
       else
         indata
       end
     end

+    # Exception raised to warn about moved repositories.
     MovedPermanentlyError = Class.new(RuntimeError)

     # Iterates through all pages until there are no more :next pages to follow
@@ -274,57 +408,54 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @yield [Sawyer::Resource] An OctoKit-provided response (which can be empty)
     #
-    # @return [Integer] total number of pages
-    def iterate_pages(client, method, *args)
-      request_opts = extract_request_args(args)
-      args.push(@request_options.merge(request_opts))
-
-      number_of_pages = 1
+    # @return [void]
+    def iterate_pages(client, method, *arguments, parent: nil, **options)
+      options = DEFAULT_REQUEST_OPTIONS.merge(options)

-      check_github_response { client.send(method, user_project, *args) }
-      last_response = client.last_response
-      if last_response.status == 301
-        raise MovedPermanentlyError, last_response.data[:url]
+      check_github_response { client.send(method, user_project, *arguments, **options) }
+      last_response = client.last_response.tap do |response|
+        raise(MovedPermanentlyError, response.data[:url]) if response.status == 301
       end

       yield(last_response.data)

-      until (next_one = last_response.rels[:next]).nil?
-        number_of_pages += 1
-
-        last_response = check_github_response { next_one.get }
-        yield(last_response.data)
-      end
-
-      number_of_pages
-    end
-
-    def extract_request_args(args)
-      if args.size == 1 && args.first.is_a?(Hash)
-        args.delete_at(0)
-      elsif args.size > 1 && args.last.is_a?(Hash)
-        args.delete_at(args.length - 1)
-      else
-        {}
+      if parent.nil?
+        # The snail visits one leaf at a time:
+        until (next_one = last_response.rels[:next]).nil?
+          last_response = check_github_response { next_one.get }
+          yield(last_response.data)
+        end
+      elsif (last = last_response.rels[:last])
+        # OR we bring out the gatling gun:
+        parameters = querystring_as_hash(last.href)
+        last_page = Integer(parameters["page"])
+
+        (2..last_page).each do |page|
+          parent.async do
+            data = check_github_response { client.send(method, user_project, *arguments, page: page, **options) }
+            yield data
+          end
+        end
       end
     end

     # This is wrapper with rescue block
     #
     # @return [Object] returns exactly the same, what you put in the block, but wrap it with begin-rescue block
-    def check_github_response
-      Retriable.retriable(retry_options) do
-        yield
-      end
+    def check_github_response(&block)
+      Retriable.retriable(retry_options, &block)
     rescue MovedPermanentlyError => e
-      Helper.log.error("#{e.class}: #{e.message}")
-      sys_abort("The repository has moved, please update your configuration")
+      fail_with_message(e, "The repository has moved, update your configuration")
     rescue Octokit::Forbidden => e
-      Helper.log.error("#{e.class}: #{e.message}")
-      sys_abort("Exceeded retry limit")
+      fail_with_message(e, "Exceeded retry limit")
     rescue Octokit::Unauthorized => e
-      Helper.log.error("#{e.class}: #{e.message}")
-      sys_abort("Error: wrong GitHub token")
+      fail_with_message(e, "Error: wrong GitHub token")
+    end
+
+    # Presents the exception, and the aborts with the message.
+    def fail_with_message(error, message)
+      Helper.log.error("#{error.class}: #{error.message}")
+      sys_abort(message)
     end

     # Exponential backoff
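When a parent task pool is supplied, iterate_pages stops walking the :next links one by one and instead reads the final page number from the :last relation, then schedules pages 2..N as concurrent tasks. A rough illustration of extracting that page number (the URL is a made-up example; querystring_as_hash itself lives elsewhere in the gem and is approximated here with CGI):

    require "cgi"
    require "uri"

    last_href = "https://api.github.com/repos/example/repo/issues?per_page=100&page=7"  # hypothetical Link rel="last" href

    parameters = CGI.parse(URI.parse(last_href).query).transform_values(&:first)
    last_page  = Integer(parameters["page"])   # => 7

    # pages 2..7 would then each be fetched in their own task via parent.async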
@@ -348,7 +479,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
         Helper.log.warn("RETRY - #{exception.class}: '#{exception.message}'")
         Helper.log.warn("#{try} tries in #{elapsed_time} seconds and #{next_interval} seconds until the next try")
         Helper.log.warn GH_RATE_LIMIT_EXCEEDED_MSG
-        Helper.log.warn @client.rate_limit
+        Helper.log.warn(client.rate_limit)
       end
     end

@@ -360,7 +491,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @param [String] log_string
     def print_in_same_line(log_string)
-      print log_string + "\r"
+      print "#{log_string}\r"
     end

     # Print long line with spaces on same line to clear prev message
@@ -373,7 +504,7 @@ Make sure, that you push tags to remote repo via 'git push --tags'"
     #
     # @return [String]
     def fetch_github_token
-      env_var = @options[:token] ? @options[:token] : (ENV.fetch CHANGELOG_GITHUB_TOKEN, nil)
+      env_var = @options[:token].presence || ENV["CHANGELOG_GITHUB_TOKEN"]

       Helper.log.warn NO_TOKEN_PROVIDED unless env_var
