gitolemy 0.0.4
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/bin/conglomerate.rb +145 -0
- data/bin/serve.rb +59 -0
- data/lib/cache.rb +92 -0
- data/lib/commit.rb +139 -0
- data/lib/commit_stats.rb +225 -0
- data/lib/diff.rb +65 -0
- data/lib/file_diff.rb +98 -0
- data/lib/file_helper.rb +58 -0
- data/lib/file_manager.rb +116 -0
- data/lib/function_trace/c_syntax_tracer.rb +111 -0
- data/lib/function_trace/python_tracer.rb +103 -0
- data/lib/function_trace/ruby_tracer.rb +64 -0
- data/lib/function_trace/tracer.rb +44 -0
- data/lib/gitolemy.rb +1 -0
- data/lib/integrations/airbrake_client.rb +134 -0
- data/lib/integrations/code_climate_client.rb +79 -0
- data/lib/integrations/covhura_client.rb +38 -0
- data/lib/integrations/error_client.rb +55 -0
- data/lib/integrations/git_client.rb +183 -0
- data/lib/integrations/jira_client.rb +145 -0
- data/lib/integrations/rollbar_client.rb +147 -0
- data/lib/line.rb +124 -0
- data/lib/line_tracker.rb +90 -0
- data/lib/loggr.rb +24 -0
- data/lib/notifier.rb +20 -0
- data/lib/project_cache.rb +13 -0
- data/lib/risk_analyzer.rb +53 -0
- data/lib/secure_file_store.rb +61 -0
- data/lib/source_tree.rb +23 -0
- data/lib/stack_tracer.rb +197 -0
- data/lib/store.rb +96 -0
- data/lib/util.rb +10 -0
- data/lib/virtual_file.rb +218 -0
- data/lib/virtual_file_system.rb +78 -0
- data/lib/virtual_function.rb +38 -0
- data/lib/virtual_tree.rb +233 -0
- metadata +223 -0
@@ -0,0 +1,55 @@
|
|
1
|
+
require "active_support/core_ext/object"
|
2
|
+
|
3
|
+
require_relative "../cache"
|
4
|
+
|
5
|
+
# Base class for error-tracking integrations (e.g. RollbarClient).
#
# Subclasses are expected to populate:
#   @errors  - Array of Hashes (each with :last_time), sorted ascending by :last_time.
#   @deploys - Array of Hashes (each with :commit_id and :timestamp),
#              sorted ascending by :timestamp.
class ErrorClient
  ERROR_CACHE_KEY = "errors"
  DEPLOY_CACHE_KEY = "deploys"

  # TODO: Shared
  # Returns (and consumes) the errors attributable to +commit+'s deploy:
  # every queued error whose :last_time precedes the next deploy's timestamp,
  # or all remaining errors when this is the latest deploy.
  # Mutates @errors and @deploys (hence the bang).
  # Returns [] when +commit+ was never deployed.
  def get_errors!(commit)
    deploy = commit_deploy(commit)
    return [] if deploy.nil?

    deploy_index = @deploys.index(deploy) || 0
    # Drop this deploy and everything before it from the queue.
    @deploys = @deploys[(deploy_index + 1)..-1] || []
    if @deploys.first.nil?
      # No later deploy: every remaining error belongs to this one.
      errors = @errors
      @errors = []
    else
      errors = []
      errors << @errors.shift while @errors.any? && @errors.first[:last_time] < @deploys.first[:timestamp]
    end
    errors
  end

  # TODO:
  # 1: sync to last deploy?
  # 2: Shared
  # Drops queued errors older than +commit+'s date so subsequent
  # get_errors! calls only see errors from this commit onwards.
  def sync!(commit)
    @errors.shift while @errors.any? && @errors.first[:last_time] < commit.date
  end

  private

  # The deploy record (if any) whose :commit_id matches +commit+.
  def commit_deploy(commit)
    @deploys.detect { |deploy| deploy[:commit_id] == commit.commit_id }
  end

  # Loads previously cached errors, restoring symbol ids and DateTime
  # timestamps (serialized as RFC 3339 strings), sorted by :last_time.
  def load_from_cache
    Cache
      .read(ERROR_CACHE_KEY, {})
      .map do |error_id, error|
        error = error.deep_symbolize_keys
        error[:error_id] = error[:error_id].to_sym
        error[:first_time] = DateTime.rfc3339(error[:first_time])
        error[:last_time] = DateTime.rfc3339(error[:last_time])
        error
      end
      .sort_by { |error| error[:last_time] }
  end
end
|
@@ -0,0 +1,183 @@
|
|
1
|
+
require "active_support/core_ext/hash"
|
2
|
+
|
3
|
+
require_relative "../source_tree"
|
4
|
+
require_relative "../cache"
|
5
|
+
require_relative "../loggr"
|
6
|
+
require_relative "../store"
|
7
|
+
require_relative "../util"
|
8
|
+
require_relative "../commit"
|
9
|
+
|
10
|
+
# Thin wrapper around the `git` command-line tool. Reads commit history,
# trees, and diffs for a repository, coordinating with Cache/Store so that
# already-indexed commits are not re-processed.
class GitClient
  # Start of a commit header line as emitted by full_log's --pretty format:
  # "<40-hex-sha>|||<parents>|||<author>|||<date>|||<subject>".
  COMMIT_REGEX = /^[0-9a-f]{40}\|\|\|/
  # Mode git prints for tree (directory) entries.
  TREE_PERMISSION = "040000"

  attr_accessor :notification_url

  # config keys (all optional): "git_dir", "default_branch", "notification_url".
  def initialize(config)
    @git_dir = config["git_dir"]
    @default_branch = config["default_branch"]
    @notification_url = config["notification_url"]
  end

  # Returns the not-yet-indexed commits of +branch+, oldest first.
  # Side effects: records newly indexed commit ids in the Cache and stores
  # the parsed commits via cache_commits. Returns [] when up to date.
  def commits(branch)
    commit_ids = short_log(branch)
    cache, index = commits_to_index(branch, commit_ids)
    return [] if index == 0

    commits = full_log(branch, index)
      .reduce([], &fold_reducer(COMMIT_REGEX))
      .map { |commit_lines| Commit.from_git(commit_lines, self) }

    commits = SourceTree
      .new
      .merge_collapse(commits)

    commits.last.set_cached(cache)
    cache_commits(commits)

    commits.reverse
  end

  # Raw `git diff-tree` output for a commit (recursive, tree entries included).
  def diff_tree(commit_id)
    exec("diff-tree -t -r #{commit_id}")
  end

  # Parses diff-tree output into tree-level change records, shallowest path
  # first. Entries where neither side is a tree (plain file changes) are
  # ignored; a "D" operation maps to :delete, everything else to :change.
  def parse_diff_tree(tree)
    tree
      .reduce([]) do |acc, object|
        # Drop the leading ":"; fields are "<a_mode> <b_mode> <a_id> <b_id> <op>\t<path>".
        a_perm, b_perm, a_id, b_id, operation, path = object[1..-1].split(" ")
        if a_perm == TREE_PERMISSION || b_perm == TREE_PERMISSION
          acc << {
            a_tree_id: a_id,
            b_tree_id: b_id,
            path: path,
            operation: operation == "D" ? :delete : :change
          }
        end
        acc
      end
      .sort_by { |entry| entry[:path].split(File::SEPARATOR).length }
  end

  # Raw recursive listing including tree entries.
  def ls_tree(commit_id)
    exec("ls-tree -r -t #{commit_id}")
  end

  # Raw recursive listing (blobs only).
  def file_tree(commit_id)
    exec("ls-tree -r #{commit_id}")
  end

  # Parses ls-tree output into change records for tree entries only, using
  # an all-zero sha as the "before" id (nothing existed previously).
  def parse_ls_tree(tree)
    tree.reduce([]) do |acc, object|
      perm, type, tree_id, path = object.split(" ")
      if type == "tree"
        acc << {
          a_tree_id: "0" * 40,
          b_tree_id: tree_id,
          path: path,
          operation: :change
        }
      end
      acc
    end
  end

  # Unified diff (zero context, full index sha1s) between two commits.
  def diff(commit_id_a, commit_id_b)
    exec("diff --full-index -l 10000 -U0 #{commit_id_a} #{commit_id_b}")
  end

  # All origin remote branches, formatted as "remotes/origin/<name>".
  # The "a -> b" form (symbolic refs like HEAD) is resolved to its target.
  def remote_branches
    exec("branch -r")
      .map { |branch| branch.split(" -> ").last.strip }
      .select { |branch| branch.index("origin/") == 0 }
      .map { |branch| "remotes/#{branch}" }
  end

  private

  # Runs a git subcommand and returns its output as chomped lines.
  # NOTE: intentionally shadows Kernel#exec within this class.
  def exec(cmd)
    # scrub replaces invalid UTF-8 byte sequences with U+FFFD. The previous
    # `encode("UTF-8", {invalid: :replace})` was a no-op: transcoding a
    # string to its own encoding leaves invalid bytes untouched.
    `git --git-dir=#{@git_dir || ".git"} #{cmd}`
      .scrub
      .lines
      .map(&:chomp)
  end

  # Detailed log with zero-context patches for the newest +count+ commits
  # of +branch+ (count = nil means no limit).
  def full_log(branch, count)
    Loggr.instance.info("SHOW COMMITS")
    count_option = count.nil? ? "" : "-n #{count}"
    exec("log \
      --topo-order \
      --oneline \
      -U0 \
      --diff-algorithm=histogram \
      --full-history \
      --full-index \
      --date=rfc \
      --pretty=format:'%H|||%P|||%an <%aE>|||%cd|||%s' \
      #{count_option} #{branch}"
    )
  end

  # Commit ids (as symbols) reachable from +branch+, newest first.
  # The graph + grep keeps only lines whose commit marker '*' is in the
  # first column (the mainline lane of the graph).
  def short_log(branch)
    exec("log \
      --graph \
      --topo-order \
      --oneline \
      --pretty=format:%H #{branch} \
      | grep '^\*'")
      .map { |line| line.split(" ").last.to_sym }
  end

  # Recursive listing (trees + blobs) for a commit.
  def tree(commit_id)
    exec("ls-tree -r -t #{commit_id}")
  end

  # Parses ls-tree output into {object_id:, type:, path:} records,
  # shallowest paths first.
  def parse_tree(tree)
    tree
      .map do |object|
        permission, type, object_id, path = object.split(" ")
        {
          object_id: object_id,
          type: type == "tree" ? :tree : :file,
          path: path.strip
        }
      end
      .sort_by { |object| object[:path].split(File::SEPARATOR).length }
  end

  # Determines how much of +branch+ still needs indexing.
  # Returns [cache, index] where +cache+ is the parsed tree of the last
  # indexed commit ([] when none) and +index+ is the number of new commits
  # (nil when nothing was ever indexed, 0 when fully up to date).
  def commits_to_index(branch, commit_ids)
    last_indexed_commit = Cache.last_indexed_commit(branch, commit_ids)

    # Maybe use default_branch, also git merge-base.
    if last_indexed_commit.nil? && branch != "master"
      last_indexed_commit = Cache.last_indexed_commit("master", commit_ids)
    end

    index = commit_ids.index(last_indexed_commit)

    # Forced re-sync (GITOLEMY_SYNC): re-index the most recent commit.
    if sync?(index, commit_ids)
      index = 1
      last_indexed_commit = commit_ids[index]
    end

    # When switching to new branch, save all previously indexed commits.
    unless index.nil?
      Cache.index_commits(branch, commit_ids[index..-1].reverse)
    end

    cache = index.nil? ? [] : parse_tree(tree(last_indexed_commit))

    [cache, index]
  end

  # Persists parsed commits to the Store index and flushes its cache.
  def cache_commits(commits)
    commits.each { |commit| Store::Commit.index(commit.as_json) }
    Store::Commit.cache
  end

  # A forced re-sync is requested via GITOLEMY_SYNC and only applies when
  # the branch tip is already indexed (index == 0).
  def sync?(index, commit_ids)
    index == 0 &&
      commit_ids.length > 1 &&
      ENV["GITOLEMY_SYNC"] == "true"
  end
end
|
@@ -0,0 +1,145 @@
|
|
1
|
+
require "json"
|
2
|
+
require "net/http"
|
3
|
+
require "active_support/core_ext/object"
|
4
|
+
|
5
|
+
require_relative "../loggr"
|
6
|
+
require_relative "../cache"
|
7
|
+
require_relative "../store"
|
8
|
+
|
9
|
+
# Fetches issue metadata from the Jira REST API (v2) and merges parsed
# issue/bug ids onto commits. Fetched issues are cached to avoid refetching.
class JiraClient
  ISSUES_CACHE_KEY = "issues"
  STORY_TYPE = "Story"
  BUG_TYPE = "Bug"

  # config keys: "client_config" (site/username/password/auth_type hash) and
  # "business_value_field_id" (numeric id of the Jira custom field).
  # NOTE: issue_id_rgx performs a network request (fetches project keys).
  def initialize(config)
    @client_config = config["client_config"]
    @issue_id_rgx = issue_id_rgx
    @cached_issues = load_cached_issues
    @business_value_custom_field = "customfield_#{config["business_value_field_id"]}"
  end

  # TODO: Store Issues as they're fetched.
  # Tags each commit with its issue id (parsed from the commit subject),
  # fetches any uncached issues, indexes the non-empty ones in the Store,
  # and persists the issue cache. Returns the indexed results.
  def merge_and_fetch_issues!(commits)
    issue_ids = fetch_issues(merge_issue_ids!(commits))
      .reject { |issue_id, issue| issue.empty? }
      .map { |issue_id, issue| Store::Issue::index(issue) }

    Store::Issue::cache(@cached_issues)

    issue_ids
  end

  # Collects commits whose referenced issue is a Bug, tags each commit with
  # the bug id, and indexes each bug in the Store.
  def merge_and_fetch_bugs!(commits)
    commits
      .reduce([]) do |acc, commit|
        issue_id = parse_id(commit.subject).to_sym
        issue = issue_id.present? ? find_by_id(issue_id) : nil
        # Compare against the declared BUG_TYPE constant (was a hard-coded
        # "Bug" literal, inconsistent with the constant defined above).
        if issue.present? && issue[:issue_type] == BUG_TYPE
          bug = {}.merge(issue)
          bug[:bug_id] = bug.delete(:issue_id)
          commit.bug_id = bug[:bug_id]
          acc << bug
        end
        acc
      end
      .map { |bug| Store::Bug::index(bug) }
  end

  private

  # Builds a JIRA::Client from the string-keyed config hash.
  # NOTE(review): appears unused here; HTTP calls go through #request instead.
  def client(jira_config)
    jira_config["auth_type"] = jira_config["auth_type"].to_sym
    jira_config = Hash[jira_config.map { |key, value| [key.to_sym, value] }]
    JIRA::Client.new(jira_config)
  end

  # TODO: support many fix versions...
  # The cached or freshly fetched issue as a symbol-keyed hash, or {} when
  # fetching/parsing fails (deliberate best-effort).
  def find_by_id(id)
    return @cached_issues[id] if @cached_issues.key?(id)

    Loggr.instance.info("FETCHING ISSUE: #{id}")
    issue = fetch_issue(id)
    {
      issue_id: id,
      status: issue.dig("fields", "status", "name"),
      assignee: extract_person(issue.dig("fields", "assignee")),
      reporter: extract_person(issue.dig("fields", "reporter")),
      issue_type: issue.dig("fields", "issuetype", "name"),
      business_value: issue.dig("fields", @business_value_custom_field) || 0.0,
      priority: issue.dig("fields", "priority", "name"),
      fix_versions: issue.dig("fields", "fixVersions").map { |fix| fix["name"] }.first,
      link: "#{@client_config["site"]}/browse/#{id}",
      summary: issue.dig("fields", "summary")
    }
  rescue StandardError
    # Best-effort: a missing/malformed issue becomes an empty hash, which
    # callers filter out. (Was a bare rescue; StandardError made explicit.)
    {}
  end

  # First issue-key match (e.g. "ABC-123") in the commit subject, or "".
  def parse_id(commit_subject)
    # `&.[](0)` replaces ActiveSupport's `.try(:[], 0)`: identical result
    # (nil-safe first capture), standard library only.
    @issue_id_rgx.match(commit_subject)&.[](0) || ""
  end

  # Maps each requested id to its issue hash ({} on fetch failure).
  def fetch_issues(ids)
    ids.reduce({}) do |obj, id|
      obj[id] = find_by_id(id)
      obj
    end
  end

  # Tags each commit with its parsed issue id (as a symbol) and returns
  # the unique, non-empty ids.
  def merge_issue_ids!(commits)
    commits
      .map do |commit|
        commit.issue_id = parse_id(commit.subject).to_sym
        commit.issue_id
      end
      .reject { |id| id.empty? }
      .uniq
  end

  # Loads the issue cache, restoring symbol keys and symbol issue ids.
  def load_cached_issues
    Cache.read(ISSUES_CACHE_KEY, {})
      .reduce({}) do |acc, (issue_id, issue)|
        issue = issue.symbolize_keys
        issue[:issue_id] = issue_id.to_sym
        acc[issue_id.to_sym] = issue
        acc
      end
  end

  # TODO: Cache projects
  # Case-insensitive regex matching "<PROJECT_KEY>-<digits>" for every
  # project key visible to the configured user (requires a network call).
  def issue_id_rgx
    Regexp.new("(#{project_keys.join("|")})-\\d+", Regexp::IGNORECASE)
  end

  # GET /issue/<id> from the Jira REST API.
  def fetch_issue(issue_id)
    request("/issue/#{issue_id}")
  end

  # {email:, name:} extracted from a Jira user payload; {} when absent.
  def extract_person(data)
    return {} if data.nil?
    {
      email: data["emailAddress"],
      name: data["displayName"]
    }
  end

  # GET /project — all projects visible to the configured user.
  def fetch_projects
    request("/project")
  end

  # Authenticated GET against the Jira REST API v2; returns parsed JSON.
  def request(uri)
    uri = URI("#{@client_config["site"]}/rest/api/2#{uri}")
    req = Net::HTTP::Get.new(uri)
    req.basic_auth(@client_config["username"], @client_config["password"])
    http = Net::HTTP.new(uri.hostname, uri.port)
    http.use_ssl = true
    JSON.parse(http.request(req).body)
  end

  # Project keys used to build the issue-id regex.
  def project_keys
    fetch_projects
      .map { |project| project["key"] }
  end
end
|
@@ -0,0 +1,147 @@
|
|
1
|
+
require "json"
|
2
|
+
require "date"
|
3
|
+
require "net/http"
|
4
|
+
|
5
|
+
require_relative "error_client"
|
6
|
+
require_relative "../store"
|
7
|
+
require_relative "../loggr"
|
8
|
+
|
9
|
+
# ErrorClient backed by the Rollbar REST API. Fetches items (errors) and
# deploys for the configured project/environment at construction time.
class RollbarClient < ErrorClient

  # config keys: "username", "api_key", "project", "environment".
  # Without an api_key the client starts with empty error/deploy queues.
  def initialize(config)
    @user = config["username"]
    @key = config["api_key"]
    @project = config["project"]
    @environment = config["environment"]
    @errors = @key.nil? ? [] : errors
    @deploys = @key.nil? ? [] : deploys
  end

  private

  # All deploys for the configured environment, oldest first.
  def deploys
    fetch_deploys
      .map do |deploy|
        {
          commit_id: deploy["revision"].to_sym,
          environment: deploy["environment"],
          timestamp: Time.at(deploy["start_time"]).to_datetime
        }
      end
      .sort_by { |deploy| deploy[:timestamp] }
      .select { |deploy| deploy[:environment] == @environment }
  end

  # TODO: Cached errors are already applied, maybe skip...
  # Newly fetched Rollbar items (plus cached ones) normalized into error
  # hashes, filtered to the configured environment and sorted by last
  # occurrence. Side effect: indexes every error into the Store cache.
  def errors
    cached_errors = load_from_cache

    errors = fetch_errors(cached_errors)["items"]
      .flat_map(&method(:select_traces))
      .reject(&:nil?)
      .map do |trace|
        stack_trace = trace["frames"].map do |frame|
          file = frame["filename"]
          line = frame["lineno"]
          function = frame["method"]

          {
            file: file,
            line: line,
            function: function
          }
        end

        {
          error_id: trace[:id],
          first_time: trace[:first_time],
          last_time: trace[:last_time],
          environment: trace[:environment],
          type: trace[:type],
          message: trace[:message],
          link: "https://rollbar.com/#{@user}/#{@project}/items/#{trace[:counter]}/",
          total_occurrences: trace[:total_occurrences],
          # Frame order from Rollbar is inverted here; assumes the consumer
          # wants the opposite ordering — TODO confirm against StackTracer.
          stack_trace: stack_trace.reverse
        }
      end
      .concat(cached_errors)
      .select { |error| error[:environment] == @environment }
      .sort_by { |error| error[:last_time] }

    errors.each { |error| Store::Error.index(error) }
    Store::Error.cache

    errors
  end

  # One trace hash per stack trace in the item's most recent occurrence,
  # annotated with item-level metadata; nil when no trace data exists.
  def select_traces(item)
    error = detail_error(item["id"])["result"]["instances"].first
    # A single exception carries "trace"; chained exceptions use "trace_chain".
    traces = error["data"]["body"]["trace"].present? ?
      [error["data"]["body"]["trace"]] :
      error["data"]["body"]["trace_chain"]

    return nil if traces.nil?

    traces.map do |trace|
      trace[:id] = item["id"].to_s.to_sym
      trace[:environment] = item["environment"]
      trace[:total_occurrences] = item["total_occurrences"]
      trace[:error] = error
      trace[:type] = item["level"]
      trace[:message] = item["title"]
      trace[:first_time] = Time.at(item["first_occurrence_timestamp"]).to_datetime
      trace[:last_time] = Time.at(item["last_occurrence_timestamp"]).to_datetime
      trace[:counter] = item["counter"]
      trace
    end
  end

  # Pages through /api/1/deploys until an empty page, then caches the list.
  # Best-effort: on any error the deploys collected so far are returned.
  def fetch_deploys(page = 1)
    deploys = []
    loop do
      resp = JSON.parse(Net::HTTP.get(
        URI("https://api.rollbar.com/api/1/deploys/?access_token=#{@key}&page=#{page}")
      ))["result"]["deploys"]

      break if resp.count == 0

      deploys.concat(resp)
      page += 1
    end
    Cache.write(DEPLOY_CACHE_KEY, deploys)
    deploys
  rescue StandardError
    # Best-effort (was a bare rescue): partial results beat none.
    deploys
  end

  # Occurrences ("instances") of a single Rollbar item.
  def detail_error(item_id)
    Loggr.instance.info("FETCHING ERROR: #{item_id}")
    JSON.parse(Net::HTTP.get(
      URI("https://api.rollbar.com/api/1/item/#{item_id}/instances/?access_token=#{@key}")
    ))
  end

  # Pages through /api/1/items, stopping early as soon as an already-cached
  # item id is seen (older pages would all be cached too).
  # Best-effort: on any error the items collected so far are returned, in
  # the same {"items" => [...], "count" => n} shape.
  def fetch_errors(cached_errors, page = 1)
    errors = {"items" => [], "count" => 1}
    while errors["items"].length < errors["count"]
      resp = JSON.parse(Net::HTTP.get(
        URI("https://api.rollbar.com/api/1/items/?access_token=#{@key}&page=#{page}")
      ))

      errors["count"] = resp["total_count"]

      while item = resp["result"]["items"].shift
        if cached_errors.detect { |cached_item| cached_item[:error_id] == item["id"].to_s.to_sym }
          return errors
        else
          errors["items"] << item
        end
      end

      page += 1
    end
    errors
  rescue StandardError
    # Best-effort (was a bare rescue): partial results beat none.
    errors
  end
end
|