jira_cache 0.2.1
- checksums.yaml +7 -0
- data/.codeclimate.yml +10 -0
- data/.env.example +10 -0
- data/.env.test +10 -0
- data/.gitignore +15 -0
- data/.rspec +2 -0
- data/.rubocop.yml +12 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.travis.yml +15 -0
- data/CODE_OF_CONDUCT.md +13 -0
- data/Gemfile +21 -0
- data/Guardfile +1 -0
- data/HISTORY.md +22 -0
- data/LICENSE.txt +22 -0
- data/README.md +73 -0
- data/Rakefile +7 -0
- data/VERSION +1 -0
- data/bin/console +11 -0
- data/bin/db/migrate +7 -0
- data/bin/db/psql +7 -0
- data/bin/db/reset +7 -0
- data/bin/setup +7 -0
- data/bin/sync +14 -0
- data/config.ru +7 -0
- data/config/Guardfile +35 -0
- data/config/boot.rb +11 -0
- data/config/db_migrations/001_create_issues.rb +21 -0
- data/docker-compose.yml +25 -0
- data/jira_cache.gemspec +41 -0
- data/lib/jira_cache.rb +53 -0
- data/lib/jira_cache/client.rb +185 -0
- data/lib/jira_cache/data.rb +10 -0
- data/lib/jira_cache/data/issue_repository.rb +94 -0
- data/lib/jira_cache/notifier.rb +30 -0
- data/lib/jira_cache/sync.rb +110 -0
- data/lib/jira_cache/version.rb +3 -0
- data/lib/jira_cache/webhook_app.rb +55 -0
- data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"multiple_requests/"_start_at_0.json +1 -0
- data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"multiple_requests/"_start_at_10.json +1 -0
- data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"multiple_requests/"_start_at_5.json +1 -0
- data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"single_request/"_start_at_0.json +1 -0
- data/spec/fixtures/responses/get_issue_many_worklogs.json +1 -0
- data/spec/fixtures/responses/get_issue_not_found.json +1 -0
- data/spec/fixtures/responses/get_issue_simple.json +1 -0
- data/spec/fixtures/responses/get_issue_worklog_many_worklogs.json +1 -0
- data/spec/spec_helper.rb +47 -0
- data/spec/support/response_fixture.rb +16 -0
- data/spec/unit/client_spec.rb +130 -0
- data/spec/unit/data/issue_repository_spec.rb +58 -0
- data/spec/unit/notifier_spec.rb +18 -0
- data/spec/unit/sync_spec.rb +116 -0
- data/spec/unit/webhook_app_spec.rb +96 -0
- metadata +280 -0
data/lib/jira_cache/webhook_app.rb ADDED
@@ -0,0 +1,55 @@
# frozen_string_literal: true
require "sinatra/base"
require "json"
require "jira_cache/sync"

module JiraCache

  # A Sinatra::Base application to process JIRA webhooks.
  #
  # Defines 2 routes:
  # - GET /: provides a basic JSON status,
  # - POST /: which processes a webhook.
  class WebhookApp < Sinatra::Base

    # GET /
    # Returns JSON with the app name and a status
    get "/" do
      default_response
    end

    # POST /
    # Endpoint for JIRA webhook
    post "/" do
      client = self.class.client
      request.body.rewind # in case it was already read
      data = JSON.parse(request.body.read)
      issue_key = data["issue"]["key"]

      case (webhook_event = data["webhookEvent"])
      when "jira:issue_created", "jira:issue_updated", "jira:worklog_updated"
        JiraCache::Sync.new(client).sync_issue(issue_key)
      when "jira:issue_deleted"
        JiraCache::Sync.new.mark_deleted([issue_key])
      else
        raise "Unknown webhook event \"#{webhook_event}\""
      end

      default_response
    end

    # Returns the client (`JiraCache::Client`) defined on
    # the class (see `JiraCache.webhook_app(...)`).
    def client
      self.class.client
    end

    def default_response
      {
        app: "jira_cache/webhook_app",
        status: "ok",
        client: client.info
      }.to_json
    end
  end
end
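For reference, the POST / handler above reads only two fields from the JIRA webhook body: webhookEvent and issue.key. A minimal sketch of a payload that would exercise the sync branch; the event name and issue key are placeholders:

require "json"

# Hypothetical webhook body; only the two fields the handler reads are shown.
payload = {
  "webhookEvent" => "jira:issue_updated",   # routed to Sync#sync_issue
  "issue"        => { "key" => "MR-001" }
}

# "jira:issue_deleted" is routed to Sync#mark_deleted instead,
# and any other event name raises "Unknown webhook event ...".
puts JSON.generate(payload)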
data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"multiple_requests/"_start_at_0.json
ADDED
@@ -0,0 +1 @@
{"expand":"schema,names","startAt":0,"maxResults":5,"total":11,"issues":[{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"001","self":"https://example.com/rest/api/2/issue/001","key":"MR-001"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"002","self":"https://example.com/rest/api/2/issue/002","key":"MR-002"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"003","self":"https://example.com/rest/api/2/issue/003","key":"MR-003"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"004","self":"https://example.com/rest/api/2/issue/004","key":"MR-004"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"005","self":"https://example.com/rest/api/2/issue/005","key":"MR-005"}]}
data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"multiple_requests/"_start_at_10.json
ADDED
@@ -0,0 +1 @@
{"expand":"schema,names","startAt":10,"maxResults":5,"total":11,"issues":[{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"011","self":"https://example.com/rest/api/2/issue/011","key":"MR-011"}]}
data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"multiple_requests/"_start_at_5.json
ADDED
@@ -0,0 +1 @@
{"expand":"schema,names","startAt":5,"maxResults":5,"total":11,"issues":[{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"006","self":"https://example.com/rest/api/2/issue/006","key":"MR-006"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"007","self":"https://example.com/rest/api/2/issue/007","key":"MR-007"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"008","self":"https://example.com/rest/api/2/issue/008","key":"MR-008"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"009","self":"https://example.com/rest/api/2/issue/009","key":"MR-009"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"010","self":"https://example.com/rest/api/2/issue/010","key":"MR-010"}]}
data/spec/fixtures/responses/get_issue_keys_jql_query_project=/"single_request/"_start_at_0.json
ADDED
@@ -0,0 +1 @@
{"expand":"schema,names","startAt":0,"maxResults":5,"total":2,"issues":[{"expand":"operations,versionedrepresentations,editmeta,changelog,transitions,renderedfields","id":"001","self":"https://example.com/rest/api/2/issue/001","key":"MR-001"},{"expand":"operations,versionedRepresentations,editmeta,changelog,transitions,renderedFields","id":"002","self":"https://example.com/rest/api/2/issue/002","key":"MR-002"}]}
data/spec/fixtures/responses/get_issue_many_worklogs.json ADDED
@@ -0,0 +1 @@
{"fields":{"worklog":{"total":"3","maxResults":"2","worklogs":["fakeWorklog1","fakeWorklog2"]}}}
data/spec/fixtures/responses/get_issue_not_found.json ADDED
@@ -0,0 +1 @@
{"errorMessages":["Issue Does Not Exist"],"errors":{}}
data/spec/fixtures/responses/get_issue_simple.json ADDED
@@ -0,0 +1 @@
{"fields":{"worklog":{"total":"1","maxResults":"2"}}}
data/spec/fixtures/responses/get_issue_worklog_many_worklogs.json ADDED
@@ -0,0 +1 @@
{"total":"3","maxResults":"3","worklogs":["fakeWorklog1","fakeWorklog2","fakeWorklog3"]}
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,47 @@
# frozen_string_literal: true
# Load dependencies
require "rubygems"
require "bundler/setup"
require "rspec"
require "webmock/rspec"
require "rack/test"
require "pry"

require "simplecov"
SimpleCov.start do
  add_filter do |src|
    # Ignoring files from the spec directory
    src.filename =~ %r{/spec/}
  end
end

# ENV["APP_ENV"] replaces "RACK_ENV" since we're not in
# a Rack context.
ENV["APP_ENV"] = "test"
require File.expand_path("../../config/boot", __FILE__)

Dir[File.expand_path("../support/**/*.rb", __FILE__)].each { |f| require(f) }

# Database setup, teardown and cleanup during tests
require "sequel/extensions/migration"
require "jira_cache/data"
require "jira_cache/data/issue_repository"
client = JiraCache::Data::DB

MIGRATIONS_DIR = File.expand_path("../../config/db_migrations", __FILE__)
RSpec.configure do |config|

  config.before(:all) do
    Sequel::Migrator.apply(client, MIGRATIONS_DIR)
  end

  config.after(:each) do
    JiraCache::Data::IssueRepository.delete_where("TRUE")
  end

  config.after(:all) do
    Sequel::Migrator.apply(client, MIGRATIONS_DIR, 0)
  end
end

require "jira_cache"
data/spec/support/response_fixture.rb ADDED
@@ -0,0 +1,16 @@
# Saves and loads responses to be used as fixtures for HTTP request
# responses.
class ResponseFixture

  def self.create(identifier, payload)
    File.write(file(identifier), payload.to_json)
  end

  def self.get(identifier)
    File.read(file(identifier))
  end

  def self.file(identifier)
    File.expand_path("../../fixtures/responses/#{identifier}.json", __FILE__)
  end
end
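A brief usage note: in the specs below, fixtures are read back by identifier, which maps to a file under spec/fixtures/responses. For example (the first identifier comes from the fixture files listed above; the second is made up):

# Reads spec/fixtures/responses/get_issue_not_found.json as a raw JSON string.
body = ResponseFixture.get("get_issue_not_found")

# ResponseFixture.create("some_identifier", { "a" => 1 }) would write the
# corresponding .json file for a new fixture.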
data/spec/unit/client_spec.rb ADDED
@@ -0,0 +1,130 @@
# frozen_string_literal: true
require "spec_helper"
require "jira_cache/client"
require "support/response_fixture"
require "webmock/rspec"

describe JiraCache::Client do
  let(:domain) { "example.com" }
  let(:username) { "username" }
  let(:password) { "password" }
  let(:notifier) { nil }

  let(:client) do
    described_class.new(
      domain: domain,
      username: username,
      password: password,
      notifier: notifier
    )
  end

  describe "#issue_data(id_or_key)" do
    let(:response) { ResponseFixture.get("get_issue_#{issue_key}") }
    let(:headers) do
      {
        "Content-Type" => "application/json",
        "Authorization" => "Basic dXNlcm5hbWU6cGFzc3dvcmQ="
      }
    end
    let(:url) { "https://#{domain}/rest/api/2/issue/#{issue_key}" }
    let(:url_query) { "?expand=renderedFields,changelog" }

    before do
      stub_request(:get, "#{url}#{url_query}")
        .with(headers: headers)
        .to_return(status: 200, body: response, headers: headers)
    end

    context "issue not found" do
      let(:issue_key) { "not_found" }

      it "returns nil" do
        result = client.issue_data(issue_key)
        expect(result).to be_nil
      end
    end

    context 'simple issue' do
      let(:issue_key) { 'simple' }
      let(:notifier) { double('Notifier', publish: nil) }
      let(:issue_data) { JSON.parse(response) }

      it 'fetches the issue data' do
        result = client.issue_data(issue_key)
        expect(result.keys).to include('fields')
      end

      it 'publishes an event through the notifier' do
        expect(notifier).to receive(:publish).with('fetched_issue', key: issue_key, data: issue_data)
        client.issue_data(issue_key)
      end
    end

    context 'issue with lots of worklog' do
      let(:issue_key) { 'many_worklogs' }

      it 'fetches all worklogs' do
        worklog_response = ResponseFixture.get("get_issue_worklog_#{issue_key}")
        stub_request(:get, "#{url}/worklog")
          .with(headers: headers)
          .to_return(status: 200, body: worklog_response, headers: headers)

        result = client.issue_data(issue_key)
        expect(result['fields']['worklog']['worklogs'].count).to eq(3)
      end
    end
  end

  describe "#issue_keys_for_query(jql_query)" do

    context "single request query" do
      let(:jql_query) { 'project="single_request"' }
      # let(:jql_query) { 'project="JT"' }

      it "returns ids from the query results" do
        url = "https://#{domain}/rest/api/2/search"
        url_query = "?fields=id&jql=#{jql_query}&maxResults=1000&startAt=0"
        headers = { "Content-Type" => "application/json" }
        response = ResponseFixture.get('get_issue_keys_jql_query_project="single_request"_start_at_0')

        stub_request(:get, "#{url}#{url_query}")
          .with(headers: headers)
          .to_return(status: 200, body: response, headers: headers)

        result = client.issue_keys_for_query(jql_query)
        expect(result.count).to eq(2)
      end
    end

    context "query spanning over multiple requests" do
      let(:jql_query) { 'project="multiple_requests"' }

      it "returns ids from the multiple requests" do
        url = "https://#{domain}/rest/api/2/search"
        url_query_1 = "?fields=id&jql=#{jql_query}&maxResults=1000&startAt=0"
        url_query_2 = "?fields=id&jql=#{jql_query}&maxResults=1000&startAt=5"
        url_query_3 = "?fields=id&jql=#{jql_query}&maxResults=1000&startAt=10"
        headers = { "Content-Type" => "application/json" }

        response_fixture_prefix = 'get_issue_keys_jql_query_project="multiple_requests"_start_at_'
        response_1 = ResponseFixture.get("#{response_fixture_prefix}0")
        response_2 = ResponseFixture.get("#{response_fixture_prefix}5")
        response_3 = ResponseFixture.get("#{response_fixture_prefix}10")

        stub_request(:get, "#{url}#{url_query_1}")
          .with(headers: headers)
          .to_return(status: 200, body: response_1, headers: headers)
        stub_request(:get, "#{url}#{url_query_2}")
          .with(headers: headers)
          .to_return(status: 200, body: response_2, headers: headers)
        stub_request(:get, "#{url}#{url_query_3}")
          .with(headers: headers)
          .to_return(status: 200, body: response_3, headers: headers)

        result = client.issue_keys_for_query(jql_query)
        expect(result.count).to eq(11)
      end
    end
  end
end
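The "multiple_requests" fixtures above (total 11, maxResults 5, startAt 0/5/10) exercise client-side pagination of the JIRA search endpoint. The gem's own lib/jira_cache/client.rb is not shown in this diff, so the following is only a rough sketch of the kind of loop those fixtures imply; search_page is a hypothetical stand-in for the HTTP GET against /rest/api/2/search:

require "json"

# Hypothetical pagination loop: keeps requesting pages until every issue
# key announced by "total" has been collected. `search_page` is not part
# of the gem; it abstracts the actual HTTP request.
def collect_issue_keys(jql_query, &search_page)
  keys = []
  start_at = 0
  loop do
    page = JSON.parse(search_page.call(jql_query, start_at))
    keys += page["issues"].map { |issue| issue["key"] }
    start_at += page["issues"].size
    break if page["issues"].empty? || keys.size >= page["total"]
  end
  keys
end

# Example against the fixture shape above:
# collect_issue_keys('project="multiple_requests"') do |jql, start_at|
#   ResponseFixture.get("get_issue_keys_jql_query_#{jql}_start_at_#{start_at}")
# end
# # => ["MR-001", ..., "MR-011"]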
data/spec/unit/data/issue_repository_spec.rb ADDED
@@ -0,0 +1,58 @@
# frozen_string_literal: true
require "spec_helper"
require "timecop"
require "jira_cache/data/issue_repository"

describe JiraCache::Data::IssueRepository do
  let(:time) { Time.now }
  let(:issue1_data) { { "key" => "key1", "fields" => { "project" => { "key" => "PJ1" } }, "value" => "value1" } }
  let(:issue2_data) { { "key" => "key2", "fields" => { "project" => { "key" => "PJ2" } }, "value" => "value2" } }
  let(:issue3_data) { { "key" => "key3", "fields" => { "project" => { "key" => "PJ1" } }, "value" => "value3" } }

  before do
    Timecop.freeze(time)
    described_class.insert(key: "key1", data: issue1_data, synced_at: time)
    described_class.insert(key: "key2", data: issue2_data, synced_at: time)
    described_class.insert(key: "key3", data: issue3_data, synced_at: time)
  end
  after { Timecop.return }

  describe "::find_by_key(issue_key)" do

    context "matching issue exists" do
      it "returns the issue's attributes" do
        result = described_class.find_by_key("key1")
        expect(result[:data]["value"]).to eq("value1")
      end
    end

    context "no matching issue" do
      it "returns nil" do
        expect(described_class.find_by_key("unknown")).to eq(nil)
      end
    end
  end

  describe "::insert(key:, data:, synced_at:, deleted_from_jira_at: nil)" do
    context "successful" do
      let(:key) { SecureRandom.uuid }
      subject do
        described_class.insert(key: key, data: issue1_data, synced_at: time)
      end

      it "returns nil" do
        expect(subject).to eq(nil)
      end

      it "creates the row" do
        subject
        expect(JiraCache::Data::IssueRepository.find_by_key(key)).not_to be_nil
      end
    end
  end

  describe "::keys_in_project(project_key)" do
    subject { described_class.keys_in_project("PJ1") }
    it { should eq(%w(key1 key3)) }
  end
end
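Taken together, the repository calls exercised above form a small persistence API. A hedged usage sketch, assuming the Sequel connection from config/boot (JiraCache::Data::DB) and the issues migration are already in place; the key and data below are placeholders:

require "jira_cache/data/issue_repository"

repo = JiraCache::Data::IssueRepository

# Cache an issue payload under its key, stamped with the sync time.
repo.insert(
  key: "PJ1-42",
  data: { "key" => "PJ1-42", "fields" => { "project" => { "key" => "PJ1" } } },
  synced_at: Time.now
)

repo.find_by_key("PJ1-42")      # => row hash with :data, or nil if unknown
repo.keys_in_project("PJ1")     # => ["PJ1-42", ...]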
data/spec/unit/notifier_spec.rb ADDED
@@ -0,0 +1,18 @@
require "spec_helper"
require "jira_cache/notifier"

describe JiraCache::Notifier do

  describe "#publish(name, data = nil)" do
    let(:event_name) { "test" }
    let(:issue_data) { { "value" => "issue_value", "key" => "issue_data_key" } }
    let(:event_data) { { key: "issue_key", data: issue_data } }

    it "logs the event name and the event's data key" do
      logger = ::Logger.new("/dev/null")
      notifier = described_class.new(logger)
      expect(logger).to receive(:info).with("[test] #{event_data[:key]}")
      notifier.publish event_name, event_data
    end
  end
end
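The notifier contract implied by these specs is tiny: any object responding to publish(event_name, data) can be passed to JiraCache::Client.new(notifier:), as client_spec does with a double. A hedged sketch of a custom notifier; the class name and counting behaviour are made up for illustration:

# Minimal custom notifier: anything with a #publish(event_name, data) method.
# Event names it receives (e.g. "fetched_issue") come from the client.
class CountingNotifier
  attr_reader :counts

  def initialize
    @counts = Hash.new(0)
  end

  def publish(event_name, _data = nil)
    @counts[event_name] += 1
  end
end

# notifier = CountingNotifier.new
# client = JiraCache::Client.new(domain: "example.com", username: "u",
#                                password: "p", notifier: notifier)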
data/spec/unit/sync_spec.rb ADDED
@@ -0,0 +1,116 @@
# frozen_string_literal: true
require "spec_helper"
require "timecop"
require "jira_cache/sync"

describe JiraCache::Sync do

  subject { described_class.new(client) }
  let(:client) { double("JiraCache::Client", issue_data: {}, logger: nil) }
  let(:now) { Time.now }

  before do
    Timecop.freeze(now)
  end

  after { Timecop.return }

  it "has a version number" do
    expect(JiraCache::VERSION).not_to be nil
  end

  let(:project_key) { "project_key" }
  let(:remote_keys) { %w(a b c d) }
  let(:cached_keys) { %w(c d e f) }
  let(:updated_keys) { %w(c) }
  let(:latest_sync_time) { now }

  describe "::sync_issues(project_key: nil)" do

    before do
      expect(subject)
        .to receive(:remote_keys)
        .with(project_key: project_key)
        .and_return(remote_keys)
      expect(subject)
        .to receive(:cached_keys)
        .with(project_key: project_key)
        .and_return(cached_keys)
      expect(subject)
        .to receive(:updated_keys)
        .with(project_key: project_key)
        .and_return(updated_keys)
    end

    it "fetches new and updated issues" do
      expect(subject).to receive(:fetch_issues).with(%w(a b c), now)
      subject.sync_issues(project_key: project_key)
    end

    it "marks deleted issues" do
      expect(subject).to receive(:mark_deleted).with(%w(e f))
      subject.sync_issues(project_key: project_key)
    end

    it "stores issues with the sync time" do
      subject.sync_issues(project_key: project_key)
      expect(JiraCache::Data::IssueRepository.latest_sync_time).to be_within(1).of latest_sync_time
    end
  end

  describe "::remote_keys(project_key: nil)" do
    it "fetches the issue keys for the project" do
      expect(subject).to receive(:fetch_issue_keys).with(project_key: project_key)
      subject.remote_keys(project_key: project_key)
    end
  end

  describe "::cached_keys(project_key: nil)" do
    it "fetches keys from cached issues" do
      expect(JiraCache::Data::IssueRepository)
        .to receive(:keys_in_project)
        .with(project_key: project_key)
      subject.cached_keys(project_key: project_key)
    end
  end

  describe "::updated_keys(project_key: nil)" do
    it "fetches issue keys for the project updated since the last sync date" do
      expect(subject).to receive(:latest_sync_time).and_return(latest_sync_time)
      expect(subject).to receive(:fetch_issue_keys).with(project_key: project_key, updated_since: latest_sync_time)
      subject.updated_keys(project_key: project_key)
    end
  end

  describe "::fetch_issue_keys(project_key: nil, updated_since: nil)" do

    context "with no parameter" do
      it "fetches issue keys with an empty JQL query" do
        expect(client)
          .to receive(:issue_keys_for_query)
          .with("")
        subject.fetch_issue_keys()
      end
    end

    context "with only the `project_key` parameter" do
      it "fetches issue keys with the project JQL query" do
        expect(client)
          .to receive(:issue_keys_for_query)
          .with("project = \"#{project_key}\"")
        subject.fetch_issue_keys(project_key: project_key)
      end
    end

    context "with both parameters" do
      it "fetches issue keys with the project and updated_since JQL query" do
        expected_jql = "project = \"#{project_key}\""
        expected_jql += " AND updatedDate > \"#{latest_sync_time.strftime('%Y-%m-%d %H:%M')}\""
        expect(client)
          .to receive(:issue_keys_for_query)
          .with(expected_jql)
        subject.fetch_issue_keys(project_key: project_key, updated_since: latest_sync_time)
      end
    end
  end
end
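Finally, a hedged end-to-end sketch combining the pieces exercised by these specs: a client built with the constructor arguments from client_spec, driving the two sync entry points used above and by the webhook app. The domain, credentials and project key are placeholders (real values would come from the .env files listed at the top of this release), and a configured database is assumed:

require "jira_cache/client"
require "jira_cache/sync"

# Placeholder credentials for illustration only.
client = JiraCache::Client.new(
  domain:   "example.com",
  username: "api-user",
  password: "api-token",
  notifier: nil
)

sync = JiraCache::Sync.new(client)
sync.sync_issues(project_key: "MR")  # full project sync: fetch, update, mark deleted
sync.sync_issue("MR-001")            # single-issue sync, as the webhook handler does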