oss-stats 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1242 @@
1
+ require 'rspec'
2
+ require 'octokit'
3
+ require 'base64'
4
+ require_relative '../lib/oss_stats/repo_stats'
5
+ require_relative '../lib/oss_stats/config/repo_stats'
6
+ require_relative '../lib/oss_stats/log'
7
+
8
+ RSpec.describe 'repo_stats' do
9
+ include OssStats::RepoStats
10
+
11
+ let(:client) { instance_double(Octokit::Client) }
12
+ let(:options) do
13
+ {
14
+ org: 'test_org',
15
+ repo: 'test_repo',
16
+ days: 30,
17
+ branches: ['main'],
18
+ }
19
+ end
20
+
21
+ before do
22
+ allow(client).to receive(:issues).and_return([])
23
+ allow(client).to receive(:pull_requests).and_return([])
24
+ allow(client).to receive(:workflows).and_return(double(workflows: []))
25
+ allow(client).to receive(:workflow_runs).and_return(
26
+ double(workflow_runs: []),
27
+ )
28
+ allow(client).to receive(:workflow_run_jobs).and_return(double(jobs: []))
29
+ end
30
+
31
+ describe '#rate_limited_sleep' do
32
+ after(:each) do
33
+ OssStats::Config::RepoStats.limit_gh_ops_per_minute = nil
34
+ end
35
+
36
+ it 'sleeps for the correct amount of time based on the rate limit' do
37
+ OssStats::Config::RepoStats.limit_gh_ops_per_minute = 60
38
+ expect(self).to receive(:sleep).with(1.0)
39
+ rate_limited_sleep
40
+ end
41
+
42
+ it 'does not sleep if the rate limit is not set' do
43
+ OssStats::Config::RepoStats.limit_gh_ops_per_minute = nil
44
+ expect(self).not_to receive(:sleep)
45
+ rate_limited_sleep
46
+ end
47
+
48
+ it 'does not sleep if the rate limit is 0' do
49
+ OssStats::Config::RepoStats.limit_gh_ops_per_minute = 0
50
+ expect(self).not_to receive(:sleep)
51
+ rate_limited_sleep
52
+ end
53
+ end
54
+
55
+ describe '#get_pr_and_issue_stats' do
56
+ it 'fetches PR and issue stats from GitHub' do
57
+ allow(client).to receive(:issues).with(
58
+ 'test_org/test_repo',
59
+ hash_including(page: 1),
60
+ ).and_return(
61
+ [
62
+ # closed PR
63
+ double(
64
+ created_at: Date.today - 7,
65
+ closed_at: Date.today - 5,
66
+ pull_request: double(
67
+ merged_at: Date.today - 5,
68
+ ),
69
+ updated_at: Date.today - 3,
70
+ labels: [],
71
+ ),
72
+ # open Issue
73
+ double(
74
+ created_at: Date.today - 7,
75
+ closed_at: nil,
76
+ pull_request: nil,
77
+ updated_at: Date.today - 3,
78
+ labels: [],
79
+ ),
80
+ ],
81
+ )
82
+ allow(client).to receive(:issues).with(
83
+ 'test_org/test_repo',
84
+ hash_including(page: 2),
85
+ ).and_return([])
86
+
87
+ stats = get_pr_and_issue_stats(client, options)
88
+
89
+ expect(stats[:pr][:open]).to eq(0)
90
+ expect(stats[:pr][:closed]).to eq(1)
91
+ expect(stats[:issue][:open]).to eq(1)
92
+ expect(stats[:issue][:closed]).to eq(0)
93
+ end
94
+
95
+ it 'handles empty responses gracefully' do
96
+ allow(client).to receive(:issues).and_return([])
97
+
98
+ stats = get_pr_and_issue_stats(client, options)
99
+
100
+ expect(stats[:pr][:open]).to eq(0)
101
+ expect(stats[:issue][:open]).to eq(0)
102
+ end
103
+ end
104
+
105
+ describe '#get_failed_tests_from_ci' do
106
+ it 'fetches failed tests from CI workflows' do
107
+ allow(client).to receive(:readme).with('test_org/test_repo')
108
+ .and_return(double(content: ''))
109
+ allow(client).to receive(:workflows).and_return(
110
+ double(workflows: [
111
+ double(id: 1, name: 'Test Workflow', html_url: 'testurl'),
112
+ ]),
113
+ )
114
+ allow(client).to receive(:workflow_runs).with(
115
+ 'test_org/test_repo',
116
+ 1,
117
+ hash_including(page: 1),
118
+ ).and_return(
119
+ double(workflow_runs: [
120
+ double(id: 1, created_at: Date.today - 5),
121
+ ]),
122
+ )
123
+ allow(client).to receive(:workflow_runs).with(
124
+ 'test_org/test_repo',
125
+ 1,
126
+ hash_including(page: 2),
127
+ ).and_return(double(workflow_runs: []))
128
+ allow(client).to receive(:workflow_run_jobs).and_return(
129
+ double(jobs: [
130
+ double(name: 'Test Job', conclusion: 'failure'),
131
+ ]),
132
+ )
133
+
134
+ failed_tests = get_failed_tests_from_ci(client, nil, options, {})
135
+
136
+ expect(failed_tests['main']['Test Workflow / Test Job'][:dates])
137
+ .to include(Date.today - 5)
138
+ end
139
+
140
+ it 'handles no failures gracefully' do
141
+ allow(client).to receive(:readme).with('test_org/test_repo')
142
+ .and_return(double(content: ''))
143
+ allow(client).to receive(:workflows).and_return(
144
+ double(workflows: [double(id: 1, name: 'Test Workflow')]),
145
+ )
146
+ allow(client).to receive(:workflow_runs).and_return(
147
+ double(workflow_runs: []),
148
+ )
149
+
150
+ failed_tests = get_failed_tests_from_ci(client, nil, options, {})
151
+
152
+ expect(failed_tests['main']).to be_empty
153
+ end
154
+
155
+ describe 'Buildkite Integration' do
156
+ let(:mock_buildkite_client) { instance_double(OssStats::BuildkiteClient) }
157
+ # Updated README content to match the new regex
158
+ let(:readme_content_with_badge) do
159
+ badge1 = 'https://badge.buildkite.com/someuuid.svg?branch=main'
160
+ url1 = 'https://buildkite.com/test-buildkite-org/actual-pipeline-name'
161
+ badge2 = 'https://badge.buildkite.com/another.svg'
162
+ url2 = 'https://buildkite.com/other-org/other-pipeline'
163
+ Base64.encode64(
164
+ <<~README,
165
+ Some text before
166
+ [![Build Status](#{badge1})](#{url1})
167
+ More text [![Another Badge](#{badge2})](#{url2})
168
+ Some text after
169
+ README
170
+ )
171
+ end
172
+ let(:readme_content_with_badge_alternative_format) do
173
+ # Test with a slightly different markdown image link format
174
+ badge = 'https://badge.buildkite.com/short-uuid.svg'
175
+ url = 'https://buildkite.com/test-buildkite-org/another-actual-pipeline'
176
+ Base64.encode64(
177
+ <<~README,
178
+ [![] (#{badge})](#{url})
179
+ README
180
+ )
181
+ end
182
+ let(:readme_content_without_badge) do
183
+ Base64.encode64('This README has no Buildkite badge, only text.')
184
+ end
185
+ let(:settings_with_buildkite_token) do
186
+ options.merge(buildkite_token: 'fake-bk-token')
187
+ end
188
+
189
+ before do
190
+ allow(mock_buildkite_client).to receive(:get_pipeline_builds)
191
+ .and_return([])
192
+ end
193
+
194
+ context 'when repository has a Buildkite badge in README' do
195
+ let(:readme_double) { double(content: readme_content_with_badge) }
196
+ let(:repo_full_name) { "#{options[:org]}/#{options[:repo]}" }
197
+
198
+ before do
199
+ allow(client).to receive(:readme)
200
+ .with(repo_full_name)
201
+ .and_return(readme_double)
202
+ end
203
+
204
+ it 'calls BuildkiteClient with correct slugs and processes results' do
205
+ expect(mock_buildkite_client).to receive(:get_pipeline)
206
+ .with('test-buildkite-org', 'actual-pipeline-name')
207
+ .and_return({
208
+ url: 'testurl',
209
+ slug: 'actual-pipeline-name',
210
+ })
211
+ expect(mock_buildkite_client).to receive(:get_pipeline)
212
+ .with('other-org', 'other-pipeline')
213
+ .and_return({
214
+ url: 'testurl',
215
+ slug: 'other-pipeline',
216
+ })
217
+ expect(mock_buildkite_client).to receive(:get_pipeline_builds)
218
+ .with(
219
+ 'test-buildkite-org',
220
+ 'actual-pipeline-name',
221
+ Date.today - options[:days],
222
+ Date.today,
223
+ 'main',
224
+ )
225
+ .and_return([
226
+ {
227
+ 'node' => {
228
+ 'createdAt' => (Date.today - 1).to_s, 'state' => 'FAILED'
229
+ },
230
+ },
231
+ ])
232
+ expect(mock_buildkite_client).to receive(:get_pipeline_builds)
233
+ .with(
234
+ 'other-org',
235
+ 'other-pipeline',
236
+ Date.today - options[:days],
237
+ Date.today,
238
+ 'main',
239
+ )
240
+ .and_return([
241
+ {
242
+ 'node' => {
243
+ 'createdAt' => (Date.today - 1).to_s, 'state' => 'PASSED'
244
+ },
245
+ },
246
+ ])
247
+ failed_tests = get_failed_tests_from_ci(
248
+ client, mock_buildkite_client, settings_with_buildkite_token, {}
249
+ )
250
+ job1_key = '[BK] test-buildkite-org/actual-pipeline-name'
251
+ expect(failed_tests['main'][job1_key][:dates])
252
+ .to include(Date.today - 1)
253
+ end
254
+
255
+ it 'correctly parses alternative badge markdown format' do
256
+ expect(mock_buildkite_client).to receive(:get_pipeline)
257
+ .with('test-buildkite-org', 'another-actual-pipeline')
258
+ .and_return({
259
+ url: 'testurl',
260
+ slug: 'another-actual-pipelinename',
261
+ })
262
+ allow(client).to receive(:readme)
263
+ .with(repo_full_name)
264
+ .and_return(
265
+ double(content: readme_content_with_badge_alternative_format),
266
+ )
267
+ expect(mock_buildkite_client).to receive(:get_pipeline_builds)
268
+ .with(
269
+ 'test-buildkite-org',
270
+ 'another-actual-pipeline',
271
+ Date.today - options[:days],
272
+ Date.today,
273
+ 'main',
274
+ )
275
+ .and_return([])
276
+ get_failed_tests_from_ci(
277
+ client, mock_buildkite_client, settings_with_buildkite_token, {}
278
+ )
279
+ end
280
+
281
+ it 'handles no failed builds from Buildkite' do
282
+ expect(mock_buildkite_client).to receive(:get_pipeline)
283
+ .with('test-buildkite-org', 'actual-pipeline-name')
284
+ .and_return({
285
+ url: 'testurl',
286
+ slug: 'actual-pipeline-name',
287
+ })
288
+ expect(mock_buildkite_client).to receive(:get_pipeline)
289
+ .with('other-org', 'other-pipeline')
290
+ .and_return({
291
+ url: 'testurl',
292
+ slug: 'other-pipeline',
293
+ })
294
+ allow(mock_buildkite_client).to receive(:get_pipeline_builds)
295
+ .and_return([
296
+ {
297
+ 'node' => {
298
+ 'createdAt' => (Date.today - 1).to_s,
299
+ 'state' => 'PASSED',
300
+ },
301
+ },
302
+ ])
303
+ failed_tests = get_failed_tests_from_ci(
304
+ client, mock_buildkite_client, settings_with_buildkite_token, {}
305
+ )
306
+ buildkite_job_keys = failed_tests['main'].keys.select do |k|
307
+ k.start_with?('Buildkite /')
308
+ end
309
+ expect(buildkite_job_keys).to be_empty
310
+ end
311
+
312
+ context 'with ongoing failures' do
313
+ let(:days_to_check) { 5 }
314
+ let(:options_for_ongoing) { options.merge(days: days_to_check) }
315
+ let(:today) { Date.today }
316
+ let(:org_name) { 'test-buildkite-org' }
317
+ let(:pipeline_name) { 'actual-pipeline-name' }
318
+ let(:job_key) { "[BK] #{org_name}/#{pipeline_name}" }
319
+
320
+ let(:mock_builds_for_ongoing_test) do
321
+ # Helper to create a build node
322
+ def build_node(created_at_val, state_val)
323
+ {
324
+ 'node' => {
325
+ 'createdAt' => created_at_val.to_s,
326
+ 'state' => state_val,
327
+ },
328
+ }
329
+ end
330
+
331
+ [
332
+ build_node(today - days_to_check + 1, 'FAILED'),
333
+ build_node(today - days_to_check + 2, 'FAILED'),
334
+ build_node(today - days_to_check + 3, 'PASSED'),
335
+ build_node(today - days_to_check + 4, 'FAILED'),
336
+ ].sort_by { |b| DateTime.parse(b['node']['createdAt']) }
337
+ end
338
+
339
+ it 'correctly reports days for ongoing and fixed failures' do
340
+ expect(mock_buildkite_client).to receive(:get_pipeline)
341
+ .with('test-buildkite-org', 'actual-pipeline-name')
342
+ .and_return({
343
+ url: 'testurl',
344
+ slug: 'actual-pipeline-name',
345
+ })
346
+ expect(mock_buildkite_client).to receive(:get_pipeline)
347
+ .with('other-org', 'other-pipeline')
348
+ .and_return({
349
+ url: 'testurl',
350
+ slug: 'other-pipeline',
351
+ })
352
+ allow(mock_buildkite_client).to receive(:get_pipeline_builds)
353
+ .with(
354
+ org_name, pipeline_name, today - days_to_check, today, 'main'
355
+ )
356
+ .and_return(mock_builds_for_ongoing_test)
357
+
358
+ failed_tests = get_failed_tests_from_ci(
359
+ client, mock_buildkite_client, options_for_ongoing, {}
360
+ )
361
+
362
+ expected_job_dates = Set.new([
363
+ today - days_to_check + 1,
364
+ today - days_to_check + 2,
365
+ # no 3, it passed that day
366
+ today - days_to_check + 4,
367
+ # add today (days_to_check = 5), because we fill in
368
+ # all days through today if the last check is failing
369
+ today,
370
+ ])
371
+ expect(failed_tests['main'][job_key][:dates])
372
+ .to eq(expected_job_dates)
373
+ expect(failed_tests['main'][job_key][:dates].size)
374
+ .to eq(days_to_check - 1)
375
+ end
376
+ end
377
+ end
378
+
379
+ context 'when repository does not have a Buildkite badge' do
380
+ before do
381
+ allow(client).to receive(:readme)
382
+ .with("#{options[:org]}/#{options[:repo]}")
383
+ .and_return(double(content: readme_content_without_badge))
384
+ end
385
+
386
+ it 'does not call BuildkiteClient' do
387
+ expect(OssStats::BuildkiteClient).not_to receive(:new)
388
+ expect(mock_buildkite_client).not_to receive(:get_pipeline_builds)
389
+ get_failed_tests_from_ci(
390
+ client, mock_buildkite_client, settings_with_buildkite_token, {}
391
+ )
392
+ end
393
+ end
394
+
395
+ context 'when README is not found' do
396
+ it 'handles the error and does not call BuildkiteClient' do
397
+ allow(client).to receive(:readme)
398
+ .with("#{options[:org]}/#{options[:repo]}")
399
+ .and_raise(Octokit::NotFound)
400
+ expect(OssStats::BuildkiteClient).not_to receive(:new)
401
+ expect(OssStats::Log).to receive(:warn)
402
+ .with(%r{README.md not found for repo test_org/test_repo})
403
+ get_failed_tests_from_ci(
404
+ client, mock_buildkite_client, settings_with_buildkite_token, {}
405
+ )
406
+ end
407
+ end
408
+
409
+ context 'when Buildkite API call fails' do
410
+ before do
411
+ allow(client).to receive(:readme)
412
+ .with("#{options[:org]}/#{options[:repo]}")
413
+ .and_return(double(content: readme_content_with_badge))
414
+ allow(mock_buildkite_client)
415
+ .to receive(:get_pipeline_builds)
416
+ .and_raise(StandardError.new('Buildkite API Error'))
417
+ allow(mock_buildkite_client)
418
+ .to receive(:get_pipeline)
419
+ .and_return({
420
+ url: 'testurl',
421
+ slug: 'actual-pipeline-name',
422
+ })
423
+ end
424
+
425
+ it 'handles the error gracefully and logs it' do
426
+ expect(OssStats::Log).to receive(:error)
427
+ .with(/Error during Buildkite integration for test_org/)
428
+ failed_tests = get_failed_tests_from_ci(
429
+ client, mock_buildkite_client, settings_with_buildkite_token, {}
430
+ )
431
+ buildkite_job_keys = failed_tests['main'].keys.select do |k|
432
+ k.start_with?('Buildkite /')
433
+ end
434
+ expect(buildkite_job_keys).to be_empty
435
+ end
436
+ end
437
+
438
+ context 'when Buildkite token is not available' do
439
+ before do
440
+ # Mock get_buildkite_token! to raise ArgumentError (no token available)
441
+ allow(self).to receive(:get_buildkite_token!)
442
+ .and_raise(ArgumentError)
443
+ allow(client).to receive(:readme)
444
+ .with("#{options[:org]}/#{options[:repo]}")
445
+ .and_return(double(content: readme_content_with_badge))
446
+ end
447
+ end
448
+ end
449
+ end
450
+
451
+ describe '#print_ci_status' do
452
+ context 'with only GitHub Actions failures' do
453
+ let(:test_failures) do
454
+ {
455
+ 'main' => {
456
+ 'GH Workflow / Job A' => {
457
+ dates: Set[Date.today, Date.today - 1],
458
+ url: 'testurla',
459
+ },
460
+ 'GH Workflow / Job B' => {
461
+ dates: Set[Date.today],
462
+ url: 'testurlb',
463
+ },
464
+ },
465
+ }
466
+ end
467
+
468
+ it 'prints GitHub Actions failures correctly' do
469
+ expect(OssStats::Log).to receive(:info)
470
+ .with("\n* CI Stats:")
471
+ expect(OssStats::Log).to receive(:info)
472
+ .with(' * Branch: `main` has the following failures:')
473
+ expect(OssStats::Log).to receive(:info)
474
+ .with(' * [GH Workflow / Job A](testurla): 2 days')
475
+ expect(OssStats::Log).to receive(:info)
476
+ .with(' * [GH Workflow / Job B](testurlb): 1 days')
477
+ print_ci_status(test_failures)
478
+ end
479
+ end
480
+
481
+ context 'with only Buildkite failures' do
482
+ let(:test_failures) do
483
+ {
484
+ 'main' => {
485
+ '[BK] org/pipe1' => {
486
+ dates: Set[Date.today],
487
+ url: 'testurl1',
488
+ },
489
+ '[BK] org/pipe2' => {
490
+ dates: Set[Date.today, Date.today - 1, Date.today - 2],
491
+ url: 'testurl2',
492
+ },
493
+ },
494
+ }
495
+ end
496
+
497
+ it 'prints Buildkite failures correctly' do
498
+ expect(OssStats::Log).to receive(:info)
499
+ .with("\n* CI Stats:")
500
+ expect(OssStats::Log).to receive(:info)
501
+ .with(' * Branch: `main` has the following failures:')
502
+ expect(OssStats::Log).to receive(:info)
503
+ .with(' * [[BK] org/pipe1](testurl1): 1 days')
504
+ expect(OssStats::Log).to receive(:info)
505
+ .with(' * [[BK] org/pipe2](testurl2): 3 days')
506
+ print_ci_status(test_failures)
507
+ end
508
+ end
509
+
510
+ context 'with mixed GitHub Actions and Buildkite failures' do
511
+ let(:test_failures) do
512
+ {
513
+ 'main' => {
514
+ 'GH Workflow / Job A' => {
515
+ dates: Set[Date.today],
516
+ url: 'testurla',
517
+ },
518
+ 'Buildkite / org/pipe / Job X' => {
519
+ dates: Set[Date.today - 1],
520
+ url: 'testurlx',
521
+ },
522
+ 'GH Workflow / Job C' => {
523
+ dates: Set[Date.today - 2, Date.today - 3],
524
+ url: 'testurlc',
525
+ },
526
+ },
527
+ }
528
+ end
529
+
530
+ it 'prints mixed failures correctly and sorted' do
531
+ expect(OssStats::Log).to receive(:info)
532
+ .with("\n* CI Stats:")
533
+ expect(OssStats::Log).to receive(:info)
534
+ .with(' * Branch: `main` has the following failures:')
535
+ # Sorted order: Buildkite job first, then GH jobs
536
+ expect(OssStats::Log).to receive(:info)
537
+ .with(' * [Buildkite / org/pipe / Job X](testurlx): 1 days')
538
+ .ordered
539
+ expect(OssStats::Log).to receive(:info)
540
+ .with(' * [GH Workflow / Job A](testurla): 1 days').ordered
541
+ expect(OssStats::Log).to receive(:info)
542
+ .with(' * [GH Workflow / Job C](testurlc): 2 days').ordered
543
+ print_ci_status(test_failures)
544
+ end
545
+ end
546
+
547
+ context 'with no failures' do
548
+ let(:test_failures) { { 'main' => {} } }
549
+
550
+ it 'prints the no failures message' do
551
+ expect(OssStats::Log).to receive(:info)
552
+ .with("\n* CI Stats:")
553
+ expect(OssStats::Log).to receive(:info)
554
+ .with(' * Branch: `main`: No job failures found! :tada:')
555
+ print_ci_status(test_failures)
556
+ end
557
+ end
558
+
559
+ context 'with failures on multiple branches' do
560
+ let(:test_failures) do
561
+ {
562
+ 'main' => {
563
+ 'GH Workflow / Job A' => {
564
+ dates: Set[Date.today],
565
+ url: 'testurla',
566
+ },
567
+ },
568
+ 'develop' => {
569
+ '[BK] org/pipe' => {
570
+ dates: Set[Date.today - 1, Date.today - 2],
571
+ url: 'testurlp',
572
+ },
573
+ },
574
+ }
575
+ end
576
+
577
+ it 'groups failures by branch and prints them correctly' do
578
+ expect(OssStats::Log).to receive(:info)
579
+ .with("\n* CI Stats:")
580
+ expect(OssStats::Log).to receive(:info)
581
+ .with(' * Branch: `develop` has the following failures:')
582
+ expect(OssStats::Log).to receive(:info)
583
+ .with(' * [[BK] org/pipe](testurlp): 2 days')
584
+ expect(OssStats::Log).to receive(:info)
585
+ .with(' * Branch: `main` has the following failures:')
586
+ expect(OssStats::Log).to receive(:info)
587
+ .with(' * [GH Workflow / Job A](testurla): 1 days')
588
+
589
+ print_ci_status(test_failures)
590
+ end
591
+ end
592
+ end
593
+
594
+ describe '#determine_orgs_to_process' do
595
+ before(:each) do
596
+ OssStats::Config::RepoStats.organizations({
597
+ 'org1' => {
598
+ 'days' => 2,
599
+ 'repositories' => {
600
+ 'repo1' => {},
601
+ 'repo2' => {
602
+ 'days' => 3,
603
+ },
604
+ },
605
+ },
606
+ 'org2' => {
607
+ 'days' => 7,
608
+ 'repositories' => {
609
+ 'repoA' => {
610
+ 'days' => 30,
611
+ },
612
+ 'repoB' => {},
613
+ },
614
+ },
615
+ })
616
+ end
617
+ let(:config) { OssStats::Config::RepoStats }
618
+
619
+ context 'combines org/repo limits properly' do
620
+ it 'returns the config orgs when no limits specified' do
621
+ ans = config.organizations.dup
622
+ expect(determine_orgs_to_process).to eq(ans)
623
+ end
624
+
625
+ it 'returns only the specified org when requested' do
626
+ ans = { 'org2' => config.organizations['org2'].dup }
627
+ config.github_org = 'org2'
628
+ expect(determine_orgs_to_process).to eq(ans)
629
+ end
630
+
631
+ it 'returns only the specified org/repo when requested' do
632
+ ans = { 'org2' => config.organizations['org2'].dup }
633
+ ans['org2']['repositories'].delete('repoA')
634
+ config.github_org = 'org2'
635
+ config.github_repo = 'repoB'
636
+ expect(determine_orgs_to_process).to eq(ans)
637
+ end
638
+
639
+ it 'creates an appropriate entry when none exists' do
640
+ ans = { 'neworg' => { 'repositories' => { 'repo' => {} } } }
641
+ config.github_org = 'neworg'
642
+ config.github_repo = 'repo'
643
+ expect(determine_orgs_to_process).to eq(ans)
644
+ end
645
+ end
646
+ end
647
+
648
+ describe '#get_effective_repo_settings' do
649
+ before(:each) do
650
+ OssStats::Config::RepoStats.days = nil
651
+ OssStats::Config::RepoStats.branches = nil
652
+ OssStats::Config::RepoStats.default_days = 15
653
+ OssStats::Config::RepoStats.default_branches = ['foo']
654
+ end
655
+ context 'with no org or repo overrides' do
656
+ it 'uses defaults properly' do
657
+ ans = {
658
+ org: 'org1', repo: 'repo1', days: 15, branches: ['foo']
659
+ }
660
+ expect(get_effective_repo_settings('org1', 'repo1', {}, {})).to eq(ans)
661
+ end
662
+
663
+ it 'uses CLI days override properly' do
664
+ OssStats::Config::RepoStats.days = 2
665
+ ans = {
666
+ org: 'org1', repo: 'repo1', days: 2, branches: ['foo']
667
+ }
668
+ expect(get_effective_repo_settings('org1', 'repo1', {}, {})).to eq(ans)
669
+ end
670
+ end
671
+
672
+ context 'with org and repo overrides' do
673
+ it 'overrides default with org settings' do
674
+ s = get_effective_repo_settings(
675
+ 'org1',
676
+ 'repo1',
677
+ { 'days' => 77 },
678
+ {},
679
+ )
680
+ expect(s[:days]).to eq(77)
681
+ end
682
+
683
+ it 'overrides default and org with repo settings' do
684
+ s = get_effective_repo_settings(
685
+ 'org1',
686
+ 'repo1',
687
+ { 'days' => 77, 'branches' => ['release'] },
688
+ { 'days' => 99 },
689
+ )
690
+ # days comes from repo settings
691
+ expect(s[:days]).to eq(99)
692
+ # most specific branches setting is from org
693
+ expect(s[:branches]).to eq(['release'])
694
+ end
695
+
696
+ it 'overrides default org and repo with cli days settings' do
697
+ OssStats::Config::RepoStats.days = 11
698
+ s = get_effective_repo_settings(
699
+ 'org1',
700
+ 'repo1',
701
+ { 'days' => 77, 'branches' => ['release'] },
702
+ { 'days' => 99, 'branches' => ['special'] },
703
+ )
704
+ # days comes from CLI override
705
+ expect(s[:days]).to eq(11)
706
+ # most specific branches setting is from repo
707
+ expect(s[:branches]).to eq(['special'])
708
+ end
709
+
710
+ it 'overrides default org and repo with cli branches settings' do
711
+ OssStats::Config::RepoStats.branches = ['somebranch']
712
+ s = get_effective_repo_settings(
713
+ 'org1',
714
+ 'repo1',
715
+ { 'days' => 77, 'branches' => ['release'] },
716
+ { 'days' => 99, 'branches' => ['special'] },
717
+ )
718
+ # days comes from repo settings (no CLI days override set here)
719
+ expect(s[:days]).to eq(99)
720
+ # branches comes from the CLI override
721
+ expect(s[:branches]).to eq(['somebranch'])
722
+ end
723
+ end
724
+ end
725
+
726
+ describe '#filter_repositories' do
727
+ let(:config) { OssStats::Config::RepoStats }
728
+ let(:all_repos) { [] } # Populated in specific contexts
729
+
730
+ # Helper to create mock repository data for testing filter_repositories
731
+ def mock_repo_data(
732
+ name, stale_pr: 0, stale_issue: 0, oldest_pr_days: 0,
733
+ oldest_issue_days: 0, avg_close_pr_hours: 0, avg_close_issue_hours: 0,
734
+ ci_broken_days_map: {}, ci_distinct_broken_jobs: []
735
+ )
736
+ pr_stats = {
737
+ stale_count: stale_pr, oldest_open_days: oldest_pr_days,
738
+ avg_time_to_close_hours: avg_close_pr_hours, closed: 1,
739
+ total_close_time: avg_close_pr_hours
740
+ }
741
+ issue_stats = {
742
+ stale_count: stale_issue, oldest_open_days: oldest_issue_days,
743
+ avg_time_to_close_hours: avg_close_issue_hours, closed: 1,
744
+ total_close_time: avg_close_issue_hours
745
+ }
746
+
747
+ # ci_broken_days_map: { "job_name" => num_days_failed, ... }
748
+ # ci_distinct_broken_jobs: ["job_name1", "job_name2", ...]
749
+ ci_failures_data = nil
750
+ if !ci_broken_days_map.empty? || !ci_distinct_broken_jobs.empty?
751
+ ci_failures_data = { 'main' => {} }
752
+
753
+ ci_broken_days_map.each do |job_name, days_count|
754
+ ci_failures_data['main'][job_name] = {
755
+ dates: Set.new((1..days_count).map { |i| Date.today - i }),
756
+ url: "http://ci.com/#{job_name}",
757
+ }
758
+ end
759
+
760
+ ci_distinct_broken_jobs.each do |job_name|
761
+ next if ci_failures_data['main'].key?(job_name)
762
+ ci_failures_data['main'][job_name] = {
763
+ dates: Set[Date.today - 1],
764
+ url: "http://ci.com/#{job_name}",
765
+ }
766
+ end
767
+ end
768
+
769
+ {
770
+ name:,
771
+ url: "http://github.com/org/#{name}",
772
+ settings: { days: 30 }, # Default settings
773
+ pr_issue_stats: {
774
+ pr: pr_stats, issue: issue_stats,
775
+ pr_list: { open: [], closed: [] },
776
+ issue_list: { open: [], closed: [] }
777
+ },
778
+ ci_failures: ci_failures_data,
779
+ }
780
+ end
781
+
782
+ after(:each) do
783
+ config.top_n_stale = nil
784
+ config.top_n_oldest = nil
785
+ config.top_n_time_to_close = nil
786
+ config.top_n_most_broken_ci_days = nil
787
+ config.top_n_most_broken_ci_jobs = nil
788
+ config.top_n_stale_pr = nil
789
+ config.top_n_stale_issue = nil
790
+ config.top_n_oldest_pr = nil
791
+ config.top_n_oldest_issue = nil
792
+ config.top_n_time_to_close_pr = nil
793
+ config.top_n_time_to_close_issue = nil
794
+ end
795
+
796
+ before do
797
+ config.mode = %w{pr issue ci}
798
+ end
799
+
800
+ context 'when no filters are set' do
801
+ let(:repos_data) do
802
+ [
803
+ mock_repo_data('repo1', stale_pr: 1),
804
+ mock_repo_data('repo2', stale_pr: 2),
805
+ ]
806
+ end
807
+
808
+ it 'returns all repositories' do
809
+ result = filter_repositories(repos_data, config).map { |r| r[:name] }
810
+ expect(result).to match_array(%w{repo1 repo2})
811
+ end
812
+ end
813
+
814
+ context 'with a single filter (absolute number)' do
815
+ let(:repos_data) do
816
+ [
817
+ mock_repo_data('repo1', stale_pr: 10), # Most stale
818
+ mock_repo_data('repo2', stale_pr: 5), # Second most
819
+ mock_repo_data('repo3', stale_pr: 1),
820
+ ]
821
+ end
822
+
823
+ it 'returns the top N repositories for that filter' do
824
+ config.top_n_stale = 2
825
+ filtered = filter_repositories(repos_data, config)
826
+ expect(filtered.map { |r| r[:name] }).to match_array(%w{repo1 repo2})
827
+ end
828
+
829
+ it 'returns all repositories if N is larger than the number of repos' do
830
+ config.top_n_stale = 5
831
+ filtered = filter_repositories(repos_data, config)
832
+ expect(filtered.map { |r| r[:name] })
833
+ .to match_array(%w{repo1 repo2 repo3})
834
+ end
835
+ end
836
+
837
+ context 'with a single filter (percentage)' do
838
+ let(:repos_data) do
839
+ [
840
+ mock_repo_data('repo1', oldest_pr_days: 100), # Oldest
841
+ mock_repo_data('repo2', oldest_pr_days: 90), # Second oldest
842
+ mock_repo_data('repo3', oldest_pr_days: 80),
843
+ mock_repo_data('repo4', oldest_pr_days: 70),
844
+ ]
845
+ end
846
+
847
+ it 'returns the top N% repositories (even count)' do
848
+ config.top_n_oldest = 0.5 # 50%
849
+ filtered = filter_repositories(repos_data, config)
850
+ # 50% of 4 is 2. Expect repo1, repo2
851
+ expect(filtered.map { |r| r[:name] }).to match_array(%w{repo1 repo2})
852
+ end
853
+
854
+ it 'returns the top N% repositories (odd count, ceil)' do
855
+ three_repos = repos_data[0..2] # repo1, repo2, repo3
856
+ config.top_n_oldest = 0.5 # 50%
857
+ filtered = filter_repositories(three_repos, config)
858
+ # 50% of 3 is 1.5, ceil(1.5) is 2. Expect repo1, repo2
859
+ expect(filtered.map { |r| r[:name] }).to match_array(%w{repo1 repo2})
860
+ end
861
+ end
862
+
863
+ context 'with multiple filters' do
864
+ let(:repos_data) do
865
+ [
866
+ # High stale, low CI
867
+ mock_repo_data(
868
+ 'repoA', stale_pr: 10, ci_broken_days_map: { 'job1' => 1 }
869
+ ),
870
+ # Low stale, high CI
871
+ mock_repo_data(
872
+ 'repoB', stale_pr: 1, ci_broken_days_map: { 'job1' => 10 }
873
+ ),
874
+ # Medium for both
875
+ mock_repo_data(
876
+ 'repoC', stale_pr: 2, ci_broken_days_map: { 'job1' => 2 }
877
+ ),
878
+ # Low for both
879
+ mock_repo_data('repoD', stale_pr: 0, ci_broken_days_map: {}),
880
+ ]
881
+ end
882
+
883
+ it 'returns repositories meeting any condition without duplicates' do
884
+ # Expect repoA
885
+ config.top_n_stale = 1
886
+ # Expect repoB
887
+ config.top_n_most_broken_ci_days = 1
888
+
889
+ filtered = filter_repositories(repos_data, config)
890
+ expect(filtered.map { |r| r[:name] }).to match_array(%w{repoA repoB})
891
+ end
892
+ end
893
+
894
+ context 'with all filters active' do
895
+ let(:repos_data) do
896
+ [
897
+ mock_repo_data('r_stale', stale_pr: 100),
898
+ mock_repo_data('r_oldest', oldest_issue_days: 100),
899
+ mock_repo_data('r_ttc', avg_close_pr_hours: 100),
900
+ mock_repo_data('r_ci_days',
901
+ ci_broken_days_map: { 'main_job' => 100 }),
902
+ mock_repo_data(
903
+ 'r_ci_jobs',
904
+ ci_distinct_broken_jobs: %w{j1 j2 j3 j4 j5},
905
+ ),
906
+ mock_repo_data(
907
+ 'r_all_low',
908
+ stale_pr: 1,
909
+ oldest_issue_days: 1,
910
+ avg_close_pr_hours: 1,
911
+ ci_broken_days_map: { 'mj' => 1 },
912
+ ci_distinct_broken_jobs: ['j'],
913
+ ),
914
+ ]
915
+ end
916
+
917
+ it 'returns repositories meeting any of the criteria' do
918
+ # r_stale
919
+ config.top_n_stale = 1
920
+ # r_oldest
921
+ config.top_n_oldest = 1
922
+ # r_ttc
923
+ config.top_n_time_to_close = 1
924
+ # r_ci_days
925
+ config.top_n_most_broken_ci_days = 1
926
+ # r_ci_jobs
927
+ config.top_n_most_broken_ci_jobs = 1
928
+
929
+ filtered = filter_repositories(repos_data, config)
930
+ expect(filtered.map { |r| r[:name] })
931
+ .to match_array(%w{r_stale r_oldest r_ttc r_ci_days r_ci_jobs})
932
+ end
933
+ end
934
+
935
+ context 'edge cases' do
936
+ it 'returns an empty list if no repositories meet criteria' do
937
+ repos = [mock_repo_data('repo1', stale_pr: 0)]
938
+ config.top_n_stale = 1
939
+ expect(filter_repositories(repos, config)).to be_empty
940
+ end
941
+
942
+ it 'returns an empty list if input is empty' do
943
+ expect(filter_repositories([], config)).to be_empty
944
+ end
945
+ end
946
+
947
+ context 'data integrity' do
948
+ it 'returned repositories retain their original data structure' do
949
+ repo = mock_repo_data('integrity_test', stale_pr: 10)
950
+ config.top_n_stale = 1
951
+ filtered = filter_repositories([repo], config)
952
+ expect(filtered.first).to eq(repo)
953
+ expect(filtered.first[:pr_issue_stats][:pr][:stale_count]).to eq(10)
954
+ end
955
+ end
956
+
957
+ context 'specific filter logic: top_n_stale' do
958
+ let(:repos_data) do
959
+ [
960
+ mock_repo_data(
961
+ 'repo_pr_high_issue_low', stale_pr: 10, stale_issue: 1
962
+ ),
963
+ mock_repo_data(
964
+ 'repo_pr_low_issue_high', stale_pr: 1, stale_issue: 12
965
+ ),
966
+ mock_repo_data('repo_both_mid', stale_pr: 5, stale_issue: 5),
967
+ mock_repo_data('repo_both_low', stale_pr: 1, stale_issue: 1),
968
+ ]
969
+ end
970
+ it 'selects based on MAX of stale PRs or Issues' do
971
+ # Expect repo_pr_low_issue_high (12), repo_pr_high_issue_low (10)
972
+ config.top_n_stale = 2
973
+ filtered_names = filter_repositories(repos_data, config)
974
+ .map { |r| r[:name] }
975
+ expect(filtered_names)
976
+ .to match_array(%w{repo_pr_low_issue_high repo_pr_high_issue_low})
977
+ end
978
+ end
979
+
980
context 'specific filter logic: top_n_oldest' do
  let(:repositories) do
    [
      mock_repo_data('repo_pr_old', oldest_pr_days: 100, oldest_issue_days: 10),
      mock_repo_data('repo_issue_old', oldest_pr_days: 10, oldest_issue_days: 100),
      mock_repo_data('repo_both_young', oldest_pr_days: 5, oldest_issue_days: 5),
    ]
  end

  it 'selects based on max of oldest PR or Issue' do
    # Ranking key is max(oldest PR age, oldest issue age); the two repos
    # with a 100-day item win over the 5-day one.
    config.top_n_oldest = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('repo_pr_old', 'repo_issue_old')
  end
end
1001
+
1002
context 'specific filter logic: top_n_time_to_close' do
  let(:repositories) do
    [
      mock_repo_data(
        'repo_pr_slow', avg_close_pr_hours: 100, avg_close_issue_hours: 10
      ),
      mock_repo_data(
        'repo_issue_slow', avg_close_pr_hours: 10, avg_close_issue_hours: 100
      ),
      mock_repo_data(
        'repo_both_fast', avg_close_pr_hours: 5, avg_close_issue_hours: 5
      ),
    ]
  end

  it 'selects based on max of PR or Issue avg time to close' do
    # Ranking key is max(avg PR close time, avg issue close time); both
    # slow repos (100h) should beat the fast one (5h).
    config.top_n_time_to_close = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('repo_pr_slow', 'repo_issue_slow')
  end
end
1025
+
1026
context 'specific filter logic: top_n_most_broken_ci_days' do
  let(:repositories) do
    [
      mock_repo_data(
        'ci_heavy_broken', ci_broken_days_map: { 'jobA' => 10, 'jobB' => 5 }
      ),
      mock_repo_data(
        'ci_light_broken', ci_broken_days_map: { 'jobA' => 1 }
      ),
      mock_repo_data(
        'ci_medium_broken', ci_broken_days_map: { 'jobA' => 3, 'jobB' => 3 }
      ),
    ]
  end

  it 'selects based on total broken days across all jobs' do
    # Totals: heavy = 15, medium = 6, light = 1; top two survive.
    config.top_n_most_broken_ci_days = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('ci_heavy_broken', 'ci_medium_broken')
  end
end
1048
+
1049
context 'specific filter logic: top_n_most_broken_ci_jobs' do
  let(:repositories) do
    [
      mock_repo_data('ci_many_jobs', ci_distinct_broken_jobs: %w[j1 j2 j3]),
      mock_repo_data('ci_few_jobs', ci_distinct_broken_jobs: %w[j1]),
      mock_repo_data('ci_moderate_jobs', ci_distinct_broken_jobs: %w[j1 j2]),
    ]
  end

  it 'selects based on number of distinct broken jobs' do
    # Distinct broken-job counts: many = 3, moderate = 2, few = 1.
    config.top_n_most_broken_ci_jobs = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('ci_many_jobs', 'ci_moderate_jobs')
  end
end
1071
+
1072
context 'with missing stats sections' do
  let(:repo_without_ci) { mock_repo_data('no_ci_stats', stale_pr: 5) }
  let(:repo_without_pr_issue) do
    mock_repo_data('no_pr_issue_stats', ci_broken_days_map: { 'j1' => 5 })
  end

  before do
    # Null out whole sections to simulate repos where one half of the
    # stats was never collected at all.
    repo_without_ci[:ci_failures] = nil
    repo_without_pr_issue[:pr_issue_stats] = nil
  end

  let(:repositories) do
    [
      repo_without_ci,
      repo_without_pr_issue,
      mock_repo_data(
        'full_stats', stale_pr: 10, ci_broken_days_map: { 'j1' => 10 }
      ),
    ]
  end

  it 'handles missing ci_failures for CI filters' do
    # 'full_stats' has 10 broken days, 'no_pr_issue_stats' has 5, and a
    # nil ci_failures section counts as 0 — so top 1 is 'full_stats'.
    config.top_n_most_broken_ci_days = 1
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to eq(['full_stats'])
  end

  it 'handles missing pr_issue_stats for PR/issue filters' do
    # 'full_stats' has 10 stale, 'no_ci_stats' has 5, and a nil
    # pr_issue_stats section counts as 0 — so top 1 is 'full_stats'.
    config.top_n_stale = 1
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to eq(['full_stats'])
  end
end
1110
+
1111
context 'specific filter logic: top_n_stale_pr' do
  let(:repositories) do
    [
      mock_repo_data('r1', stale_pr: 10),
      mock_repo_data('r2', stale_pr: 5),
      mock_repo_data('r3', stale_pr: 12),
    ]
  end

  it 'selects based on PR stale count only' do
    # Top two by stale PR count alone: r3 (12) and r1 (10).
    config.top_n_stale_pr = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('r3', 'r1')
  end
end
1125
+
1126
context 'specific filter logic: top_n_oldest_pr' do
  let(:repositories) do
    [
      mock_repo_data('r1', oldest_pr_days: 100),
      mock_repo_data('r2', oldest_pr_days: 50),
      mock_repo_data('r3', oldest_pr_days: 120),
    ]
  end

  it 'selects based on PR oldest open days only' do
    # Top two by oldest open PR alone: r3 (120 days) and r1 (100 days).
    config.top_n_oldest_pr = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('r3', 'r1')
  end
end
1140
+
1141
context 'specific filter logic: top_n_time_to_close_pr' do
  let(:repositories) do
    [
      mock_repo_data('r1', avg_close_pr_hours: 100),
      mock_repo_data('r2', avg_close_pr_hours: 50),
      mock_repo_data('r3', avg_close_pr_hours: 120),
    ]
  end

  it 'selects based on PR avg time to close only' do
    # Top two by PR close time alone: r3 (120h) and r1 (100h).
    config.top_n_time_to_close_pr = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('r3', 'r1')
  end
end
1155
+
1156
context 'specific filter logic: top_n_stale_issue' do
  let(:repositories) do
    [
      mock_repo_data('r1', stale_issue: 10),
      mock_repo_data('r2', stale_issue: 5),
      mock_repo_data('r3', stale_issue: 12),
    ]
  end

  it 'selects based on Issue stale count only' do
    # Top two by stale issue count alone: r3 (12) and r1 (10).
    config.top_n_stale_issue = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('r3', 'r1')
  end
end
1171
+
1172
context 'specific filter logic: top_n_oldest_issue' do
  let(:repositories) do
    [
      mock_repo_data('r1', oldest_issue_days: 100),
      mock_repo_data('r2', oldest_issue_days: 50),
      mock_repo_data('r3', oldest_issue_days: 120),
    ]
  end

  it 'selects based on Issue oldest open days only' do
    # Top two by oldest open issue alone: r3 (120 days) and r1 (100 days).
    config.top_n_oldest_issue = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('r3', 'r1')
  end
end
1187
+
1188
context 'specific filter logic: top_n_time_to_close_issue' do
  let(:repositories) do
    [
      mock_repo_data('r1', avg_close_issue_hours: 100),
      mock_repo_data('r2', avg_close_issue_hours: 50),
      mock_repo_data('r3', avg_close_issue_hours: 120),
    ]
  end

  it 'selects based on Issue avg time to close only' do
    # Top two by issue close time alone: r3 (120h) and r1 (100h).
    config.top_n_time_to_close_issue = 2
    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('r3', 'r1')
  end
end
1202
+
1203
context 'with combinations of general and specific PR/Issue filters' do
  let(:repositories) do
    [
      # Max stale = 20 (Issue)
      mock_repo_data('repo_max_stale_high', stale_pr: 1, stale_issue: 20),
      # PR stale = 15
      mock_repo_data('repo_pr_stale_high', stale_pr: 15, stale_issue: 1),
      # Issue stale = 18
      mock_repo_data('repo_issue_stale_high', stale_pr: 2, stale_issue: 18),
      mock_repo_data('repo_all_low', stale_pr: 1, stale_issue: 1),
    ]
  end

  it 'includes repos meeting general OR specific criteria ' \
     '(top_n_stale and top_n_stale_pr)' do
    # General filter picks repo_max_stale_high (max stale 20); the
    # PR-specific filter independently picks repo_pr_stale_high (15).
    # The result is the union of both selections.
    config.top_n_stale = 1
    config.top_n_stale_pr = 1

    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly(
      'repo_max_stale_high', 'repo_pr_stale_high'
    )
  end

  it 'includes repos meeting general OR specific criteria ' \
     '(top_n_stale and top_n_stale_issue)' do
    # Both filters select the same repo (repo_max_stale_high tops the
    # overall max AND the issue-only ranking), so the union has one entry.
    config.top_n_stale = 1
    config.top_n_stale_issue = 1

    names = filter_repositories(repositories, config)
            .map { |repo| repo[:name] }
    expect(names).to contain_exactly('repo_max_stale_high')
  end
end
1241
+ end
1242
+ end