@elisra-devops/docgen-data-provider 1.63.13 → 1.68.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/.github/workflows/ci.yml +26 -9
- package/.github/workflows/release.yml +9 -10
- package/README.md +50 -24
- package/bin/helpers/tfs.d.ts +3 -0
- package/bin/helpers/tfs.js +44 -7
- package/bin/helpers/tfs.js.map +1 -1
- package/bin/modules/GitDataProvider.d.ts +10 -0
- package/bin/modules/GitDataProvider.js +10 -0
- package/bin/modules/GitDataProvider.js.map +1 -1
- package/bin/modules/TestDataProvider.js +0 -1
- package/bin/modules/TestDataProvider.js.map +1 -1
- package/bin/modules/TicketsDataProvider.d.ts +63 -24
- package/bin/modules/TicketsDataProvider.js +216 -114
- package/bin/modules/TicketsDataProvider.js.map +1 -1
- package/bin/tests/helpers/helper.test.js +279 -0
- package/bin/tests/helpers/helper.test.js.map +1 -0
- package/bin/{helpers/test → tests/helpers}/tfs.test.js +312 -49
- package/bin/tests/helpers/tfs.test.js.map +1 -0
- package/bin/tests/index.test.js +25 -0
- package/bin/tests/index.test.js.map +1 -0
- package/bin/tests/models/tfs-data.test.js +160 -0
- package/bin/tests/models/tfs-data.test.js.map +1 -0
- package/bin/{modules/test → tests/modules}/JfrogDataProvider.test.js +9 -9
- package/bin/tests/modules/JfrogDataProvider.test.js.map +1 -0
- package/bin/tests/modules/ResultDataProvider.test.js +1942 -0
- package/bin/tests/modules/ResultDataProvider.test.js.map +1 -0
- package/bin/tests/modules/gitDataProvider.test.js +1888 -0
- package/bin/tests/modules/gitDataProvider.test.js.map +1 -0
- package/bin/{modules/test → tests/modules}/managmentDataProvider.test.js +13 -1
- package/bin/tests/modules/managmentDataProvider.test.js.map +1 -0
- package/bin/tests/modules/pipelineDataProvider.test.d.ts +1 -0
- package/bin/tests/modules/pipelineDataProvider.test.js +783 -0
- package/bin/tests/modules/pipelineDataProvider.test.js.map +1 -0
- package/bin/tests/modules/testDataProvider.test.d.ts +1 -0
- package/bin/tests/modules/testDataProvider.test.js +717 -0
- package/bin/tests/modules/testDataProvider.test.js.map +1 -0
- package/bin/tests/modules/ticketsDataProvider.test.d.ts +1 -0
- package/bin/tests/modules/ticketsDataProvider.test.js +1681 -0
- package/bin/tests/modules/ticketsDataProvider.test.js.map +1 -0
- package/bin/tests/utils/DataProviderUtils.test.d.ts +1 -0
- package/bin/tests/utils/DataProviderUtils.test.js +61 -0
- package/bin/tests/utils/DataProviderUtils.test.js.map +1 -0
- package/bin/tests/utils/testStepParserHelper.test.d.ts +1 -0
- package/bin/tests/utils/testStepParserHelper.test.js +359 -0
- package/bin/tests/utils/testStepParserHelper.test.js.map +1 -0
- package/package.json +9 -1
- package/src/helpers/tfs.ts +51 -7
- package/src/modules/GitDataProvider.ts +10 -0
- package/src/modules/TestDataProvider.ts +0 -1
- package/src/modules/TicketsDataProvider.ts +298 -141
- package/src/tests/helpers/helper.test.ts +337 -0
- package/src/tests/helpers/tfs.test.ts +1092 -0
- package/src/tests/index.test.ts +28 -0
- package/src/tests/models/tfs-data.test.ts +203 -0
- package/src/tests/modules/JfrogDataProvider.test.ts +167 -0
- package/src/tests/modules/ResultDataProvider.test.ts +2571 -0
- package/src/tests/modules/gitDataProvider.test.ts +2628 -0
- package/src/{modules/test → tests/modules}/managmentDataProvider.test.ts +33 -1
- package/src/tests/modules/pipelineDataProvider.test.ts +1038 -0
- package/src/tests/modules/testDataProvider.test.ts +1046 -0
- package/src/tests/modules/ticketsDataProvider.test.ts +2204 -0
- package/src/tests/utils/DataProviderUtils.test.ts +76 -0
- package/src/tests/utils/testStepParserHelper.test.ts +437 -0
- package/tsconfig.json +1 -0
- package/bin/helpers/test/tfs.test.js.map +0 -1
- package/bin/modules/test/JfrogDataProvider.test.js.map +0 -1
- package/bin/modules/test/ResultDataProvider.test.js +0 -444
- package/bin/modules/test/ResultDataProvider.test.js.map +0 -1
- package/bin/modules/test/gitDataProvider.test.js +0 -428
- package/bin/modules/test/gitDataProvider.test.js.map +0 -1
- package/bin/modules/test/managmentDataProvider.test.js.map +0 -1
- package/bin/modules/test/pipelineDataProvider.test.js +0 -237
- package/bin/modules/test/pipelineDataProvider.test.js.map +0 -1
- package/bin/modules/test/testDataProvider.test.js +0 -234
- package/bin/modules/test/testDataProvider.test.js.map +0 -1
- package/bin/modules/test/ticketsDataProvider.test.js +0 -348
- package/bin/modules/test/ticketsDataProvider.test.js.map +0 -1
- package/src/helpers/test/tfs.test.ts +0 -748
- package/src/modules/test/JfrogDataProvider.test.ts +0 -171
- package/src/modules/test/ResultDataProvider.test.ts +0 -542
- package/src/modules/test/gitDataProvider.test.ts +0 -645
- package/src/modules/test/pipelineDataProvider.test.ts +0 -292
- package/src/modules/test/testDataProvider.test.ts +0 -318
- package/src/modules/test/ticketsDataProvider.test.ts +0 -462
- /package/bin/{helpers/test/tfs.test.d.ts → tests/helpers/helper.test.d.ts} +0 -0
- /package/bin/{modules/test/JfrogDataProvider.test.d.ts → tests/helpers/tfs.test.d.ts} +0 -0
- /package/bin/{modules/test/ResultDataProvider.test.d.ts → tests/index.test.d.ts} +0 -0
- /package/bin/{modules/test/gitDataProvider.test.d.ts → tests/models/tfs-data.test.d.ts} +0 -0
- /package/bin/{modules/test/managmentDataProvider.test.d.ts → tests/modules/JfrogDataProvider.test.d.ts} +0 -0
- /package/bin/{modules/test/pipelineDataProvider.test.d.ts → tests/modules/ResultDataProvider.test.d.ts} +0 -0
- /package/bin/{modules/test/testDataProvider.test.d.ts → tests/modules/gitDataProvider.test.d.ts} +0 -0
- /package/bin/{modules/test/ticketsDataProvider.test.d.ts → tests/modules/managmentDataProvider.test.d.ts} +0 -0
package/bin/tests/modules/pipelineDataProvider.test.js
@@ -0,0 +1,783 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const tfs_1 = require("../../helpers/tfs");
+const PipelinesDataProvider_1 = require("../../modules/PipelinesDataProvider");
+const logger_1 = require("../../utils/logger");
+jest.mock('../../helpers/tfs');
+jest.mock('../../utils/logger');
+jest.mock('../../modules/GitDataProvider');
+describe('PipelinesDataProvider', () => {
+    let pipelinesDataProvider;
+    const mockOrgUrl = 'https://dev.azure.com/orgname/';
+    const mockToken = 'mock-token';
+    beforeEach(() => {
+        jest.clearAllMocks();
+        pipelinesDataProvider = new PipelinesDataProvider_1.default(mockOrgUrl, mockToken);
+    });
+    describe('isMatchingPipeline', () => {
+        // Create test method to access private method
+        const invokeIsMatchingPipeline = (fromPipeline, targetPipeline, searchPrevPipelineFromDifferentCommit) => {
+            return pipelinesDataProvider.isMatchingPipeline(fromPipeline, targetPipeline, searchPrevPipelineFromDifferentCommit);
+        };
+        it('should return false when repository IDs are different', () => {
+            // Arrange
+            const fromPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            const targetPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo2' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            // Act
+            const result = invokeIsMatchingPipeline(fromPipeline, targetPipeline, false);
+            // Assert
+            expect(result).toBe(false);
+        });
+        it('should return true when versions are the same and searchPrevPipelineFromDifferentCommit is false', () => {
+            // Arrange
+            const fromPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            const targetPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            // Act
+            const result = invokeIsMatchingPipeline(fromPipeline, targetPipeline, false);
+            // Assert
+            expect(result).toBe(true);
+        });
+        it('should return false when versions are the same and searchPrevPipelineFromDifferentCommit is true', () => {
+            // Arrange
+            const fromPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            const targetPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            // Act
+            const result = invokeIsMatchingPipeline(fromPipeline, targetPipeline, true);
+            // Assert
+            expect(result).toBe(false);
+        });
+        it('should return true when refNames match but versions differ', () => {
+            // Arrange
+            const fromPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            const targetPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v2',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            // Act
+            const result = invokeIsMatchingPipeline(fromPipeline, targetPipeline, true);
+            // Assert
+            expect(result).toBe(true);
+        });
+        it('should use __designer_repo when self is not available', () => {
+            // Arrange
+            const fromPipeline = {
+                resources: {
+                    repositories: {
+                        __designer_repo: {
+                            repository: { id: 'repo1' },
+                            version: 'v1',
+                            refName: 'refs/heads/main',
+                        },
+                    },
+                },
+            };
+            const targetPipeline = {
+                resources: {
+                    repositories: {
+                        __designer_repo: {
+                            repository: { id: 'repo1' },
+                            version: 'v1',
+                            refName: 'refs/heads/main',
+                        },
+                    },
+                },
+            };
+            // Act
+            const result = invokeIsMatchingPipeline(fromPipeline, targetPipeline, false);
+            // Assert
+            expect(result).toBe(true);
+        });
+    });
+    describe('getPipelineRunDetails', () => {
+        it('should call TFSServices.getItemContent with correct parameters', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const pipelineId = 123;
+            const runId = 456;
+            const mockResponse = { id: runId, resources: {} };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.getPipelineRunDetails(projectName, pipelineId, runId);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(`${mockOrgUrl}${projectName}/_apis/pipelines/${pipelineId}/runs/${runId}`, mockToken);
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('GetPipelineRunHistory', () => {
+        it('should return filtered pipeline run history', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const pipelineId = '123';
+            const mockResponse = {
+                value: [
+                    { id: 1, result: 'succeeded' },
+                    { id: 2, result: 'failed' },
+                    { id: 3, result: 'canceled' },
+                    { id: 4, result: 'succeeded' },
+                ],
+            };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetPipelineRunHistory(projectName, pipelineId);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(`${mockOrgUrl}${projectName}/_apis/pipelines/${pipelineId}/runs`, mockToken, 'get', null, null);
+            expect(result).toEqual({
+                count: 4, // Note: Current filter logic keeps all runs where result is not 'failed' AND not 'canceled'
+                value: mockResponse.value,
+            });
+        });
+        it('should handle API errors gracefully', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const pipelineId = '123';
+            const expectedError = new Error('API error');
+            tfs_1.TFSServices.getItemContent.mockRejectedValueOnce(expectedError);
+            // Act
+            const result = await pipelinesDataProvider.GetPipelineRunHistory(projectName, pipelineId);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(`${mockOrgUrl}${projectName}/_apis/pipelines/${pipelineId}/runs`, mockToken, 'get', null, null);
+            expect(logger_1.default.error).toHaveBeenCalledWith(`Could not fetch Pipeline Run History: ${expectedError.message}`);
+            expect(result).toBeUndefined();
+        });
+        it('should return response when value is undefined', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const pipelineId = '123';
+            const mockResponse = { count: 0 };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetPipelineRunHistory(projectName, pipelineId);
+            // Assert
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('getPipelineBuildByBuildId', () => {
+        it('should fetch pipeline build by build ID', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const buildId = 123;
+            const mockResponse = { id: buildId, buildNumber: '20231201.1' };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.getPipelineBuildByBuildId(projectName, buildId);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(`${mockOrgUrl}${projectName}/_apis/build/builds/${buildId}`, mockToken, 'get');
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('TriggerBuildById', () => {
+        it('should trigger a build with parameters', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const buildDefId = '456';
+            const parameters = '{"Test":"123"}';
+            const mockResponse = { id: 789, status: 'queued' };
+            tfs_1.TFSServices.postRequest.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.TriggerBuildById(projectName, buildDefId, parameters);
+            // Assert
+            expect(tfs_1.TFSServices.postRequest).toHaveBeenCalledWith(`${mockOrgUrl}${projectName}/_apis/build/builds?api-version=5.0`, mockToken, 'post', {
+                definition: { id: buildDefId },
+                parameters: parameters,
+            }, null);
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('GetArtifactByBuildId', () => {
+        it('should return empty response when no artifacts exist', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const buildId = '123';
+            const artifactName = 'drop';
+            const mockResponse = { count: 0 };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetArtifactByBuildId(projectName, buildId, artifactName);
+            // Assert
+            expect(result).toEqual(mockResponse);
+        });
+        it('should download artifact when it exists', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const buildId = '123';
+            const artifactName = 'drop';
+            const mockArtifactsResponse = { count: 1 };
+            const mockArtifactResponse = {
+                resource: { downloadUrl: 'https://example.com/download' },
+            };
+            const mockDownloadResult = { data: Buffer.from('zip content') };
+            tfs_1.TFSServices.getItemContent
+                .mockResolvedValueOnce(mockArtifactsResponse)
+                .mockResolvedValueOnce(mockArtifactResponse);
+            tfs_1.TFSServices.downloadZipFile.mockResolvedValueOnce(mockDownloadResult);
+            // Act
+            const result = await pipelinesDataProvider.GetArtifactByBuildId(projectName, buildId, artifactName);
+            // Assert
+            expect(tfs_1.TFSServices.downloadZipFile).toHaveBeenCalledWith('https://example.com/download', mockToken);
+            expect(result).toEqual(mockDownloadResult);
+        });
+        it('should throw error when artifact fetch fails', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const buildId = '123';
+            const artifactName = 'drop';
+            const mockError = new Error('Artifact not found');
+            tfs_1.TFSServices.getItemContent.mockRejectedValueOnce(mockError);
+            // Act & Assert
+            await expect(pipelinesDataProvider.GetArtifactByBuildId(projectName, buildId, artifactName)).rejects.toThrow();
+            expect(logger_1.default.error).toHaveBeenCalled();
+        });
+    });
+    describe('getPipelineStageName', () => {
+        it('should return the matching Stage record', async () => {
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce({
+                records: [
+                    { type: 'Stage', name: 'Deploy', state: 'completed', result: 'succeeded' },
+                    { type: 'Job', name: 'Job1' },
+                ],
+            });
+            const record = await pipelinesDataProvider.getPipelineStageName(123, 'project1', 'Deploy');
+            expect(record).toEqual(expect.objectContaining({ type: 'Stage', name: 'Deploy' }));
+        });
+        it('should return undefined when no matching stage exists', async () => {
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce({
+                records: [{ type: 'Stage', name: 'Build', state: 'completed', result: 'succeeded' }],
+            });
+            const record = await pipelinesDataProvider.getPipelineStageName(123, 'project1', 'Deploy');
+            expect(record).toBeUndefined();
+        });
+        it('should return undefined on fetch error and log', async () => {
+            tfs_1.TFSServices.getItemContent.mockRejectedValueOnce(new Error('boom'));
+            const record = await pipelinesDataProvider.getPipelineStageName(123, 'project1', 'Deploy');
+            expect(record).toBeUndefined();
+            expect(logger_1.default.error).toHaveBeenCalled();
+        });
+    });
+    describe('isStageSuccessful', () => {
+        it('should return false when stage is missing', async () => {
+            jest.spyOn(pipelinesDataProvider, 'getPipelineStageName').mockResolvedValueOnce(undefined);
+            await expect(pipelinesDataProvider.isStageSuccessful({ id: 1 }, 'project1', 'Deploy')).resolves.toBeUndefined();
+        });
+        it('should return false when stage is not completed', async () => {
+            jest
+                .spyOn(pipelinesDataProvider, 'getPipelineStageName')
+                .mockResolvedValueOnce({ state: 'inProgress', result: 'succeeded' });
+            await expect(pipelinesDataProvider.isStageSuccessful({ id: 1 }, 'project1', 'Deploy')).resolves.toBe(false);
+        });
+        it('should return false when stage result is not succeeded', async () => {
+            jest
+                .spyOn(pipelinesDataProvider, 'getPipelineStageName')
+                .mockResolvedValueOnce({ state: 'completed', result: 'failed' });
+            await expect(pipelinesDataProvider.isStageSuccessful({ id: 1 }, 'project1', 'Deploy')).resolves.toBe(false);
+        });
+        it('should return true when stage is completed and succeeded', async () => {
+            jest
+                .spyOn(pipelinesDataProvider, 'getPipelineStageName')
+                .mockResolvedValueOnce({ state: 'completed', result: 'succeeded' });
+            await expect(pipelinesDataProvider.isStageSuccessful({ id: 1 }, 'project1', 'Deploy')).resolves.toBe(true);
+        });
+    });
+    describe('GetReleaseByReleaseId', () => {
+        it('should fetch release by ID', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const releaseId = 123;
+            const mockResponse = { id: releaseId, name: 'Release-1' };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetReleaseByReleaseId(projectName, releaseId);
+            // Assert
+            expect(result).toEqual(mockResponse);
+        });
+        it('should replace dev.azure.com with vsrm.dev.azure.com for release URL', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const releaseId = 123;
+            const mockResponse = { id: releaseId };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            await pipelinesDataProvider.GetReleaseByReleaseId(projectName, releaseId);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(expect.stringContaining('vsrm.dev.azure.com'), mockToken, 'get', null, null);
+        });
+    });
+    describe('GetReleaseHistory', () => {
+        it('should fetch release history for a definition', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const definitionId = '456';
+            const mockResponse = { value: [{ id: 1 }, { id: 2 }] };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetReleaseHistory(projectName, definitionId);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(expect.stringContaining(`definitionId=${definitionId}`), mockToken, 'get', null, null);
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('GetAllReleaseHistory', () => {
+        it('should fetch all releases with pagination', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const definitionId = '456';
+            const mockResponse1 = {
+                data: { value: [{ id: 1 }, { id: 2 }] },
+                headers: { 'x-ms-continuationtoken': 'token123' },
+            };
+            const mockResponse2 = {
+                data: { value: [{ id: 3 }] },
+                headers: {},
+            };
+            tfs_1.TFSServices.getItemContentWithHeaders
+                .mockResolvedValueOnce(mockResponse1)
+                .mockResolvedValueOnce(mockResponse2);
+            // Act
+            const result = await pipelinesDataProvider.GetAllReleaseHistory(projectName, definitionId);
+            // Assert
+            expect(result.count).toBe(3);
+            expect(result.value).toHaveLength(3);
+        });
+        it('should support x-ms-continuation-token header and default value when data is missing', async () => {
+            const projectName = 'project1';
+            const definitionId = '456';
+            tfs_1.TFSServices.getItemContentWithHeaders
+                .mockResolvedValueOnce({
+                    data: undefined,
+                    headers: { 'x-ms-continuation-token': 'token123' },
+                })
+                .mockResolvedValueOnce({
+                    data: { value: [{ id: 1 }] },
+                    headers: {},
+                });
+            const result = await pipelinesDataProvider.GetAllReleaseHistory(projectName, definitionId);
+            expect(result.count).toBe(1);
+            expect(result.value).toEqual([{ id: 1 }]);
+        });
+        it('should handle errors during pagination', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const definitionId = '456';
+            tfs_1.TFSServices.getItemContentWithHeaders.mockRejectedValueOnce(new Error('API Error'));
+            // Act
+            const result = await pipelinesDataProvider.GetAllReleaseHistory(projectName, definitionId);
+            // Assert
+            expect(result).toEqual({ count: 0, value: [] });
+            expect(logger_1.default.error).toHaveBeenCalled();
+        });
+    });
+    describe('GetAllPipelines', () => {
+        it('should fetch all pipelines for a project', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const mockResponse = { value: [{ id: 1 }, { id: 2 }], count: 2 };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetAllPipelines(projectName);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(`${mockOrgUrl}${projectName}/_apis/pipelines?$top=2000`, mockToken, 'get', null, null);
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('GetAllReleaseDefenitions', () => {
+        it('should fetch all release definitions', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const mockResponse = { value: [{ id: 1 }, { id: 2 }] };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetAllReleaseDefenitions(projectName);
+            // Assert
+            expect(tfs_1.TFSServices.getItemContent).toHaveBeenCalledWith(expect.stringContaining('vsrm.dev.azure.com'), mockToken, 'get', null, null);
+            expect(result).toEqual(mockResponse);
+        });
+    });
+    describe('GetRecentReleaseArtifactInfo', () => {
+        it('should return empty array when no releases exist', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const mockResponse = { value: [] };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetRecentReleaseArtifactInfo(projectName);
+            // Assert
+            expect(result).toEqual([]);
+        });
+        it('should return artifact info from most recent release', async () => {
+            // Arrange
+            const projectName = 'project1';
+            const mockReleasesResponse = { value: [{ id: 123 }] };
+            const mockReleaseResponse = {
+                artifacts: [
+                    {
+                        definitionReference: {
+                            definition: { name: 'artifact1' },
+                            version: { name: '1.0.0' },
+                        },
+                    },
+                ],
+            };
+            tfs_1.TFSServices.getItemContent
+                .mockResolvedValueOnce(mockReleasesResponse)
+                .mockResolvedValueOnce(mockReleaseResponse);
+            // Act
+            const result = await pipelinesDataProvider.GetRecentReleaseArtifactInfo(projectName);
+            // Assert
+            expect(result).toEqual([{ artifactName: 'artifact1', artifactVersion: '1.0.0' }]);
+        });
+    });
+    describe('getPipelineResourcePipelinesFromObject', () => {
+        it('should return empty set when no pipeline resources exist', async () => {
+            // Arrange
+            const inPipeline = {
+                resources: {},
+            };
+            // Act
+            const result = await pipelinesDataProvider.getPipelineResourcePipelinesFromObject(inPipeline);
+            // Assert
+            expect(result).toEqual(new Set());
+        });
+        it('should extract pipeline resources from pipeline object', async () => {
+            // Arrange
+            const inPipeline = {
+                resources: {
+                    pipelines: {
+                        myPipeline: {
+                            pipeline: {
+                                id: 123,
+                                url: 'https://dev.azure.com/org/project/_apis/pipelines/123?revision=1',
+                            },
+                        },
+                    },
+                },
+            };
+            const mockBuildResponse = {
+                definition: { id: 456, type: 'build' },
+                buildNumber: '20231201.1',
+                project: { name: 'project1' },
+                repository: { type: 'TfsGit' },
+            };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce(mockBuildResponse);
+            // Act
+            const result = await pipelinesDataProvider.getPipelineResourcePipelinesFromObject(inPipeline);
+            // Assert
+            expect(result).toHaveLength(1);
+            expect(result[0]).toEqual({
+                name: 'myPipeline',
+                buildId: 123,
+                definitionId: 456,
+                buildNumber: '20231201.1',
+                teamProject: 'project1',
+                provider: 'TfsGit',
+            });
+        });
+        it('should handle errors when fetching pipeline resources', async () => {
+            // Arrange
+            const inPipeline = {
+                resources: {
+                    pipelines: {
+                        myPipeline: {
+                            pipeline: {
+                                id: 123,
+                                url: 'https://dev.azure.com/org/project/_apis/pipelines/123?revision=1',
+                            },
+                        },
+                    },
+                },
+            };
+            tfs_1.TFSServices.getItemContent.mockRejectedValueOnce(new Error('API Error'));
+            // Act
+            const result = await pipelinesDataProvider.getPipelineResourcePipelinesFromObject(inPipeline);
+            // Assert
+            expect(result).toEqual([]);
+            expect(logger_1.default.error).toHaveBeenCalled();
+        });
+    });
+    describe('getPipelineResourceRepositoriesFromObject', () => {
+        it('should return empty map when no repository resources exist', async () => {
+            // Arrange
+            const inPipeline = {
+                resources: {},
+            };
+            const mockGitDataProvider = {};
+            // Act
+            const result = await pipelinesDataProvider.getPipelineResourceRepositoriesFromObject(inPipeline, mockGitDataProvider);
+            // Assert
+            expect(result).toEqual(new Map());
+        });
+        it('should extract repository resources from pipeline object', async () => {
+            // Arrange
+            const inPipeline = {
+                resources: {
+                    repositories: {
+                        self: {
+                            repository: { id: 'repo-123', type: 'azureReposGit' },
+                            version: 'abc123',
+                        },
+                    },
+                },
+            };
+            const mockRepo = {
+                name: 'MyRepo',
+                url: 'https://dev.azure.com/org/project/_git/MyRepo',
+            };
+            const mockGitDataProvider = {
+                GetGitRepoFromRepoId: jest.fn().mockResolvedValue(mockRepo),
+            };
+            // Act
+            const result = await pipelinesDataProvider.getPipelineResourceRepositoriesFromObject(inPipeline, mockGitDataProvider);
+            // Assert
+            expect(result).toHaveLength(1);
+            expect(result[0]).toEqual({
+                repoName: 'MyRepo',
+                repoSha1: 'abc123',
+                url: 'https://dev.azure.com/org/project/_git/MyRepo',
+            });
+        });
+        it('should skip non-azureReposGit repositories', async () => {
+            // Arrange
+            const inPipeline = {
+                resources: {
+                    repositories: {
+                        external: {
+                            repository: { id: 'repo-123', type: 'GitHub' },
+                            version: 'abc123',
+                        },
+                    },
+                },
+            };
+            const mockGitDataProvider = {
+                GetGitRepoFromRepoId: jest.fn(),
+            };
+            // Act
+            const result = await pipelinesDataProvider.getPipelineResourceRepositoriesFromObject(inPipeline, mockGitDataProvider);
+            // Assert
+            expect(result).toHaveLength(0);
+            expect(mockGitDataProvider.GetGitRepoFromRepoId).not.toHaveBeenCalled();
+        });
+    });
+    describe('findPreviousPipeline', () => {
+        it('should return undefined when no pipeline runs exist', async () => {
+            // Arrange
+            const teamProject = 'project1';
+            const pipelineId = '123';
+            const toPipelineRunId = 100;
+            const targetPipeline = {};
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce({});
+            // Act
+            const result = await pipelinesDataProvider.findPreviousPipeline(teamProject, pipelineId, toPipelineRunId, targetPipeline, false);
+            // Assert
+            expect(result).toBeUndefined();
+        });
+        it('should skip invalid runs and return first matching previous pipeline', async () => {
+            const teamProject = 'project1';
+            const pipelineId = '123';
+            const toPipelineRunId = 100;
+            const targetPipeline = {
+                resources: {
+                    repositories: { '0': { self: { repository: { id: 'r' }, version: 'v2', refName: 'main' } } },
+                },
+            };
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce({
+                value: [
+                    { id: 100, result: 'succeeded' },
+                    { id: 99, result: 'succeeded' },
+                ],
+            });
+            jest.spyOn(pipelinesDataProvider, 'getPipelineRunDetails').mockResolvedValueOnce({
+                resources: {
+                    repositories: { '0': { self: { repository: { id: 'r' }, version: 'v1', refName: 'main' } } },
+                },
+            });
+            jest.spyOn(pipelinesDataProvider, 'isMatchingPipeline').mockReturnValueOnce(true);
+            const res = await pipelinesDataProvider.findPreviousPipeline(teamProject, pipelineId, toPipelineRunId, targetPipeline, true);
+            expect(res).toBe(99);
+        });
+        it('should skip when fromStage provided but stage is not successful', async () => {
+            const teamProject = 'project1';
+            const pipelineId = '123';
+            const toPipelineRunId = 100;
+            const targetPipeline = {};
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce({
+                value: [{ id: 99, result: 'succeeded' }],
+            });
+            jest.spyOn(pipelinesDataProvider, 'isStageSuccessful').mockResolvedValueOnce(false);
+            const detailsSpy = jest.spyOn(pipelinesDataProvider, 'getPipelineRunDetails');
+            const res = await pipelinesDataProvider.findPreviousPipeline(teamProject, pipelineId, toPipelineRunId, targetPipeline, false, 'Deploy');
+            expect(res).toBeUndefined();
+            expect(detailsSpy).not.toHaveBeenCalled();
+        });
+        it('should skip when pipeline details do not include repositories', async () => {
+            const teamProject = 'project1';
+            const pipelineId = '123';
+            const toPipelineRunId = 100;
+            const targetPipeline = {};
+            tfs_1.TFSServices.getItemContent.mockResolvedValueOnce({
+                value: [{ id: 99, result: 'succeeded' }],
+            });
+            jest
+                .spyOn(pipelinesDataProvider, 'getPipelineRunDetails')
+                .mockResolvedValueOnce({ resources: {} });
+            const res = await pipelinesDataProvider.findPreviousPipeline(teamProject, pipelineId, toPipelineRunId, targetPipeline, false);
+            expect(res).toBeUndefined();
+        });
+        it('should find matching previous pipeline', async () => {
+            // Arrange
+            const teamProject = 'project1';
+            const pipelineId = '123';
+            const toPipelineRunId = 100;
+            const targetPipeline = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v2',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            const mockRunHistory = {
+                value: [{ id: 99, result: 'succeeded' }],
+            };
+            const mockPipelineDetails = {
+                resources: {
+                    repositories: {
+                        '0': {
+                            self: {
+                                repository: { id: 'repo1' },
+                                version: 'v1',
+                                refName: 'refs/heads/main',
+                            },
+                        },
+                    },
+                },
+            };
+            tfs_1.TFSServices.getItemContent
+                .mockResolvedValueOnce(mockRunHistory)
+                .mockResolvedValueOnce(mockPipelineDetails);
+            // Act
+            const result = await pipelinesDataProvider.findPreviousPipeline(teamProject, pipelineId, toPipelineRunId, targetPipeline, true);
+            // Assert
+            expect(result).toBe(99);
+        });
+    });
+    describe('isInvalidPipelineRun', () => {
+        const invokeIsInvalidPipelineRun = (pipelineRun, toPipelineRunId, fromStage) => {
+            return pipelinesDataProvider.isInvalidPipelineRun(pipelineRun, toPipelineRunId, fromStage);
+        };
+        it('should return true when pipeline run id >= toPipelineRunId', () => {
+            expect(invokeIsInvalidPipelineRun({ id: 100, result: 'succeeded' }, 100, '')).toBe(true);
+            expect(invokeIsInvalidPipelineRun({ id: 101, result: 'succeeded' }, 100, '')).toBe(true);
+        });
+        it('should return true for canceled/failed/canceling results', () => {
+            expect(invokeIsInvalidPipelineRun({ id: 99, result: 'canceled' }, 100, '')).toBe(true);
+            expect(invokeIsInvalidPipelineRun({ id: 99, result: 'failed' }, 100, '')).toBe(true);
+            expect(invokeIsInvalidPipelineRun({ id: 99, result: 'canceling' }, 100, '')).toBe(true);
+        });
+        it('should return true for unknown result without fromStage', () => {
+            expect(invokeIsInvalidPipelineRun({ id: 99, result: 'unknown' }, 100, '')).toBe(true);
+        });
+        it('should return false for valid pipeline run with fromStage', () => {
+            expect(invokeIsInvalidPipelineRun({ id: 99, result: 'unknown' }, 100, 'Deploy')).toBe(false);
+        });
+        it('should return false for succeeded result', () => {
+            expect(invokeIsInvalidPipelineRun({ id: 99, result: 'succeeded' }, 100, '')).toBe(false);
+        });
+    });
+});
+//# sourceMappingURL=pipelineDataProvider.test.js.map