@pranaysahith/decap-cms-backend-gitlab 3.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,802 @@
1
+ import { ApolloClient } from 'apollo-client';
2
+ import { InMemoryCache } from 'apollo-cache-inmemory';
3
+ import { createHttpLink } from 'apollo-link-http';
4
+ import { setContext } from 'apollo-link-context';
5
+ import { localForage, parseLinkHeader, unsentRequest, then, APIError, Cursor, readFile, CMS_BRANCH_PREFIX, generateContentKey, isCMSLabel, EditorialWorkflowError, labelToStatus, statusToLabel, DEFAULT_PR_BODY, MERGE_COMMIT_MESSAGE, responseParser, PreviewState, parseContentKey, branchFromContentKey, requestWithBackoff, readFileMetadata, throwOnConflictingBranches } from 'decap-cms-lib-util';
6
+ import { dirname } from 'path';
7
+ import { Base64 } from 'js-base64';
8
+ import { Map } from 'immutable';
9
+ import flow from 'lodash/flow';
10
+ import partial from 'lodash/partial';
11
+ import result from 'lodash/result';
12
+ import trimStart from 'lodash/trimStart';
13
// GraphQL documents used by the Apollo-based code paths.
import * as queries from './queries';

// Apollo fetch policy that bypasses the client cache entirely.
const NO_CACHE = 'no-cache';

// Backend name reported in APIError instances and log output.
export const API_NAME = 'GitLab';
16
// String constants for the `action` field of GitLab Commits API payloads.
var CommitAction = /*#__PURE__*/function (CommitAction) {
  CommitAction.CREATE = 'create';
  CommitAction.DELETE = 'delete';
  CommitAction.MOVE = 'move';
  CommitAction.UPDATE = 'update';
  return CommitAction;
}(CommitAction || {});
23
// Pipeline/commit status values as reported by the GitLab statuses API.
var GitLabCommitStatuses = /*#__PURE__*/function (GitLabCommitStatuses) {
  GitLabCommitStatuses.Pending = 'pending';
  GitLabCommitStatuses.Running = 'running';
  GitLabCommitStatuses.Success = 'success';
  GitLabCommitStatuses.Failed = 'failed';
  GitLabCommitStatuses.Canceled = 'canceled';
  return GitLabCommitStatuses;
}(GitLabCommitStatuses || {});
31
/**
 * Returns the group entry with the highest `group_access_level`.
 * Ties keep the earlier entry; an empty array yields `undefined`.
 */
export function getMaxAccess(groups) {
  let best = groups[0];
  for (const group of groups) {
    if (group.group_access_level > best.group_access_level) {
      best = group;
    }
  }
  return best;
}
39
/**
 * Invokes `action` on consecutive slices of `items`, each at most
 * `maxPerBatch` long (the final slice may be shorter).
 */
function batch(items, maxPerBatch, action) {
  for (let start = 0; start < items.length; start += maxPerBatch) {
    action(items.slice(start, start + maxPerBatch));
  }
}
45
// GitLab backend API client for Decap CMS. Wraps the GitLab REST API (v4)
// and, when enabled, the GitLab GraphQL API through an Apollo client.
export default class API {
  // config: apiRoot/graphQLAPIRoot endpoints, OAuth token, repo path,
  // default branch, squash-merge flag, editorial-workflow defaults.
  constructor(config) {
    this.apiRoot = config.apiRoot || 'https://gitlab.com/api/v4';
    this.graphQLAPIRoot = config.graphQLAPIRoot || 'https://gitlab.com/api/graphql';
    this.token = config.token || false;
    this.branch = config.branch || 'master';
    this.repo = config.repo || '';
    // GitLab accepts a URL-encoded "namespace/project" path as the project id.
    this.repoURL = `/projects/${encodeURIComponent(this.repo)}`;
    this.squashMerges = config.squashMerges;
    this.initialWorkflowStatus = config.initialWorkflowStatus;
    this.cmsLabelPrefix = config.cmsLabelPrefix;
    // GraphQL is strictly opt-in; REST remains the default transport.
    if (config.useGraphQL === true) {
      this.graphQLClient = this.getApolloClient();
    }
  }
60
  // Builds an Apollo client for the GitLab GraphQL endpoint. Caching is
  // disabled (NO_CACHE fetch policy) so every query hits the API.
  getApolloClient() {
    const authLink = setContext((_, {
      headers
    }) => {
      return {
        headers: {
          'Content-Type': 'application/json; charset=utf-8',
          ...headers,
          // NOTE(review): REST requests use `Bearer ${token}` (see
          // withAuthorizationHeaders) while this uses a `token` scheme —
          // confirm GitLab's GraphQL endpoint accepts this form.
          authorization: this.token ? `token ${this.token}` : ''
        }
      };
    });
    const httpLink = createHttpLink({
      uri: this.graphQLAPIRoot
    });
    return new ApolloClient({
      link: authLink.concat(httpLink),
      cache: new InMemoryCache(),
      defaultOptions: {
        watchQuery: {
          fetchPolicy: NO_CACHE,
          errorPolicy: 'ignore'
        },
        query: {
          fetchPolicy: NO_CACHE,
          errorPolicy: 'all'
        }
      }
    });
  }
90
  // Clears the Apollo store; a no-op when GraphQL is disabled.
  reset() {
    return this.graphQLClient?.resetStore();
  }

  // Adds an OAuth Bearer Authorization header when a token is configured.
  withAuthorizationHeaders = req => {
    const withHeaders = unsentRequest.withHeaders(this.token ? {
      Authorization: `Bearer ${this.token}`
    } : {}, req);
    return Promise.resolve(withHeaders);
  };
99
  // Prepares a request: prefixes the API root, attaches auth headers, and
  // disables HTTP caching unless the request already carries its own
  // `cache` option.
  buildRequest = async req => {
    const withRoot = unsentRequest.withRoot(this.apiRoot)(req);
    const withAuthorizationHeaders = await this.withAuthorizationHeaders(withRoot);
    if (withAuthorizationHeaders.has('cache')) {
      return withAuthorizationHeaders;
    } else {
      const withNoCache = unsentRequest.withNoCache(withAuthorizationHeaders);
      return withNoCache;
    }
  };
109
+ request = async req => {
110
+ try {
111
+ return requestWithBackoff(this, req);
112
+ } catch (err) {
113
+ throw new APIError(err.message, null, API_NAME);
114
+ }
115
+ };
116
  // Response parsers shared by the request helpers below.
  responseToJSON = responseParser({
    format: 'json',
    apiName: API_NAME
  });
  responseToBlob = responseParser({
    format: 'blob',
    apiName: API_NAME
  });
  responseToText = responseParser({
    format: 'text',
    apiName: API_NAME
  });

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  requestJSON = req => this.request(req).then(this.responseToJSON);
  requestText = req => this.request(req).then(this.responseToText);

  // Fetches the authenticated user's profile.
  user = () => this.requestJSON('/user');

  // GitLab access levels: 30 = Developer, 40 = Maintainer.
  WRITE_ACCESS = 30;
  MAINTAINER_ACCESS = 40;
135
  // Determines whether the authenticated user can write to the repository,
  // checking project access, group access, and shared-group access in turn.
  hasWriteAccess = async () => {
    const {
      shared_with_groups: sharedWithGroups,
      permissions
    } = await this.requestJSON(this.repoURL);
    const {
      project_access: projectAccess,
      group_access: groupAccess
    } = permissions;
    if (projectAccess && projectAccess.access_level >= this.WRITE_ACCESS) {
      return true;
    }
    if (groupAccess && groupAccess.access_level >= this.WRITE_ACCESS) {
      return true;
    }
    // check for group write permissions
    if (sharedWithGroups && sharedWithGroups.length > 0) {
      const maxAccess = getMaxAccess(sharedWithGroups);
      // maintainer access
      if (maxAccess.group_access_level >= this.MAINTAINER_ACCESS) {
        return true;
      }
      // developer access
      if (maxAccess.group_access_level >= this.WRITE_ACCESS) {
        // Developers only count as writers when the default branch allows
        // them both to merge and to push.
        try {
          const branch = await this.getDefaultBranch();
          if (branch.developers_can_merge && branch.developers_can_push) {
            return true;
          }
        } catch (e) {
          // Deliberate best-effort: if the branch lookup fails we fall
          // through and report no write access rather than crash.
          console.log('Failed getting default branch', e);
        }
      }
    }
    return false;
  };
172
  // Reads a file's raw content at `path` on `branch`. The shared readFile
  // helper uses `sha` as a localForage cache key, so the network fetch only
  // runs on a cache miss. `parseText` selects text vs Blob parsing.
  readFile = async (path, sha, {
    parseText = true,
    branch = this.branch
  } = {}) => {
    const fetchContent = async () => {
      const content = await this.request({
        url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}/raw`,
        params: {
          ref: branch
        },
        // bypass the browser HTTP cache; freshness is managed via `sha`
        cache: 'no-store'
      }).then(parseText ? this.responseToText : this.responseToBlob);
      return content;
    };
    const content = await readFile(sha, fetchContent, localForage, parseText);
    return content;
  };
189
  // Returns { author, updatedOn } for a file from its latest commit on the
  // default branch, cached by `sha` via the shared readFileMetadata helper.
  async readFileMetadata(path, sha) {
    const fetchFileMetadata = async () => {
      try {
        const result = await this.requestJSON({
          url: `${this.repoURL}/repository/commits`,
          params: {
            path,
            ref_name: this.branch
          }
        });
        // assumes the commits API returns newest-first, so [0] is the most
        // recent commit touching `path` — TODO confirm ordering
        const commit = result[0];
        return {
          author: commit.author_name || commit.author_email,
          updatedOn: commit.authored_date
        };
      } catch (e) {
        // Deliberate best-effort: metadata is cosmetic, so any failure
        // (including an empty commit list) falls back to blanks.
        return {
          author: '',
          updatedOn: ''
        };
      }
    };
    const fileMetadata = await readFileMetadata(sha, fetchFileMetadata, localForage);
    return fileMetadata;
  }
214
  // Builds a pagination Cursor from GitLab's X-Page/X-Total-* response
  // headers and the Link header. Only navigation actions that would land on
  // a different page are exposed (e.g. no 'prev' while on page 1).
  getCursorFromHeaders = headers => {
    const page = parseInt(headers.get('X-Page'), 10);
    const pageCount = parseInt(headers.get('X-Total-Pages'), 10);
    const pageSize = parseInt(headers.get('X-Per-Page'), 10);
    const count = parseInt(headers.get('X-Total'), 10);
    const links = parseLinkHeader(headers.get('Link'));
    const actions = Map(links).keySeq().flatMap(key => key === 'prev' && page > 1 || key === 'next' && page < pageCount || key === 'first' && page > 1 || key === 'last' && page < pageCount ? [key] : []);
    return Cursor.create({
      actions,
      meta: {
        page,
        count,
        pageSize,
        pageCount
      },
      data: {
        links
      }
    });
  };

  // Convenience wrapper: derive a cursor straight from a response object.
  getCursor = ({
    headers
  }) => this.getCursorFromHeaders(headers);
237

  // Gets a cursor without retrieving the entries by using a HEAD
  // request
  fetchCursor = req => flow([unsentRequest.withMethod('HEAD'), this.request, then(this.getCursor)])(req);

  // Fetches one page and resolves to both its pagination cursor and its
  // JSON entries. A 404 is treated as an empty page rather than an error.
  fetchCursorAndEntries = req =>
    flow([
      unsentRequest.withMethod('GET'),
      this.request,
      p =>
        Promise.all([
          p.then(this.getCursor),
          p.then(this.responseToJSON).catch(e => {
            if (e.status === 404) {
              return [];
            } else {
              throw e;
            }
          })
        ]),
      then(([cursor, entries]) => ({
        cursor,
        entries
      }))
    ])(req);
251
  // Lists one page of files under `path` on the default branch, returning
  // the blobs plus a cursor for fetching further pages.
  listFiles = async (path, recursive = false) => {
    const {
      entries,
      cursor
    } = await this.fetchCursorAndEntries({
      url: `${this.repoURL}/repository/tree`,
      params: {
        path,
        ref: this.branch,
        recursive
      }
    });
    return {
      // tree entries include subtrees; only 'blob' entries are files
      files: entries.filter(({
        type
      }) => type === 'blob'),
      cursor
    };
  };
270
  // Follows a pagination action ('next', 'prev', 'first', 'last') from an
  // existing cursor and returns the new page of blobs plus its cursor.
  traverseCursor = async (cursor, action) => {
    const link = cursor.data.getIn(['links', action]);
    const {
      entries,
      cursor: newCursor
    } = await this.fetchCursorAndEntries(link);
    return {
      entries: entries.filter(({
        type
      }) => type === 'blob'),
      cursor: newCursor
    };
  };
283
  // Lists every blob under `path` on `branch` via GraphQL, following
  // endCursor/hasNextPage pagination until the tree is exhausted.
  listAllFilesGraphQL = async (path, recursive, branch) => {
    const files = [];
    let blobsPaths;
    let cursor;
    do {
      blobsPaths = await this.graphQLClient.query({
        query: queries.files,
        variables: {
          repo: this.repo,
          branch,
          path,
          recursive,
          cursor
        }
      });
      files.push(...blobsPaths.data.project.repository.tree.blobs.nodes);
      cursor = blobsPaths.data.project.repository.tree.blobs.pageInfo.endCursor;
    } while (blobsPaths.data.project.repository.tree.blobs.pageInfo.hasNextPage);
    return files;
  };
303
  // Bulk-reads file contents and last-commit metadata over GraphQL. Blob
  // contents are fetched in batches of 90 paths and last commits in batches
  // of 8 (commit lookups are heavier), then zipped back onto `files` by
  // index. NOTE(review): this assumes the GraphQL responses preserve the
  // order of the requested paths so index i lines up across files/blobs/
  // metadata — confirm against the queries' definitions.
  readFilesGraphQL = async files => {
    const paths = files.map(({
      path
    }) => path);
    const blobPromises = [];
    batch(paths, 90, slice => {
      blobPromises.push(this.graphQLClient.query({
        query: queries.blobs,
        variables: {
          repo: this.repo,
          branch: this.branch,
          paths: slice
        },
        fetchPolicy: 'cache-first'
      }));
    });
    const commitPromises = [];
    batch(paths, 8, slice => {
      commitPromises.push(this.graphQLClient.query({
        query: queries.lastCommits(slice),
        variables: {
          repo: this.repo,
          branch: this.branch
        },
        fetchPolicy: 'cache-first'
      }));
    });
    const [blobsResults, commitsResults] = await Promise.all([(await Promise.all(blobPromises)).map(result => result.data.project.repository.blobs.nodes), (await Promise.all(commitPromises)).map(result => Object.values(result.data.project.repository).map(({
      lastCommit
    }) => lastCommit).filter(Boolean))]);
    const blobs = blobsResults.flat().map(result => result.data);
    const metadata = commitsResults.flat().map(({
      author,
      authoredDate,
      authorName
    }) => ({
      // prefer the linked GitLab user; fall back to the raw commit author
      author: author ? author.name || author.username || author.publicEmail : authorName,
      updatedOn: authoredDate
    }));
    const filesWithData = files.map((file, index) => ({
      file: {
        ...file,
        ...metadata[index]
      },
      data: blobs[index]
    }));
    return filesWithData;
  };
351
  // Lists ALL blobs under `path` on `branch`, following REST pagination to
  // the end; delegates to the GraphQL variant when a client is configured.
  listAllFiles = async (path, recursive = false, branch = this.branch) => {
    if (this.graphQLClient) {
      return await this.listAllFilesGraphQL(path, recursive, branch);
    }
    const entries = [];
    // eslint-disable-next-line prefer-const
    let {
      cursor,
      entries: initialEntries
    } = await this.fetchCursorAndEntries({
      url: `${this.repoURL}/repository/tree`,
      // Get the maximum number of entries per page
      params: {
        path,
        ref: branch,
        per_page: 100,
        recursive
      }
    });
    entries.push(...initialEntries);
    // keep following the 'next' link until the cursor runs out
    while (cursor && cursor.actions.has('next')) {
      const link = cursor.data.getIn(['links', 'next']);
      const {
        cursor: newCursor,
        entries: newEntries
      } = await this.fetchCursorAndEntries(link);
      entries.push(...newEntries);
      cursor = newCursor;
    }
    return entries.filter(({
      type
    }) => type === 'blob');
  };
384
  // Base64 helpers used when preparing commit payloads. toBase64 is async
  // to match the interface expected by getCommitItems/lodash `result`.
  toBase64 = str => Promise.resolve(Base64.encode(str));
  fromBase64 = str => Base64.decode(str);

  // Fetches a single branch's details by name.
  async getBranch(branchName) {
    const branch = await this.requestJSON(`${this.repoURL}/repository/branches/${encodeURIComponent(branchName)}`);
    return branch;
  }
390
  // Creates a single commit on `branch` from a list of items
  // ({ action, path, oldPath?, base64Content? }) via the Commits API.
  // With `newBranch`, the branch is created from the default branch as part
  // of the same commit (start_branch).
  async uploadAndCommit(items, {
    commitMessage = '',
    branch = this.branch,
    newBranch = false
  }) {
    const actions = items.map(item => ({
      action: item.action,
      file_path: item.path,
      // only move actions carry a previous path
      ...(item.oldPath ? {
        previous_path: item.oldPath
      } : {}),
      // delete actions have no content payload
      ...(item.base64Content !== undefined ? {
        content: item.base64Content,
        encoding: 'base64'
      } : {})
    }));
    const commitParams = {
      branch,
      commit_message: commitMessage,
      actions,
      ...(newBranch ? {
        start_branch: this.branch
      } : {})
    };
    if (this.commitAuthor) {
      const {
        name,
        email
      } = this.commitAuthor;
      commitParams.author_name = name;
      commitParams.author_email = email;
    }
    try {
      const result = await this.requestJSON({
        url: `${this.repoURL}/repository/commits`,
        method: 'POST',
        headers: {
          'Content-Type': 'application/json; charset=utf-8'
        },
        body: JSON.stringify(commitParams)
      });
      return result;
    } catch (error) {
      const message = error.message || '';
      // A "Could not update" failure while creating a new branch usually
      // means a conflicting branch name already exists; surface that as a
      // specific error before rethrowing the original.
      if (newBranch && message.includes(`Could not update ${branch}`)) {
        await throwOnConflictingBranches(branch, name => this.getBranch(name), API_NAME);
      }
      throw error;
    }
  }
440
  // Maps CMS files to GitLab commit items, choosing create/update/move per
  // file based on existence on `branch` and a pending `newPath`. When
  // `hasSubfolders` is true (default), moving an entry also queues move
  // actions for every other file under its old directory.
  async getCommitItems(files, branch, hasSubfolders = true) {
    const items = await Promise.all(files.map(async file => {
      // use the file's own toBase64 if it provides one, else encode raw
      const [base64Content, fileExists] = await Promise.all([result(file, 'toBase64', partial(this.toBase64, file.raw)), this.isFileExists(file.path, branch)]);
      let action = CommitAction.CREATE;
      let path = trimStart(file.path, '/');
      let oldPath = undefined;
      if (fileExists) {
        oldPath = file.newPath && path;
        action = file.newPath && file.newPath !== oldPath ? CommitAction.MOVE : CommitAction.UPDATE;
        path = file.newPath ? trimStart(file.newPath, '/') : path;
      }
      return {
        action,
        base64Content,
        path,
        oldPath
      };
    }));

    // Move children if subfolders is true (legacy/default behavior)
    if (hasSubfolders) {
      for (const item of items.filter(i => i.oldPath && i.action === CommitAction.MOVE)) {
        const sourceDir = dirname(item.oldPath);
        const destDir = dirname(item.path);
        const children = await this.listAllFiles(sourceDir, true, branch);
        children.filter(f => f.path !== item.oldPath).forEach(file => {
          items.push({
            action: CommitAction.MOVE,
            path: file.path.replace(sourceDir, destDir),
            oldPath: file.path
          });
        });
      }
    }
    return items;
  }
476
  // Persists data + media files: through the editorial workflow (branch +
  // merge request) when enabled, otherwise as a direct commit to the
  // default branch.
  async persistFiles(dataFiles, mediaFiles, options) {
    const files = [...dataFiles, ...mediaFiles];
    const hasSubfolders = options.hasSubfolders !== false; // default to true
    if (options.useWorkflow) {
      const slug = dataFiles[0].slug;
      return this.editorialWorkflowGit(files, slug, options);
    } else {
      const items = await this.getCommitItems(files, this.branch, hasSubfolders);
      return this.uploadAndCommit(items, {
        commitMessage: options.commitMessage
      });
    }
  }
489
+ deleteFiles = (paths, commitMessage) => {
490
+ const branch = this.branch;
491
+ const commitParams = {
492
+ commit_message: commitMessage,
493
+ branch
494
+ };
495
+ if (this.commitAuthor) {
496
+ const {
497
+ name,
498
+ email
499
+ } = this.commitAuthor;
500
+ commitParams.author_name = name;
501
+ commitParams.author_email = email;
502
+ }
503
+ const items = paths.map(path => ({
504
+ path,
505
+ action: CommitAction.DELETE
506
+ }));
507
+ return this.uploadAndCommit(items, {
508
+ commitMessage
509
+ });
510
+ };
511
  // Lists open merge requests targeting the default branch, optionally
  // filtered by source branch. Only CMS-owned MRs are returned: source
  // branch must carry the CMS prefix and at least one label must be a CMS
  // status label.
  async getMergeRequests(sourceBranch) {
    const mergeRequests = await this.requestJSON({
      url: `${this.repoURL}/merge_requests`,
      params: {
        state: 'opened',
        // 'Any' means "has at least one label" in the GitLab MR API
        labels: 'Any',
        per_page: 100,
        target_branch: this.branch,
        ...(sourceBranch ? {
          source_branch: sourceBranch
        } : {})
      }
    });
    return mergeRequests.filter(mr => mr.source_branch.startsWith(CMS_BRANCH_PREFIX) && mr.labels.some(l => isCMSLabel(l, this.cmsLabelPrefix)));
  }
526
  // Returns the source branches of all open CMS merge requests, i.e. the
  // branches holding unpublished editorial-workflow entries.
  async listUnpublishedBranches() {
    console.log('%c Checking for Unpublished entries', 'line-height: 30px;text-align: center;font-weight: bold');
    const mergeRequests = await this.getMergeRequests();
    const branches = mergeRequests.map(mr => mr.source_branch);
    return branches;
  }
532
  // Returns a file's blob id without downloading its content, read from the
  // X-Gitlab-Blob-Id header of a HEAD request.
  async getFileId(path, branch) {
    const request = await this.request({
      method: 'HEAD',
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
      params: {
        ref: branch
      }
    });
    const blobId = request.headers.get('X-Gitlab-Blob-Id');
    return blobId;
  }
543
  // Checks whether a file exists on `branch` via a HEAD request. A 404 from
  // the API means "does not exist"; any other failure is rethrown.
  async isFileExists(path, branch) {
    const fileExists = await this.requestText({
      method: 'HEAD',
      url: `${this.repoURL}/repository/files/${encodeURIComponent(path)}`,
      params: {
        ref: branch
      }
    }).then(() => true).catch(error => {
      if (error instanceof APIError && error.status === 404) {
        return false;
      }
      throw error;
    });
    return fileExists;
  }
558
  // Returns the CMS merge request whose source is `branch`; throws an
  // EditorialWorkflowError (not-found flavor) when none exists.
  async getBranchMergeRequest(branch) {
    const mergeRequests = await this.getMergeRequests(branch);
    if (mergeRequests.length <= 0) {
      throw new EditorialWorkflowError('content is not under editorial workflow', true);
    }
    return mergeRequests[0];
  }
565
+ async getDifferences(to, from = this.branch) {
566
+ if (to === from) {
567
+ return [];
568
+ }
569
+ const result = await this.requestJSON({
570
+ url: `${this.repoURL}/repository/compare`,
571
+ params: {
572
+ from,
573
+ to
574
+ }
575
+ });
576
+ if (result.diffs.length >= 1000) {
577
+ throw new APIError('Diff limit reached', null, API_NAME);
578
+ }
579
+ return result.diffs.map(d => {
580
+ let status = 'modified';
581
+ if (d.new_file) {
582
+ status = 'added';
583
+ } else if (d.deleted_file) {
584
+ status = 'deleted';
585
+ } else if (d.renamed_file) {
586
+ status = 'renamed';
587
+ }
588
+ return {
589
+ status,
590
+ oldPath: d.old_path,
591
+ newPath: d.new_path,
592
+ newFile: d.new_file,
593
+ path: d.new_path || d.old_path,
594
+ binary: d.diff.startsWith('Binary') || /.svg$/.test(d.new_path)
595
+ };
596
+ });
597
+ }
598
  // Assembles the data the editorial-workflow UI needs for an unpublished
  // entry: its diffs against the default branch (with blob ids), its status
  // label, and merge-request metadata.
  async retrieveUnpublishedEntryData(contentKey) {
    const {
      collection,
      slug
    } = parseContentKey(contentKey);
    const branch = branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    // diff the MR head commit against the default branch
    const diffs = await this.getDifferences(mergeRequest.sha);
    const diffsWithIds = await Promise.all(diffs.map(async d => {
      const {
        path,
        newFile
      } = d;
      const id = await this.getFileId(path, branch);
      return {
        id,
        path,
        newFile
      };
    }));
    const label = mergeRequest.labels.find(l => isCMSLabel(l, this.cmsLabelPrefix));
    const status = labelToStatus(label, this.cmsLabelPrefix);
    const updatedAt = mergeRequest.updated_at;
    const pullRequestAuthor = mergeRequest.author.name;
    return {
      collection,
      slug,
      status,
      diffs: diffsWithIds,
      updatedAt,
      pullRequestAuthor
    };
  }
631
  // Rebases a merge request onto its target branch (skipping CI) and polls
  // once per second, up to ~30 attempts, until GitLab reports the rebase
  // finished. Throws on timeout or on a reported merge error.
  async rebaseMergeRequest(mergeRequest) {
    let rebase = await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}/rebase?skip_ci=true`
    });
    let i = 1;
    while (rebase.rebase_in_progress) {
      await new Promise(resolve => setTimeout(resolve, 1000));
      rebase = await this.requestJSON({
        url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
        params: {
          include_rebase_in_progress: true
        }
      });
      if (!rebase.rebase_in_progress || i > 30) {
        break;
      }
      i++;
    }
    if (rebase.rebase_in_progress) {
      throw new APIError('Timed out rebasing merge request', null, API_NAME);
    } else if (rebase.merge_error) {
      throw new APIError(`Rebase error: ${rebase.merge_error}`, null, API_NAME);
    }
  }
656
  // Opens a merge request from `branch` into the default branch, labeled
  // with the entry's workflow status; the source branch is removed on merge.
  async createMergeRequest(branch, commitMessage, status) {
    await this.requestJSON({
      method: 'POST',
      url: `${this.repoURL}/merge_requests`,
      params: {
        source_branch: branch,
        target_branch: this.branch,
        title: commitMessage,
        description: DEFAULT_PR_BODY,
        labels: statusToLabel(status, this.cmsLabelPrefix),
        remove_source_branch: true,
        squash: this.squashMerges
      }
    });
  }
671
  // Saves an entry through the editorial workflow. First save: commit the
  // files to a new CMS branch and open a merge request. Subsequent saves:
  // rebase the existing MR, then commit the new file set on top of it.
  async editorialWorkflowGit(files, slug, options) {
    const contentKey = generateContentKey(options.collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const unpublished = options.unpublished || false;
    const hasSubfolders = options.hasSubfolders !== false; // default to true
    if (!unpublished) {
      const items = await this.getCommitItems(files, this.branch, hasSubfolders);
      await this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
        branch,
        newBranch: true
      });
      await this.createMergeRequest(branch, options.commitMessage, options.status || this.initialWorkflowStatus);
    } else {
      const mergeRequest = await this.getBranchMergeRequest(branch);
      await this.rebaseMergeRequest(mergeRequest);
      const [items, diffs] = await Promise.all([this.getCommitItems(files, branch, hasSubfolders), this.getDifferences(branch)]);
      // mark files for deletion
      // presumably prunes binary (media) files that were previously
      // committed on the branch but are no longer part of this save —
      // confirm against the caller's media handling
      for (const diff of diffs.filter(d => d.binary)) {
        if (!items.some(item => item.path === diff.path)) {
          items.push({
            action: CommitAction.DELETE,
            path: diff.newPath
          });
        }
      }
      await this.uploadAndCommit(items, {
        commitMessage: options.commitMessage,
        branch
      });
    }
  }
703
  // Replaces the full label set on a merge request.
  async updateMergeRequestLabels(mergeRequest, labels) {
    await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
      params: {
        labels: labels.join(',')
      }
    });
  }

  // Moves an entry to a new workflow status by swapping its CMS status
  // label while preserving all non-CMS labels on the merge request.
  async updateUnpublishedEntryStatus(collection, slug, newStatus) {
    const contentKey = generateContentKey(collection, slug);
    const branch = branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    const labels = [...mergeRequest.labels.filter(label => !isCMSLabel(label, this.cmsLabelPrefix)), statusToLabel(newStatus, this.cmsLabelPrefix)];
    await this.updateMergeRequestLabels(mergeRequest, labels);
  }
719
  // Merges a merge request (squashing when configured) and removes its
  // source branch.
  async mergeMergeRequest(mergeRequest) {
    await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}/merge`,
      params: {
        merge_commit_message: MERGE_COMMIT_MESSAGE,
        squash_commit_message: MERGE_COMMIT_MESSAGE,
        squash: this.squashMerges,
        should_remove_source_branch: true
      }
    });
  }

  // Publishes an unpublished entry by merging its workflow merge request.
  async publishUnpublishedEntry(collectionName, slug) {
    const contentKey = generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    await this.mergeMergeRequest(mergeRequest);
  }
737
  // Closes a merge request without merging it.
  async closeMergeRequest(mergeRequest) {
    await this.requestJSON({
      method: 'PUT',
      url: `${this.repoURL}/merge_requests/${mergeRequest.iid}`,
      params: {
        state_event: 'close'
      }
    });
  }

  // Fetches the details of the configured default branch.
  async getDefaultBranch() {
    const branch = await this.getBranch(this.branch);
    return branch;
  }
750
  // True when commit `sha` is contained in `branch`.
  async isShaExistsInBranch(branch, sha) {
    const refs = await this.requestJSON({
      url: `${this.repoURL}/repository/commits/${sha}/refs`,
      params: {
        type: 'branch'
      }
    });
    return refs.some(r => r.name === branch);
  }

  // Deletes a branch outright (used when discarding an entry).
  async deleteBranch(branch) {
    await this.request({
      method: 'DELETE',
      url: `${this.repoURL}/repository/branches/${encodeURIComponent(branch)}`
    });
  }

  // Discards an unpublished entry: closes its merge request, then deletes
  // its workflow branch.
  async deleteUnpublishedEntry(collectionName, slug) {
    const contentKey = generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    await this.closeMergeRequest(mergeRequest);
    await this.deleteBranch(branch);
  }
772
  // Fetches commit statuses for a merge request's head commit.
  // NOTE(review): "Statues" is a typo for "Statuses", but the name is kept —
  // it is part of this class's public surface and may have external callers.
  async getMergeRequestStatues(mergeRequest, branch) {
    const statuses = await this.requestJSON({
      url: `${this.repoURL}/repository/commits/${mergeRequest.sha}/statuses`,
      params: {
        ref: branch
      }
    });
    return statuses;
  }

  // Maps commit statuses on an entry's MR head to CMS preview states:
  // only 'success' counts as a ready preview.
  async getStatuses(collectionName, slug) {
    const contentKey = generateContentKey(collectionName, slug);
    const branch = branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    const statuses = await this.getMergeRequestStatues(mergeRequest, branch);
    return statuses.map(({
      name,
      status,
      target_url
    }) => ({
      context: name,
      state: status === GitLabCommitStatuses.Success ? PreviewState.Success : PreviewState.Other,
      target_url
    }));
  }

  // Head commit sha of an unpublished entry's merge request.
  async getUnpublishedEntrySha(collection, slug) {
    const contentKey = generateContentKey(collection, slug);
    const branch = branchFromContentKey(contentKey);
    const mergeRequest = await this.getBranchMergeRequest(branch);
    return mergeRequest.sha;
  }
}