@hadl-labs/changelog-github 0.1.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +3 -0
- package/dist/index.js +579 -0
- package/package.json +11 -6
- package/.changeset/README.md +0 -8
- package/.changeset/config.json +0 -11
- package/bun.lock +0 -734
- package/prettier.config.js +0 -8
- package/src/index.test.ts +0 -109
- package/src/index.ts +0 -113
- package/tsconfig.json +0 -29
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
1
|
+
// --- Bundler runtime helpers (CommonJS/ESM interop), emitted by the bundler ---
var __create = Object.create;
var __getProtoOf = Object.getPrototypeOf;
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Wrap a CommonJS export object so it can be consumed like an ES module:
// a `default` binding plus live getters for each named export.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const to = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  for (let key of __getOwnPropNames(mod))
    if (!__hasOwnProp.call(to, key))
      __defProp(to, key, {
        get: () => mod[key],
        enumerable: true
      });
  return to;
};

var __moduleCache = /* @__PURE__ */ new WeakMap;

// Expose an ES-module namespace object as a CommonJS `exports` object.
// Results are memoized per namespace object via a WeakMap.
var __toCommonJS = (from) => {
  var entry = __moduleCache.get(from), desc;
  if (entry)
    return entry;
  entry = __defProp({}, "__esModule", { value: true });
  if (from && typeof from === "object" || typeof from === "function")
    __getOwnPropNames(from).map((key) => !__hasOwnProp.call(entry, key) && __defProp(entry, key, {
      get: () => from[key],
      enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
    }));
  __moduleCache.set(from, entry);
  return entry;
};

// Lazily evaluate a bundled CommonJS module factory exactly once and
// return its `module.exports` on every subsequent call.
var __commonJS = (cb, mod) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports);

// Define live, re-assignable getter bindings on `target` for each
// exported name in `all` (used for the module's own exports).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, {
      get: all[name],
      enumerable: true,
      configurable: true,
      set: (newValue) => all[name] = () => newValue
    });
};
|
|
42
|
+
|
|
43
|
+
// node_modules/node-fetch/browser.js
// Vendored node-fetch browser shim: re-exports the environment's native
// fetch globals instead of implementing fetch itself.
var require_browser = __commonJS((exports2, module2) => {
  // Locate the global object without assuming a specific host environment.
  var getGlobal = function() {
    if (typeof self !== "undefined") {
      return self;
    }
    if (typeof window !== "undefined") {
      return window;
    }
    if (typeof global !== "undefined") {
      return global;
    }
    throw new Error("unable to locate global object");
  };
  var globalObject = getGlobal();
  module2.exports = exports2 = globalObject.fetch;
  if (globalObject.fetch) {
    // Bind so callers can invoke fetch without the global as receiver.
    exports2.default = globalObject.fetch.bind(globalObject);
  }
  exports2.Headers = globalObject.Headers;
  exports2.Request = globalObject.Request;
  exports2.Response = globalObject.Response;
});
|
|
66
|
+
|
|
67
|
+
// node_modules/dataloader/index.js
// Vendored DataLoader: coalesces individual .load(key) calls made in the
// same tick into one batchLoadFn(keys) call and caches promises per key.
var require_dataloader = __commonJS((exports2, module2) => {
  function _classCallCheck(instance, Constructor) {
    if (!(instance instanceof Constructor)) {
      throw new TypeError("Cannot call a class as a function");
    }
  }
  var DataLoader = function() {
    function DataLoader2(batchLoadFn, options) {
      _classCallCheck(this, DataLoader2);
      if (typeof batchLoadFn !== "function") {
        throw new TypeError("DataLoader must be constructed with a function which accepts " + ("Array<key> and returns Promise<Array<value>>, but got: " + batchLoadFn + "."));
      }
      this._batchLoadFn = batchLoadFn;
      this._options = options;
      this._promiseCache = getValidCacheMap(options);
      this._queue = [];
    }
    // Load one key; batching and caching obey the constructor options.
    DataLoader2.prototype.load = function load(key) {
      var _this = this;
      if (key === null || key === undefined) {
        throw new TypeError("The loader.load() function must be called with a value," + ("but got: " + String(key) + "."));
      }
      var options = this._options;
      var shouldBatch = !options || options.batch !== false;
      var shouldCache = !options || options.cache !== false;
      var cacheKeyFn = options && options.cacheKeyFn;
      var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;
      if (shouldCache) {
        var cachedPromise = this._promiseCache.get(cacheKey);
        if (cachedPromise) {
          return cachedPromise;
        }
      }
      var promise = new Promise(function(resolve, reject) {
        _this._queue.push({ key, resolve, reject });
        // First entry in this tick schedules the dispatch for the batch.
        if (_this._queue.length === 1) {
          if (shouldBatch) {
            enqueuePostPromiseJob(function() {
              return dispatchQueue(_this);
            });
          } else {
            dispatchQueue(_this);
          }
        }
      });
      if (shouldCache) {
        this._promiseCache.set(cacheKey, promise);
      }
      return promise;
    };
    DataLoader2.prototype.loadMany = function loadMany(keys) {
      var _this2 = this;
      if (!Array.isArray(keys)) {
        throw new TypeError("The loader.loadMany() function must be called with Array<key> " + ("but got: " + keys + "."));
      }
      return Promise.all(keys.map(function(key) {
        return _this2.load(key);
      }));
    };
    DataLoader2.prototype.clear = function clear(key) {
      var cacheKeyFn = this._options && this._options.cacheKeyFn;
      var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;
      this._promiseCache.delete(cacheKey);
      return this;
    };
    DataLoader2.prototype.clearAll = function clearAll() {
      this._promiseCache.clear();
      return this;
    };
    // Seed the cache with a known value (or an Error, which rejects).
    DataLoader2.prototype.prime = function prime(key, value) {
      var cacheKeyFn = this._options && this._options.cacheKeyFn;
      var cacheKey = cacheKeyFn ? cacheKeyFn(key) : key;
      if (this._promiseCache.get(cacheKey) === undefined) {
        var promise = value instanceof Error ? Promise.reject(value) : Promise.resolve(value);
        this._promiseCache.set(cacheKey, promise);
      }
      return this;
    };
    return DataLoader2;
  }();
  // Schedule `fn` after the current microtask queue drains so every load
  // enqueued in this tick lands in the same batch.
  var enqueuePostPromiseJob = typeof process === "object" && typeof process.nextTick === "function" ? function(fn) {
    if (!resolvedPromise) {
      resolvedPromise = Promise.resolve();
    }
    resolvedPromise.then(function() {
      return process.nextTick(fn);
    });
  } : setImmediate || setTimeout;
  var resolvedPromise;
  function dispatchQueue(loader) {
    var queue = loader._queue;
    loader._queue = [];
    var maxBatchSize = loader._options && loader._options.maxBatchSize;
    if (maxBatchSize && maxBatchSize > 0 && maxBatchSize < queue.length) {
      for (var i = 0; i < queue.length / maxBatchSize; i++) {
        dispatchQueueBatch(loader, queue.slice(i * maxBatchSize, (i + 1) * maxBatchSize));
      }
    } else {
      dispatchQueueBatch(loader, queue);
    }
  }
  function dispatchQueueBatch(loader, queue) {
    var keys = queue.map(function(_ref) {
      var key = _ref.key;
      return key;
    });
    var batchLoadFn = loader._batchLoadFn;
    var batchPromise = batchLoadFn(keys);
    if (!batchPromise || typeof batchPromise.then !== "function") {
      return failedDispatch(loader, queue, new TypeError("DataLoader must be constructed with a function which accepts " + "Array<key> and returns Promise<Array<value>>, but the function did " + ("not return a Promise: " + String(batchPromise) + ".")));
    }
    batchPromise.then(function(values) {
      if (!Array.isArray(values)) {
        throw new TypeError("DataLoader must be constructed with a function which accepts " + "Array<key> and returns Promise<Array<value>>, but the function did " + ("not return a Promise of an Array: " + String(values) + "."));
      }
      if (values.length !== keys.length) {
        throw new TypeError("DataLoader must be constructed with a function which accepts " + "Array<key> and returns Promise<Array<value>>, but the function did " + "not return a Promise of an Array of the same length as the Array " + "of keys." + (`

Keys:
` + String(keys)) + (`

Values:
` + String(values)));
      }
      queue.forEach(function(_ref2, index) {
        var { resolve, reject } = _ref2;
        var value = values[index];
        if (value instanceof Error) {
          reject(value);
        } else {
          resolve(value);
        }
      });
    }).catch(function(error) {
      return failedDispatch(loader, queue, error);
    });
  }
  // Reject everything in the batch and evict the keys so a retry re-fetches.
  function failedDispatch(loader, queue, error) {
    queue.forEach(function(_ref3) {
      var { key, reject } = _ref3;
      loader.clear(key);
      reject(error);
    });
  }
  // Validate a user-supplied cacheMap (must be Map-like) or default to Map.
  function getValidCacheMap(options) {
    var cacheMap = options && options.cacheMap;
    if (!cacheMap) {
      return new Map;
    }
    var cacheFunctions = ["get", "set", "delete", "clear"];
    var missingFunctions = cacheFunctions.filter(function(fnName) {
      return cacheMap && typeof cacheMap[fnName] !== "function";
    });
    if (missingFunctions.length !== 0) {
      throw new TypeError("Custom cacheMap missing methods: " + missingFunctions.join(", "));
    }
    return cacheMap;
  }
  module2.exports = DataLoader;
});
|
|
228
|
+
|
|
229
|
+
// src/index.ts — CommonJS wiring for the bundled module's own exports.
var exports_src = {};
__export(exports_src, {
  default: () => src_default
});
module.exports = __toCommonJS(exports_src);

// node_modules/@changesets/get-github-info/dist/changesets-get-github-info.esm.js
// ESM-interop wrappers around the vendored CommonJS modules above.
var import_node_fetch = __toESM(require_browser(), 1);
var import_dataloader = __toESM(require_dataloader(), 1);
|
|
239
|
+
// --- Transpiler (Babel-style) object helpers used by the vendored
// @changesets/get-github-info code below ---

// Define (or assign) a single enumerable/writable property.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}

// Own string keys plus (optionally enumerable-only) symbol keys.
function ownKeys(object, enumerableOnly) {
  var keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly)
      symbols = symbols.filter(function(sym) {
        return Object.getOwnPropertyDescriptor(object, sym).enumerable;
      });
    keys.push.apply(keys, symbols);
  }
  return keys;
}

// Spread helper: merge each source's own properties into `target`
// (equivalent of `{ ...a, ...b }` for environments without object spread).
function _objectSpread2(target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = arguments[i] != null ? arguments[i] : {};
    if (i % 2) {
      ownKeys(Object(source), true).forEach(function(key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(Object(source)).forEach(function(key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}

// Rest helper (string keys only): copy all keys except `excluded`.
function _objectWithoutPropertiesLoose(source, excluded) {
  if (source == null)
    return {};
  var target = {};
  var sourceKeys = Object.keys(source);
  var key, i;
  for (i = 0; i < sourceKeys.length; i++) {
    key = sourceKeys[i];
    if (excluded.indexOf(key) >= 0)
      continue;
    target[key] = source[key];
  }
  return target;
}

// Rest helper: like the loose variant, but also copies enumerable symbols.
function _objectWithoutProperties(source, excluded) {
  if (source == null)
    return {};
  var target = _objectWithoutPropertiesLoose(source, excluded);
  var key, i;
  if (Object.getOwnPropertySymbols) {
    var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
    for (i = 0; i < sourceSymbolKeys.length; i++) {
      key = sourceSymbolKeys[i];
      if (excluded.indexOf(key) >= 0)
        continue;
      if (!Object.prototype.propertyIsEnumerable.call(source, key))
        continue;
      target[key] = source[key];
    }
  }
  return target;
}

// Keys stripped from loader requests before grouping by repo.
var _excluded = ["repo"];
var _excluded2 = ["repo"];
// Accepts "owner/name" where both parts are word chars, dots, or dashes.
var validRepoNameRegex = /^[\w.-]+\/[\w.-]+$/;
|
|
316
|
+
// Build one GraphQL query string covering every repo/commit/pull in `repos`
// ({ "owner/name": [{ kind: "commit", commit } | { kind: "pull", pull }] }).
// Repos are aliased a0, a1, …; commit lookups a<sha>; pull lookups
// pr__<number> — so the response can be re-keyed by the caller.
function makeQuery(repos) {
  return `
      query {
        ${Object.keys(repos).map((repo, i) => `a${i}: repository(
          owner: ${JSON.stringify(repo.split("/")[0])}
          name: ${JSON.stringify(repo.split("/")[1])}
        ) {
          ${repos[repo].map((data) => data.kind === "commit" ? `a${data.commit}: object(expression: ${JSON.stringify(data.commit)}) {
          ... on Commit {
          commitUrl
          associatedPullRequests(first: 50) {
            nodes {
              number
              url
              mergedAt
              author {
                login
                url
              }
            }
          }
          author {
            user {
              login
              url
            }
          }
        }}` : `pr__${data.pull}: pullRequest(number: ${data.pull}) {
          url
          author {
            login
            url
          }
          mergeCommit {
            commitUrl
            abbreviatedOid
          }
        }`).join("\n")}
        }`).join("\n")}
      }
  `;
}
|
|
360
|
+
// Batched GitHub GraphQL loader: each request is { repo, kind, commit|pull }.
// All requests collected in one tick are grouped by repo, resolved with a
// single api.github.com/graphql call, then mapped back to each request.
var GHDataLoader = new import_dataloader.default(async (requests) => {
  if (!process.env.GITHUB_TOKEN) {
    throw new Error("Please create a GitHub personal access token at https://github.com/settings/tokens/new with `read:user` and `repo:status` permissions and add it as the GITHUB_TOKEN environment variable");
  }
  // Group the per-key requests by repository.
  let repos = {};
  requests.forEach((_ref) => {
    let {
      repo
    } = _ref, data2 = _objectWithoutProperties(_ref, _excluded);
    if (repos[repo] === undefined) {
      repos[repo] = [];
    }
    repos[repo].push(data2);
  });
  const data = await import_node_fetch.default("https://api.github.com/graphql", {
    method: "POST",
    headers: {
      Authorization: `Token ${process.env.GITHUB_TOKEN}`
    },
    body: JSON.stringify({
      query: makeQuery(repos)
    })
  }).then((x) => x.json());
  if (data.errors) {
    throw new Error(`An error occurred when fetching data from GitHub
${JSON.stringify(data.errors, null, 2)}`);
  }
  if (!data.data) {
    throw new Error(`An error occurred when fetching data from GitHub
${JSON.stringify(data)}`);
  }
  // Undo the aliasing from makeQuery: a<sha> -> commit map, pr__<n> -> pull map.
  let cleanedData = {};
  Object.keys(repos).forEach((repo, index) => {
    let output = {
      commit: {},
      pull: {}
    };
    cleanedData[repo] = output;
    Object.entries(data.data[`a${index}`]).forEach(([field, value]) => {
      if (field[0] === "a") {
        output.commit[field.substring(1)] = value;
      } else {
        output.pull[field.replace("pr__", "")] = value;
      }
    });
  });
  // Answer each original request from the re-keyed response.
  return requests.map((_ref2) => {
    let {
      repo
    } = _ref2, data2 = _objectWithoutProperties(_ref2, _excluded2);
    return cleanedData[repo][data2.kind][data2.kind === "pull" ? data2.pull : data2.commit];
  });
});
|
|
413
|
+
// Resolve commit metadata (author, associated PR, markdown links) for a
// { repo, commit } request through the batched GraphQL loader.
// Throws if the commit SHA or a valid "owner/name" repo is missing.
async function getInfo(request) {
  if (!request.commit) {
    throw new Error("Please pass a commit SHA to getInfo");
  }
  if (!request.repo) {
    throw new Error("Please pass a GitHub repository in the form of userOrOrg/repoName to getInfo");
  }
  if (!validRepoNameRegex.test(request.repo)) {
    throw new Error(`Please pass a valid GitHub repository in the form of userOrOrg/repoName to getInfo (it has to match the "${validRepoNameRegex.source}" pattern)`);
  }
  const data = await GHDataLoader.load(_objectSpread2({
    kind: "commit"
  }, request));
  let user = null;
  if (data.author && data.author.user) {
    user = data.author.user;
  }
  // Pick the earliest-merged associated PR; unmerged PRs sort to the end.
  let associatedPullRequest = data.associatedPullRequests && data.associatedPullRequests.nodes && data.associatedPullRequests.nodes.length ? data.associatedPullRequests.nodes.sort((a, b) => {
    if (a.mergedAt === null && b.mergedAt === null) {
      return 0;
    }
    if (a.mergedAt === null) {
      return 1;
    }
    if (b.mergedAt === null) {
      return -1;
    }
    a = new Date(a.mergedAt);
    b = new Date(b.mergedAt);
    return a > b ? 1 : a < b ? -1 : 0;
  })[0] : null;
  if (associatedPullRequest) {
    // The PR author wins over the commit author when a PR exists.
    user = associatedPullRequest.author;
  }
  return {
    user: user ? user.login : null,
    pull: associatedPullRequest ? associatedPullRequest.number : null,
    links: {
      commit: `[\`${request.commit.slice(0, 7)}\`](${data.commitUrl})`,
      pull: associatedPullRequest ? `[#${associatedPullRequest.number}](${associatedPullRequest.url})` : null,
      user: user ? `[@${user.login}](${user.url})` : null
    }
  };
}
|
|
457
|
+
// Resolve PR metadata (author, merge commit, markdown links) for a
// { repo, pull } request through the batched GraphQL loader.
// Throws if the pull number or a valid "owner/name" repo is missing.
async function getInfoFromPullRequest(request) {
  if (request.pull === undefined) {
    throw new Error("Please pass a pull request number");
  }
  if (!request.repo) {
    throw new Error("Please pass a GitHub repository in the form of userOrOrg/repoName to getInfo");
  }
  if (!validRepoNameRegex.test(request.repo)) {
    throw new Error(`Please pass a valid GitHub repository in the form of userOrOrg/repoName to getInfo (it has to match the "${validRepoNameRegex.source}" pattern)`);
  }
  const data = await GHDataLoader.load(_objectSpread2({
    kind: "pull"
  }, request));
  // The loader yields undefined for unknown PRs; guard both null/undefined.
  let user = data === null || data === undefined ? undefined : data.author;
  let commit = data === null || data === undefined ? undefined : data.mergeCommit;
  return {
    user: user ? user.login : null,
    commit: commit ? commit.abbreviatedOid : null,
    links: {
      commit: commit ? `[\`${commit.abbreviatedOid.slice(0, 7)}\`](${commit.commitUrl})` : null,
      pull: `[#${request.pull}](https://github.com/${request.repo}/pull/${request.pull})`,
      user: user ? `[@${user.login}](${user.url})` : null
    }
  };
}
|
|
482
|
+
|
|
483
|
+
// src/index.ts
// Changesets changelog generator: formats release lines with GitHub
// PR/commit/author links plus optional ClickUp task links.
var changelogFunctions = {
  // Render the "Updated dependencies" block for dependency bumps.
  // Returns "" when nothing was updated; commit links are resolved per
  // changeset via getInfo (requires GITHUB_TOKEN when commits exist).
  getDependencyReleaseLine: async (changesets, dependenciesUpdated, options) => {
    // Guard `options` itself as well, for consistency with getReleaseLine:
    // previously a missing options object crashed with a TypeError instead
    // of this actionable configuration message.
    if (!options || !options.repo) {
      throw new Error(`Please provide a repo to this changelog generator like this:
"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]`);
    }
    if (dependenciesUpdated.length === 0)
      return "";
    const changesetLink = `- Updated dependencies [${(await Promise.all(changesets.map(async (cs) => {
      if (cs.commit) {
        let { links } = await getInfo({
          repo: options["repo"],
          commit: cs.commit
        });
        return links.commit;
      }
      return null;
    }))).filter((_) => _).join(", ")}]:`;
    // (typo fix: local was previously named "updatedDepenenciesList")
    const updatedDependenciesList = dependenciesUpdated.map((dependency) => ` - ${dependency.name}@${dependency.newVersion}`);
    return [changesetLink, ...updatedDependenciesList].join(`
`);
  },
  // Render the main release line for one changeset. The summary may carry
  // directives on their own lines — "pr:"/"pull:", "commit:", "author:"/
  // "user:", and "clickup:" — which are stripped from the output and used
  // to build the link prefix instead.
  getReleaseLine: async (changeset, _type, options) => {
    if (!options || !options["repo"]) {
      throw new Error(`Please provide a repo to this changelog generator like this:
"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]`);
    }
    let prFromSummary;
    let commitFromSummary;
    let usersFromSummary = [];
    let clickupLinks = [];
    // Extract and strip the directive lines from the summary.
    const replacedChangelog = changeset.summary.replace(/^\s*(?:pr|pull|pull\s+request):\s*#?(\d+)/im, (_, pr) => {
      let num = Number(pr);
      if (!isNaN(num))
        prFromSummary = num;
      return "";
    }).replace(/^\s*commit:\s*([^\s]+)/im, (_, commit) => {
      commitFromSummary = commit;
      return "";
    }).replace(/^\s*(?:author|user):\s*@?([^\s]+)/gim, (_, user) => {
      usersFromSummary.push(user);
      return "";
    }).replace(/^\s*clickup:\s*(https:\/\/app\.clickup\.com\/t\/[^\s]+)/gim, (_, link) => {
      clickupLinks.push(link);
      return "";
    }).trim();
    // trimEnd() is the standard name; trimRight() was a legacy alias.
    const [firstLine, ...futureLines] = replacedChangelog.split(`
`).map((l) => l.trimEnd());
    // Resolve pull/commit/user links: explicit PR wins, then a commit
    // (from the summary or the changeset itself), else all-null links.
    const links = await (async () => {
      if (prFromSummary !== undefined) {
        let { links: links2 } = await getInfoFromPullRequest({
          repo: options["repo"],
          pull: prFromSummary
        });
        if (commitFromSummary) {
          // An explicit commit directive overrides the PR's merge commit.
          const shortCommitId = commitFromSummary.slice(0, 7);
          links2 = {
            ...links2,
            commit: `[\`${shortCommitId}\`](https://github.com/${options["repo"]}/commit/${commitFromSummary})`
          };
        }
        return links2;
      }
      const commitToFetchFrom = commitFromSummary || changeset.commit;
      if (commitToFetchFrom) {
        let { links: links2 } = await getInfo({
          repo: options["repo"],
          commit: commitToFetchFrom
        });
        return links2;
      }
      return {
        commit: null,
        pull: null,
        user: null
      };
    })();
    // Explicit author directives override the author resolved from GitHub.
    const users = usersFromSummary.length ? usersFromSummary.map((userFromSummary) => `[@${userFromSummary}](https://github.com/${userFromSummary})`).join(", ") : links.user;
    const clickupPart = clickupLinks.length > 0 ? ` ClickUp: ${clickupLinks.map((link) => {
      const taskId = link.split("/").pop();
      return `[${taskId}](${link})`;
    }).join(", ")}` : "";
    const prefix = [
      links.pull === null ? "" : ` ${links.pull}`,
      links.commit === null ? "" : ` ${links.commit}`,
      users === null ? "" : ` Thanks ${users}!`,
      clickupPart
    ].join("");
    return `

-${prefix ? `${prefix} -` : ""} ${firstLine}
${futureLines.map((l) => ` ${l}`).join(`
`)}`;
  }
};
var src_default = changelogFunctions;
|
package/package.json
CHANGED
|
@@ -1,19 +1,24 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@hadl-labs/changelog-github",
|
|
3
|
-
"version": "0.
|
|
4
|
-
"
|
|
5
|
-
"types": "
|
|
6
|
-
"
|
|
3
|
+
"version": "0.4.0",
|
|
4
|
+
"main": "dist/index.js",
|
|
5
|
+
"types": "dist/index.d.ts",
|
|
6
|
+
"files": [
|
|
7
|
+
"dist"
|
|
8
|
+
],
|
|
7
9
|
"scripts": {
|
|
10
|
+
"build": "bun build src/index.ts --outdir dist --format cjs && tsc --emitDeclarationOnly --declaration --outDir dist",
|
|
8
11
|
"test": "bun test",
|
|
9
12
|
"typecheck": "tsc --noEmit",
|
|
10
13
|
"format": "prettier --check --log-level warn --cache --cache-strategy content '**/*.{js,ts,tsx,css,yml,yaml,json}'",
|
|
11
14
|
"format:write": "prettier --write --log-level warn --cache --cache-strategy content '**/*.{js,ts,tsx,css,yml,yaml,json}'",
|
|
12
|
-
"check-updates": "npx npm-check-updates --format group --interactive"
|
|
15
|
+
"check-updates": "npx npm-check-updates --format group --interactive",
|
|
16
|
+
"release": "bun run build && changeset publish"
|
|
13
17
|
},
|
|
14
18
|
"dependencies": {
|
|
15
19
|
"@changesets/get-github-info": "^0.6.0",
|
|
16
|
-
"@changesets/types": "^6.1.0"
|
|
20
|
+
"@changesets/types": "^6.1.0",
|
|
21
|
+
"@tsconfig/strictest": "^2.0.7"
|
|
17
22
|
},
|
|
18
23
|
"devDependencies": {
|
|
19
24
|
"@changesets/cli": "^2.29.7",
|
package/.changeset/README.md
DELETED
|
@@ -1,8 +0,0 @@
|
|
|
1
|
-
# Changesets
|
|
2
|
-
|
|
3
|
-
Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
|
|
4
|
-
with multi-package repos, or single-package repos to help you version and publish your code. You can
|
|
5
|
-
find the full documentation for it [in our repository](https://github.com/changesets/changesets)
|
|
6
|
-
|
|
7
|
-
We have a quick list of common questions to get you started engaging with this project in
|
|
8
|
-
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
|
package/.changeset/config.json
DELETED
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
{
|
|
2
|
-
"$schema": "https://unpkg.com/@changesets/config@3.1.1/schema.json",
|
|
3
|
-
"changelog": ["@changesets/changelog-github", { "repo": "hadl-labs/changelog-github" }],
|
|
4
|
-
"commit": false,
|
|
5
|
-
"fixed": [],
|
|
6
|
-
"linked": [],
|
|
7
|
-
"access": "restricted",
|
|
8
|
-
"baseBranch": "master",
|
|
9
|
-
"updateInternalDependencies": "patch",
|
|
10
|
-
"ignore": []
|
|
11
|
-
}
|