spec-up-t 1.0.90 → 1.0.92
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/compiled/body.js +1 -1
- package/assets/compiled/head.css +1 -1
- package/assets/css/index.css +202 -150
- package/assets/js/show-commit-hashes.js +2 -2
- package/index.js +0 -3
- package/package.json +2 -1
- package/src/add-remove-xref-source.js +171 -0
- package/src/collect-external-references.js +252 -0
- package/src/collectExternalReferences/fetchTermsFromGitHubRepository.js +237 -0
- package/src/{get-xtrefs-data → collectExternalReferences}/matchTerm.js +4 -3
- package/src/collectExternalReferences/processXTrefsData.js +53 -0
- package/src/config/paths.js +1 -0
- package/src/configure.js +96 -0
- package/src/init.js +2 -8
- package/src/prepare-tref.js +21 -4
- package/src/utils/doesUrlExist.js +18 -10
- package/src/utils/isLineWithDefinition.js +13 -0
- package/src/get-xtrefs-data/matchTerm.1.js +0 -23
- package/src/get-xtrefs-data/searchGitHubCode.1.js +0 -69
- package/src/get-xtrefs-data/searchGitHubCode.2.js +0 -77
- package/src/get-xtrefs-data/searchGitHubCode.3.js +0 -85
- package/src/get-xtrefs-data/searchGitHubCode.4.js +0 -92
- package/src/get-xtrefs-data/searchGitHubCode.5.js +0 -97
- package/src/get-xtrefs-data/searchGitHubCode.js +0 -97
- package/src/get-xtrefs-data.js +0 -222
- /package/src/{get-xtrefs-data → collectExternalReferences}/checkRateLimit.js +0 -0
- /package/src/{get-xtrefs-data → collectExternalReferences}/setupFetchHeaders.js +0 -0
package/src/collectExternalReferences/processXTrefsData.js
ADDED
@@ -0,0 +1,53 @@
+const fs = require('fs');
+const { fetchTermsFromGitHubRepository } = require('./fetchTermsFromGitHubRepository.js');
+const { matchTerm } = require('./matchTerm.js');
+const { addPath, getPath, getAllPaths } = require('../config/paths');
+
+// Directory to store cached files
+const CACHE_DIR = getPath('githubcache');
+
+async function processXTrefsData(allXTrefs, GITHUB_API_TOKEN, outputPathJSON, outputPathJS, outputPathJSTimeStamped, options) {
+  try {
+
+    // Clear the cache (remove the cache directory) if the cache option is set to false
+    if (options.cache === false) {
+      if (fs.existsSync(CACHE_DIR)) {
+        fs.rmdirSync(CACHE_DIR, { recursive: true });
+      }
+    }
+
+    // Ensure the cache directory exists, so that we can store the fetched data
+    if (!fs.existsSync(CACHE_DIR)) {
+      fs.mkdirSync(CACHE_DIR, { recursive: true });
+    }
+
+    for (let xtref of allXTrefs.xtrefs) {
+      // Go and look if the term is in the external repository and if so, get the commit hash, and other meta info plus the content of the file
+      const item = await fetchTermsFromGitHubRepository(GITHUB_API_TOKEN, xtref.term, xtref.owner, xtref.repo, xtref.terms_dir, options);
+
+      // // Check if fetchedData.data is defined
+      if (item !== null && matchTerm(item.content, xtref.term)) {
+        xtref.commitHash = item.sha;
+        xtref.content = item.content;
+        xtref.avatarUrl = item.repository.owner.avatar_url;
+        console.log(`\n SPEC-UP-T: Match found for term: ${xtref.term} in ${xtref.externalSpec};`);
+      } else {
+        xtref.commitHash = "not found";
+        xtref.content = "This term was not found in the external repository.";
+        console.log(`\n SPEC-UP-T: No match found for term: ${xtref.term} in ${xtref.externalSpec};`);
+      }
+    }
+
+    const allXTrefsStr = JSON.stringify(allXTrefs, null, 2);
+    fs.writeFileSync(outputPathJSON, allXTrefsStr, 'utf8');
+    const stringReadyForFileWrite = `const allXTrefs = ${allXTrefsStr};`;
+    fs.writeFileSync(outputPathJS, stringReadyForFileWrite, 'utf8');
+    fs.writeFileSync(outputPathJSTimeStamped, stringReadyForFileWrite, 'utf8');
+
+    require('../../index.js')({ nowatch: true });
+  } catch (error) {
+    console.error("An error occurred:", error);
+  }
+}
+
+module.exports.processXTrefsData = processXTrefsData;
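For orientation, here is a minimal usage sketch of the new processXTrefsData export. The xtref field names and the options.cache flag are taken from the diff above; the require path, the concrete values, and the output paths are illustrative assumptions, not taken from the package.

// Hypothetical usage sketch; values and paths are placeholders.
const { processXTrefsData } = require('./src/collectExternalReferences/processXTrefsData.js');

const allXTrefs = {
  xtrefs: [
    {
      externalSpec: 'example-spec',
      term: 'example-term',
      owner: 'example-owner',
      repo: 'example-repo',
      terms_dir: 'spec/terms-definitions'
    }
  ]
};

processXTrefsData(
  allXTrefs,
  process.env.GITHUB_API_TOKEN,
  'output/xtrefs-data.json',            // outputPathJSON
  'output/xtrefs-data.js',              // outputPathJS
  'output/xtrefs-data-timestamped.js',  // outputPathJSTimeStamped
  { cache: true }                       // options.cache === false wipes the GitHub cache directory first
);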
package/src/config/paths.js
CHANGED
package/src/configure.js
ADDED
@@ -0,0 +1,96 @@
+/**
+ * This script configures the Spec-Up-T Starterpack by prompting the user for input
+ * and updating the specs.json file accordingly. It performs the following tasks:
+ *
+ * 1. Resolves the path to the specs.json file in the root directory.
+ * 2. Checks if the specs.json file exists, and exits with an error if it does not.
+ * 3. Creates a readline interface to prompt the user for input.
+ * 4. Defines a set of questions to gather information from the user.
+ *
+ * @module configure
+ */
+
+const fs = require('fs');
+const path = require('path');
+const readline = require('readline');
+
+// Resolve the path to specs.json in the root directory
+const JSON_FILE_PATH = path.resolve(process.cwd(), 'specs.json');
+
+// Key for accessing specs in the JSON file
+const SPECS_KEY = 'specs';
+
+// Check if the JSON file exists
+if (!fs.existsSync(JSON_FILE_PATH)) {
+  console.error(`Error: ${JSON_FILE_PATH} does not exist.`);
+  process.exit(1);
+}
+
+// Create the readline interface
+const rl = readline.createInterface({
+  input: process.stdin,
+  output: process.stdout,
+});
+
+// Questions for user input
+const questions = [
+  { field: 'title', prompt: 'Enter title: ', default: 'Spec-Up-T Starterpack' },
+  { field: 'description', prompt: 'Enter description: ', default: 'Create technical specifications in markdown. Based on the original Spec-Up, extended with Terminology tooling' },
+  { field: 'author', prompt: 'Enter author: ', default: 'Trust over IP Foundation' },
+  { field: 'account', prompt: 'Enter account: ', default: 'trustoverip' },
+  { field: 'repo', prompt: 'Enter repo: ', default: 'spec-up-t-starter-pack' },
+];
+
+const userResponses = {};
+
+// Function to prompt the user for inputs
+function collectUserInputs(index = 0) {
+  if (index === questions.length) {
+    rl.close();
+    applySpecFieldsToJSON();
+    return;
+  }
+
+  const { prompt, default: defaultValue } = questions[index];
+  rl.question(`${prompt} (${defaultValue}): `, (answer) => {
+    userResponses[questions[index].field] = answer || defaultValue;
+    collectUserInputs(index + 1);
+  });
+}
+
+// Function to update JSON with user-provided spec fields
+function applySpecFieldsToJSON() {
+  try {
+    const data = JSON.parse(fs.readFileSync(JSON_FILE_PATH, 'utf8'));
+
+    if (!data[SPECS_KEY] || !Array.isArray(data[SPECS_KEY]) || !data[SPECS_KEY][0]) {
+      console.error(`Error: Invalid JSON structure. "${SPECS_KEY}[0]" is missing.`);
+      process.exit(1);
+    }
+
+    // Ensure the "source" key exists in the JSON object
+    if (!data[SPECS_KEY][0].source) {
+      data[SPECS_KEY][0].source = {};
+    }
+
+    // Iterate over user responses and assign fields accordingly
+    Object.entries(userResponses).forEach(([field, value]) => {
+      if (['account', 'repo'].includes(field)) {
+        // Add these fields to the "source" key
+        data[SPECS_KEY][0].source[field] = value;
+      } else {
+        // Add all other fields to the root of the JSON object
+        data[SPECS_KEY][0][field] = value;
+      }
+    });
+
+    fs.writeFileSync(JSON_FILE_PATH, JSON.stringify(data, null, 2), 'utf8');
+    console.log(`Successfully updated ${JSON_FILE_PATH}.`);
+  } catch (error) {
+    console.error(`Error: Could not update ${JSON_FILE_PATH}.`, error.message);
+    process.exit(1);
+  }
+}
+
+// Start user input collection
+collectUserInputs();
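As a hedged illustration of what the new configure script writes (not taken from the package): run from a project root that already contains a specs.json, and accepting every default answer, the first spec entry ends up with the fields below, with account and repo nested under source and the remaining answers at the top level of the entry. Fields already present in specs.json but not asked about are left untouched and are omitted here.

{
  "specs": [
    {
      "title": "Spec-Up-T Starterpack",
      "description": "Create technical specifications in markdown. Based on the original Spec-Up, extended with Terminology tooling",
      "author": "Trust over IP Foundation",
      "source": {
        "account": "trustoverip",
        "repo": "spec-up-t-starter-pack"
      }
    }
  ]
}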
package/src/init.js
CHANGED
@@ -1,25 +1,19 @@
-require('dotenv').config();
 const fs = require('fs-extra');
 const path = require('path');
-const { exec } = require('child_process');
 const outputDir = path.join(process.cwd(), 'output');
 const initFlagPath = path.join(outputDir, 'init.flag');
-
-
-const updateXTrefs = require('./get-xtrefs-data.js').updateXTrefs;
-const { prepareTref } = require('./prepare-tref');
+const collectExternalReferences = require('./collect-external-references.js').collectExternalReferences;
 
 async function initialize() {
   try {
     // Check if the init script has already run
     if (await fs.pathExists(initFlagPath)) {
-      console.log('Initialization has already been completed.');
       return;
     }
 
     // Place the init script here
 
-
+    collectExternalReferences(process.env.GITHUB_API_TOKEN, false);
     // prepareTref(path.join(config.specs[0].spec_directory, config.specs[0].spec_terms_directory));
 
     // End of the init script
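Since init.js no longer calls require('dotenv').config(), GITHUB_API_TOKEN has to be present in the real environment before initialize() runs. A hedged sketch of the new call path, mirroring the line added above; the meaning of the second argument is not documented in this diff and is assumed to be a flag passed through to the collection pipeline.

// Module path taken from the require() added in init.js above.
const collectExternalReferences = require('./src/collect-external-references.js').collectExternalReferences;

// The token must come from the process environment now that dotenv is not loaded here.
collectExternalReferences(process.env.GITHUB_API_TOKEN, false);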
package/src/prepare-tref.js
CHANGED
@@ -10,6 +10,7 @@
 
 const fs = require('fs');
 const path = require('path');
+const dedent = require('dedent');
 
 function getLocalXTrefContent(externalSpec, term) {
   const filePath = path.join('output', 'xtrefs-data.json');
@@ -18,7 +19,14 @@ function getLocalXTrefContent(externalSpec, term) {
 
   for (const xtref of xtrefs) {
     if (xtref.externalSpec === externalSpec && xtref.term === term) {
-      return {
+      return {
+        content: xtref.content,
+        commitHash: xtref.commitHash,
+        owner: xtref.owner,
+        repo: xtref.repo,
+        repoUrl: xtref.repoUrl,
+        avatarUrl: xtref.avatarUrl
+      };
     }
   }
 
@@ -67,7 +75,6 @@ function prepareTref(directory) {
         if (match) {
           const result = match[1].split(',').map(term => term.trim());
           localXTrefContent = getLocalXTrefContent(result[0], result[1]);
-
           /*
 
           Remove the `[[def: ...]]:` lines from the content.
@@ -85,12 +92,22 @@ function prepareTref(directory) {
 
           localXTrefContent.content = localXTrefContent.content.replace(defPart, '');
 
-
+          const readyForWrite = dedent`
+            ${match[0]}
+
+            <!-- This is a copy of the saved remote text. Remove it if you like. It is automatically (re)generated -->
+
+            ~ <span class="meta-info"><span> ${localXTrefContent.owner}</span> <span>[${localXTrefContent.repo}](${localXTrefContent.repoUrl})</span> <span class="commit-hash">Commit Hash: ${localXTrefContent.commitHash}</span></span>
+
+            ${localXTrefContent.content}
+          `;
+
+          fs.writeFileSync(itemPath, readyForWrite, 'utf8');
         }
       }
     }
   } catch (err) {
-    fs.writeFileSync(itemPath, match[0] + '\n\n' + '<!-- This is a copy of the saved remote text. Remove it if you like. It is automatically (re)generated
+    fs.writeFileSync(itemPath, match[0] + '\n\n' + '<!-- This is a copy of the saved remote text. Remove it if you like. It is automatically (re)generated -->\n\nNothing found, so nothing to show.', 'utf8');
     // console.error(`\n SPEC-UP-T: Error reading or writing file ${item.name}: ${err}` + "\n");
   }
 }
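A hedged sketch of how prepareTref might be called, mirroring the commented-out call in init.js above; the directory layout and require path are assumptions, and the real caller may differ. Based on the hunks above, prepareTref rewrites term files in the given directory, appending the cached remote content read from output/xtrefs-data.json below the matched reference line.

const path = require('path');
const { prepareTref } = require('./src/prepare-tref.js');

// Assumed terms directory; in init.js this is built from specs.json
// (spec_directory + spec_terms_directory).
prepareTref(path.join('spec', 'terms-definitions'));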
package/src/utils/doesUrlExist.js
CHANGED
@@ -1,15 +1,23 @@
-
-
+const axios = require('axios');
+
+/**
+ * Checks if a URL returns a 200 status code.
+ * @param {string} url - The URL to check.
+ * @returns {Promise<boolean>} - True if the URL exists (200), false otherwise.
+ */
+async function doesUrlExist(url) {
   try {
-    const response = await
-
-    return true;
-    } else {
-    return false;
-    }
+    const response = await axios.head(url, { timeout: 5000 });
+    return response.status === 200;
   } catch (error) {
-
+    if (error.response && error.response.status === 404) {
+      return false; // URL does not exist
+    }
+    if (error.code === 'ENOTFOUND' || error.code === 'ECONNABORTED') {
+      return false; // Network issues
+    }
+    return false; // Fail-safe return
   }
 }
 
-exports
+module.exports = { doesUrlExist };
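The rewritten helper now resolves to a boolean on every path: a HEAD request with a 5-second timeout returns true only for an HTTP 200 response, while 404, DNS failure, timeout, and any other error all resolve to false. A small usage sketch (the require path is an assumption):

const { doesUrlExist } = require('./src/utils/doesUrlExist.js');

(async () => {
  const exists = await doesUrlExist('https://example.com');
  console.log(exists); // true if the HEAD request returned 200, false otherwise
})();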
package/src/utils/isLineWithDefinition.js
ADDED
@@ -0,0 +1,13 @@
+async function isLineWithDefinition(line) {
+  line = line.trim();
+  // Check if the string starts with `[[def:` and ends with `]]`
+  if (line.startsWith('[[def:') && line.endsWith(']]')) {
+    // console.log('String starts with `[[def:` and ends with `]]`');
+    return true;
+  } else {
+    // console.log('String does not start with `[[def:` or end with `]]`');
+    return false;
+  }
+}
+
+exports.isLineWithDefinition = isLineWithDefinition;
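Note that the helper is declared async even though it does no asynchronous work, so callers receive a Promise<boolean> rather than a plain boolean. A usage sketch (the require path is an assumption):

const { isLineWithDefinition } = require('./src/utils/isLineWithDefinition.js');

isLineWithDefinition('[[def: example-term, alias]]').then(console.log);   // true
isLineWithDefinition('Just an ordinary line of text').then(console.log);  // false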
package/src/get-xtrefs-data/matchTerm.1.js
REMOVED
@@ -1,23 +0,0 @@
-function matchTerm(text, term) {
-  if (text && typeof text === 'string') {
-    const firstLine = text.split('\n')[0].trim();
-
-    // Check if the string starts with `[[def:` and ends with `]]`
-    if (!firstLine.startsWith('[[def:') || !firstLine.endsWith(']]')) {
-      console.log('String does not start with `[[def:` or end with `]]`');
-      return false;
-    }
-
-    // Remove `[[def:` from the beginning and `]]` from the end
-    let relevantPart = firstLine.slice(7, -2);
-
-    // Split the string on `,` and trim the array elements
-    let termsArray = relevantPart.split(',').map(term => term.trim());
-
-    // Check if the term is in the array
-    return termsArray.includes(term);
-  } else {
-    console.error('Invalid text:', text);
-  }
-}
-exports.matchTerm = matchTerm;
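matchTerm.1.js appears to have been a leftover draft copy; the live matchTerm.js moved to src/collectExternalReferences/ (see the file list, +4 -3). The moved file's contents are not shown in this diff, so the sketch below assumes its behaviour matches the deleted draft above: the term must appear in the comma-separated [[def: ...]] list on the first line of the fetched file.

const { matchTerm } = require('./src/collectExternalReferences/matchTerm.js');

const fileContent = '[[def: example-term, alias]]\nDefinition body text.';
console.log(matchTerm(fileContent, 'example-term')); // true
console.log(matchTerm(fileContent, 'other-term'));   // false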
package/src/get-xtrefs-data/searchGitHubCode.1.js
REMOVED
@@ -1,69 +0,0 @@
-async function searchGitHubCode(GITHUB_API_TOKEN, searchString, owner, repo, subdirectory) {
-  const { Octokit } = await import("octokit");
-  const { throttling } = await import("@octokit/plugin-throttling");
-
-  // Create a throttled Octokit instance
-  const ThrottledOctokit = Octokit.plugin(throttling);
-  const octokit = new ThrottledOctokit({
-    auth: GITHUB_API_TOKEN,
-    throttle: {
-      onRateLimit: (retryAfter, options) => {
-        console.warn(`Request quota exhausted for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-      onAbuseLimit: (retryAfter, options) => {
-        console.warn(`Abuse detected for request ${options.method} ${options.url}`);
-      },
-      onSecondaryRateLimit: (retryAfter, options) => {
-        console.warn(`Secondary rate limit hit for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-    },
-  });
-
-  try {
-    // Perform the search using Octokit with exact match
-    const searchResponse = await octokit.rest.search.code({
-      // q: `${searchString} repo:${owner}/${repo}`, // Fuzzy search
-      q: `"${searchString}" repo:${owner}/${repo} path:${subdirectory}`, // Use quotation marks for exact match
-      // q: `"${searchString}" repo:${owner}/${repo} case:true`, // DOES NOT WORK Use quotation marks for exact match. Case sensitive search
-    });
-
-    // Log the search results
-    console.log("Total results:", searchResponse.data.total_count);
-
-    // const rateLimitResponse = await octokit.rest.rateLimit.get();
-    // console.log("Rate limit:", rateLimitResponse.data);
-
-    // Fetch the content of each file
-    for (const item of searchResponse.data.items) {
-      let content = "";
-      const fileContentResponse = await octokit.rest.repos.getContent({
-        owner: item.repository.owner.login, // Repository owner
-        repo: item.repository.name, // Repository name
-        path: item.path, // File path
-      });
-
-      // Decode the file content (it's base64-encoded)
-      if (fileContentResponse.data.content) {
-        content = Buffer.from(fileContentResponse.data.content, "base64").toString("utf-8");
-      } else {
-        // If the file is larger than 1 MB, GitHub's API will return a download URL instead of the content.
-        console.log("File is too large. Download URL:", fileContentResponse.data.download_url);
-      }
-
-      item.content = content;
-    }
-    return searchResponse;
-  } catch (error) {
-    console.error("Error searching GitHub or fetching file content:", error);
-  }
-}
-
-exports.searchGitHubCode = searchGitHubCode;
package/src/get-xtrefs-data/searchGitHubCode.2.js
REMOVED
@@ -1,77 +0,0 @@
-async function searchGitHubCode(GITHUB_API_TOKEN, searchString, owner, repo, subdirectory) {
-  const { Octokit } = await import("octokit");
-  const { throttling } = await import("@octokit/plugin-throttling");
-
-  // Create a throttled Octokit instance
-  const ThrottledOctokit = Octokit.plugin(throttling);
-  const octokit = new ThrottledOctokit({
-    auth: GITHUB_API_TOKEN,
-    throttle: {
-      onRateLimit: (retryAfter, options) => {
-        console.warn(`Request quota exhausted for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-      onAbuseLimit: (retryAfter, options) => {
-        console.warn(`Abuse detected for request ${options.method} ${options.url}`);
-      },
-      onSecondaryRateLimit: (retryAfter, options) => {
-        console.warn(`Secondary rate limit hit for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-    },
-  });
-
-  try {
-    // Perform the search using Octokit with exact match
-    const searchResponse = await octokit.rest.search.code({
-      q: `"${searchString}" repo:${owner}/${repo} path:${subdirectory}`, // Exact match in subdirectory
-    });
-
-    // Log the search results
-    console.log("Total results:", searchResponse.data.total_count);
-
-    // Fetch the content of each file
-    for (const item of searchResponse.data.items) {
-      // Check if the match is in the first line using text_matches
-      const isFirstLineMatch = item.text_matches.some(match =>
-        match.fragment.split("\n")[0].includes(searchString)
-      );
-
-      if (!isFirstLineMatch) {
-        console.log(`Skipping ${item.path}: Match not in the first line.`);
-        continue; // Skip this file
-      }
-
-      // Fetch file content
-      let content = "";
-      const fileContentResponse = await octokit.rest.repos.getContent({
-        owner: item.repository.owner.login, // Repository owner
-        repo: item.repository.name, // Repository name
-        path: item.path, // File path
-      });
-
-      // Decode the file content (it's base64-encoded)
-      if (fileContentResponse.data.content) {
-        content = Buffer.from(fileContentResponse.data.content, "base64").toString("utf-8");
-      } else {
-        // If the file is larger than 1 MB, GitHub's API will return a download URL instead of the content.
-        console.log("File is too large. Download URL:", fileContentResponse.data.download_url);
-      }
-
-      // Attach the content to the item
-      item.content = content;
-    }
-
-    return searchResponse;
-  } catch (error) {
-    console.error("Error searching GitHub or fetching file content:", error);
-  }
-}
-
-exports.searchGitHubCode = searchGitHubCode;
package/src/get-xtrefs-data/searchGitHubCode.3.js
REMOVED
@@ -1,85 +0,0 @@
-async function searchGitHubCode(GITHUB_API_TOKEN, searchString, owner, repo, subdirectory) {
-  const { Octokit } = await import("octokit");
-  const { throttling } = await import("@octokit/plugin-throttling");
-
-  // Create a throttled Octokit instance
-  const ThrottledOctokit = Octokit.plugin(throttling);
-  const octokit = new ThrottledOctokit({
-    auth: GITHUB_API_TOKEN,
-    throttle: {
-      onRateLimit: (retryAfter, options) => {
-        console.warn(`Request quota exhausted for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-      onAbuseLimit: (retryAfter, options) => {
-        console.warn(`Abuse detected for request ${options.method} ${options.url}`);
-      },
-      onSecondaryRateLimit: (retryAfter, options) => {
-        console.warn(`Secondary rate limit hit for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-    },
-  });
-
-  try {
-    // Perform the search using Octokit with exact match
-    const searchResponse = await octokit.rest.search.code({
-      q: `"${searchString}" repo:${owner}/${repo} path:${subdirectory}`, // Exact match in subdirectory
-      headers: {
-        Accept: "application/vnd.github.v3.text-match+json", // Include text-match media type
-      },
-    });
-
-    // Log the search results
-    console.log("Total results:", searchResponse.data.total_count);
-
-    // Fetch the content of each file
-    for (const item of searchResponse.data.items) {
-      // Check if the match is in the first line using text_matches
-      if (!item.text_matches) {
-        console.log(`Skipping ${item.path}: No text matches found.`);
-        continue;
-      }
-
-      const isFirstLineMatch = item.text_matches.some(match =>
-        match.fragment.split("\n")[0].includes(searchString)
-      );
-
-      if (!isFirstLineMatch) {
-        console.log(`Skipping ${item.path}: Match not in the first line.`);
-        continue; // Skip this file
-      }
-
-      // Fetch file content
-      let content = "";
-      const fileContentResponse = await octokit.rest.repos.getContent({
-        owner: item.repository.owner.login, // Repository owner
-        repo: item.repository.name, // Repository name
-        path: item.path, // File path
-      });
-
-      // Decode the file content (it's base64-encoded)
-      if (fileContentResponse.data.content) {
-        content = Buffer.from(fileContentResponse.data.content, "base64").toString("utf-8");
-      } else {
-        // If the file is larger than 1 MB, GitHub's API will return a download URL instead of the content.
-        console.log("File is too large. Download URL:", fileContentResponse.data.download_url);
-      }
-
-      // Attach the content to the item
-      item.content = content;
-    }
-
-    return searchResponse;
-  } catch (error) {
-    console.error("Error searching GitHub or fetching file content:", error);
-  }
-}
-
-exports.searchGitHubCode = searchGitHubCode;
package/src/get-xtrefs-data/searchGitHubCode.4.js
REMOVED
@@ -1,92 +0,0 @@
-async function searchGitHubCode(GITHUB_API_TOKEN, searchString, owner, repo, subdirectory) {
-  const { Octokit } = await import("octokit");
-  const { throttling } = await import("@octokit/plugin-throttling");
-
-  // Create a throttled Octokit instance
-  const ThrottledOctokit = Octokit.plugin(throttling);
-  const octokit = new ThrottledOctokit({
-    auth: GITHUB_API_TOKEN,
-    throttle: {
-      onRateLimit: (retryAfter, options) => {
-        console.warn(`Request quota exhausted for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-      onAbuseLimit: (retryAfter, options) => {
-        console.warn(`Abuse detected for request ${options.method} ${options.url}`);
-      },
-      onSecondaryRateLimit: (retryAfter, options) => {
-        console.warn(`Secondary rate limit hit for request ${options.method} ${options.url}`);
-        if (options.request.retryCount <= 1) {
-          console.log(`Retrying after ${retryAfter} seconds...`);
-          return true;
-        }
-      },
-    },
-  });
-
-  try {
-    // Perform the search using Octokit with exact match
-    const searchResponse = await octokit.rest.search.code({
-      q: `"${searchString}" repo:${owner}/${repo} path:${subdirectory}`, // Exact match in subdirectory
-      headers: {
-        Accept: "application/vnd.github.v3.text-match+json", // Include text-match media type
-      },
-    });
-
-    // Log the search results
-    console.log("Total results:", searchResponse.data.total_count);
-
-    // Fetch the content of each file
-    for (const item of searchResponse.data.items) {
-      // Check if text_matches exists and is not empty
-      if (!item.text_matches || item.text_matches.length === 0) {
-        console.log(`Skipping ${item.path}: No text matches found.`);
-        continue;
-      }
-
-      // Check if the match is in the first line using text_matches
-      const isFirstLineMatch = item.text_matches.some(match => {
-        if (!match.fragment) {
-          console.log(`Skipping ${item.path}: No fragment found in text match.`);
-          return false;
-        }
-
-        const firstLine = match.fragment.split("\n")[0];
-        return firstLine.includes(searchString);
-      });
-
-      if (!isFirstLineMatch) {
-        console.log(`Skipping ${item.path}: Match not in the first line.`);
-        continue; // Skip this file
-      }
-
-      // Fetch file content
-      let content = "";
-      const fileContentResponse = await octokit.rest.repos.getContent({
-        owner: item.repository.owner.login, // Repository owner
-        repo: item.repository.name, // Repository name
-        path: item.path, // File path
-      });
-
-      // Decode the file content (it's base64-encoded)
-      if (fileContentResponse.data.content) {
-        content = Buffer.from(fileContentResponse.data.content, "base64").toString("utf-8");
-      } else {
-        // If the file is larger than 1 MB, GitHub's API will return a download URL instead of the content.
-        console.log("File is too large. Download URL:", fileContentResponse.data.download_url);
-      }
-
-      // Attach the content to the item
-      item.content = content;
-    }
-
-    return searchResponse;
-  } catch (error) {
-    console.error("Error searching GitHub or fetching file content:", error);
-  }
-}
-
-exports.searchGitHubCode = searchGitHubCode;