spec-up-t 1.0.88 → 1.0.90
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +361 -341
- package/package.json +4 -1
- package/src/config/paths.js +46 -0
- package/src/create-versions-index.js +1 -1
- package/src/fix-markdown-files.js +1 -2
- package/src/get-xtrefs-data/checkRateLimit.js +17 -0
- package/src/get-xtrefs-data/matchTerm.1.js +23 -0
- package/src/get-xtrefs-data/matchTerm.js +26 -0
- package/src/get-xtrefs-data/searchGitHubCode.1.js +69 -0
- package/src/get-xtrefs-data/searchGitHubCode.2.js +77 -0
- package/src/get-xtrefs-data/searchGitHubCode.3.js +85 -0
- package/src/get-xtrefs-data/searchGitHubCode.4.js +92 -0
- package/src/get-xtrefs-data/searchGitHubCode.5.js +97 -0
- package/src/get-xtrefs-data/searchGitHubCode.js +97 -0
- package/src/get-xtrefs-data/setupFetchHeaders.js +14 -0
- package/src/get-xtrefs-data.js +96 -144
- package/src/init.js +36 -0
- package/src/json-key-validator.js +1 -1
- package/src/prepare-tref.js +16 -3
- package/src/utils/doesUrlExist.js +15 -0
package/src/get-xtrefs-data/searchGitHubCode.js
ADDED
@@ -0,0 +1,97 @@
+ async function searchGitHubCode(GITHUB_API_TOKEN, searchString, owner, repo, subdirectory) {
+     const { Octokit } = await import("octokit");
+     const { throttling } = await import("@octokit/plugin-throttling");
+
+     // Create a throttled Octokit instance
+     const ThrottledOctokit = Octokit.plugin(throttling);
+     const octokit = new ThrottledOctokit({
+         auth: GITHUB_API_TOKEN,
+         throttle: {
+             onRateLimit: (retryAfter, options) => {
+                 console.warn(`Request quota exhausted for request ${options.method} ${options.url}`);
+                 if (options.request.retryCount <= 1) {
+                     console.log(`Retrying after ${retryAfter} seconds...`);
+                     return true;
+                 }
+             },
+             onAbuseLimit: (retryAfter, options) => {
+                 console.warn(`Abuse detected for request ${options.method} ${options.url}`);
+             },
+             onSecondaryRateLimit: (retryAfter, options) => {
+                 console.warn(`Secondary rate limit hit for request ${options.method} ${options.url}`);
+                 if (options.request.retryCount <= 1) {
+                     console.log(`Retrying after ${retryAfter} seconds...`);
+                     return true;
+                 }
+             },
+         },
+     });
+
+     try {
+         // Perform the search using Octokit with exact match
+         const searchResponse = await octokit.rest.search.code({
+             q: `"${searchString}" repo:${owner}/${repo} path:${subdirectory}`, // Exact match in subdirectory
+             headers: {
+                 Accept: "application/vnd.github.v3.text-match+json", // Include text-match media type
+             },
+         });
+
+         // Log the search results
+         console.log("Total results:", searchResponse.data.total_count);
+
+         // Fetch the content of each file
+         for (const item of searchResponse.data.items) {
+             // Check if text_matches exists and is not empty
+             if (!item.text_matches || item.text_matches.length === 0) {
+                 console.log(`Skipping ${item.path}: No text matches found.`);
+                 continue;
+             }
+
+             // Check if the match is in the first line using text_matches
+             const isFirstLineMatch = item.text_matches.some(match => {
+                 if (!match.fragment) {
+                     console.log(`Skipping ${item.path}: No fragment found in text match.`);
+                     return false;
+                 }
+
+                 const firstLine = match.fragment.split("\n")[0];
+                 return firstLine.includes(searchString);
+             });
+
+             if (!isFirstLineMatch) {
+                 console.log(`Skipping ${item.path}: Match not in the first line.`);
+                 continue; // Skip this file
+             }
+
+             // Fetch file content
+             let content = "";
+             try {
+                 const fileContentResponse = await octokit.rest.repos.getContent({
+                     owner: item.repository.owner.login, // Repository owner
+                     repo: item.repository.name, // Repository name
+                     path: item.path, // File path
+                 });
+
+                 // Decode the file content (it's base64-encoded)
+                 if (fileContentResponse.data.content) {
+                     content = Buffer.from(fileContentResponse.data.content, "base64").toString("utf-8");
+                 } else {
+                     // If the file is larger than 1 MB, GitHub's API will return a download URL instead of the content.
+                     console.log("File is too large. Download URL:", fileContentResponse.data.download_url);
+                 }
+             } catch (error) {
+                 console.error(`Error fetching content for ${item.path}:`, error);
+                 content = ""; // Set content to an empty string if there's an error
+             }
+
+             // Attach the content to the item
+             item.content = content;
+         }
+
+         return searchResponse;
+     } catch (error) {
+         console.error("Error searching GitHub or fetching file content:", error);
+     }
+ }
+
+ exports.searchGitHubCode = searchGitHubCode;
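For orientation only, here is a minimal sketch of how this new helper could be called from a Node.js script. The require path, token source, repository coordinates and search string are illustrative assumptions, not values taken from the package; Node 18+ and the octokit packages are assumed to be installed.

// Hypothetical usage sketch of searchGitHubCode (assumed values, not part of the package).
const { searchGitHubCode } = require('./src/get-xtrefs-data/searchGitHubCode.js');

(async () => {
    const response = await searchGitHubCode(
        process.env.GITHUB_API_TOKEN,     // token read from the environment (assumption)
        'example term',                   // exact phrase to search for (placeholder)
        'example-owner',                  // repository owner (placeholder)
        'example-repo',                   // repository name (placeholder)
        'spec/terms-definitions'          // subdirectory used in the path: qualifier (placeholder)
    );
    if (response) {
        // Each matching item has its decoded file content attached as item.content.
        response.data.items.forEach(item => console.log(item.path, item.content.length));
    }
})();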
package/src/get-xtrefs-data/setupFetchHeaders.js
ADDED

@@ -0,0 +1,14 @@
+ function setupFetchHeaders(GITHUB_API_TOKEN) {
+     const fetchHeaders = {
+         'Accept': 'application/vnd.github.v3+json'
+     };
+
+     if (GITHUB_API_TOKEN) {
+         fetchHeaders['Authorization'] = `token ${GITHUB_API_TOKEN}`;
+     } else {
+         console.log('\n SPEC-UP-T: There is no GitHub token set up. Therefore, you are more likely to be at your limit of GitHub API requests. If you run into the limit, create a token and search the documentation on this topic.\n');
+     }
+
+     return fetchHeaders;
+ }
+ exports.setupFetchHeaders = setupFetchHeaders;
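A short hypothetical usage sketch of the extracted helper (not part of the package): the endpoint below is just an example of a GitHub REST call that accepts these headers, and Node 18+ is assumed for the global fetch.

// Hypothetical usage sketch of setupFetchHeaders (assumed require path and endpoint).
const { setupFetchHeaders } = require('./src/get-xtrefs-data/setupFetchHeaders.js');

const headers = setupFetchHeaders(process.env.GITHUB_API_TOKEN);
fetch('https://api.github.com/rate_limit', { headers })
    .then(response => response.json())
    .then(data => console.log('Core rate limit:', data.resources.core));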
package/src/get-xtrefs-data.js
CHANGED
@@ -10,9 +10,33 @@
   * @since 2024-06-09
   */

-
  const fs = require('fs-extra');
+ const path = require('path');
+ const readlineSync = require('readline-sync');
+ const { searchGitHubCode } = require('./get-xtrefs-data/searchGitHubCode.js');
+ const { matchTerm } = require('./get-xtrefs-data/matchTerm');
  const config = fs.readJsonSync('specs.json');
+ const { doesUrlExist } = require('./utils/doesUrlExist.js');
+ const { addPath, getPath, getAllPaths } = require('./config/paths');
+
+ const externalSpecsRepos = config.specs[0].external_specs;
+
+ // Check if the URLs for the external specs repositories are valid, and prompt the user to abort if they are not.
+ externalSpecsRepos.forEach(repo => {
+     // Construct the URL for the terms directory of the repository
+
+     doesUrlExist(repo.url, repo.terms_dir).then(exists => {
+         if (!exists) {
+             const userInput = readlineSync.question(`\n SPEC-UP-T: This external reference is not a valid URL:\n Repository: ${repo.url},\n Terms directory: ${repo.terms_dir}\n Do you want to stop? (yes/no): `);
+             if (userInput.toLowerCase() === 'yes' || userInput.toLowerCase() === 'y') {
+                 console.log('Stopping...');
+                 process.exit(1);
+             }
+         }
+     }).catch(error => {
+         console.error('\n SPEC-UP-T:Error checking URL existence:', error);
+     });
+ });

  // Collect all directories that contain files with a term and definition
  // This maps over the specs in the config file and constructs paths to directories
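For context, the loop above reads config.specs[0].external_specs and uses the url and terms_dir fields of each entry. A minimal hypothetical fragment with placeholder values (not taken from the package) could look like this:

// Hypothetical shape of the external_specs entries consumed above (placeholder values, field names inferred from the code).
const exampleConfig = {
    specs: [{
        external_specs: [
            {
                url: 'https://github.com/example-owner/example-glossary', // repo.url, checked via doesUrlExist
                terms_dir: 'spec/terms-definitions'                       // repo.terms_dir, appended to the URL
            }
        ]
    }]
};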
@@ -34,110 +58,11 @@ const outputPathJSON = 'output/xtrefs-data.json';
  const outputPathJS = 'output/xtrefs-data.js';
  const outputPathJSTimeStamped = 'output/xtrefs-history/xtrefs-data-' + Date.now() + '.js';

- function setupFetchHeaders(GITHUB_API_TOKEN) {
-     const fetchHeaders = {
-         'Accept': 'application/vnd.github.v3+json'
-     };
-
-     if (GITHUB_API_TOKEN) {
-         fetchHeaders['Authorization'] = `token ${GITHUB_API_TOKEN}`;
-     } else {
-         console.log('\n SPEC-UP-T: There is no GitHub token set up. Therefore, you are more likely to be at your limit of GitHub API requests. If you run into the limit, create a token and search the documentation on this topic.\n');
-     }
-
-     return fetchHeaders;
- }
-
- // Function to check the rate limit of the GitHub API
- function checkRateLimit(response) {
-     if (response.status === 403 && response.headers.get('X-RateLimit-Remaining') === '0') {
-         const resetTime = new Date(response.headers.get('X-RateLimit-Reset') * 1000);
-         console.error(`\n SPEC-UP-T: Github API rate limit exceeded. Try again after ${resetTime}. See https://trustoverip.github.io/spec-up-t-website/docs/github-token/ for more info.` + "\n");
-         return true;
-     } else {
-         console.log(`\n SPEC-UP-T: Github API rate limit: ${response.headers.get('X-RateLimit-Remaining')} requests remaining. See https://trustoverip.github.io/spec-up-t-website/docs/github-token/ for more info.` + "\n");
-     }
-     return false;
- }
-
- // Function to fetch term information from GitHub, including commit hash and content.
- async function fetchTermInfoFromGithub(GITHUB_API_TOKEN, xtref) {
-     try {
-         // prerequisite: filename should be the term in the match object with spaces replaced by dashes and all lowercase
-         //TODO: Loop through all markdown files to find the term and get the filename, instead of assuming that the filename is the term with spaces replaced by dashes and all lowercase
-         const url = `https://api.github.com/repos/${xtref.owner}/${xtref.repo}/commits?path=${xtref.terms_dir}/${xtref.term.replace(/ /g, '-').toLowerCase()}.md&per_page=1`;
-         const response = await fetch(url, { headers: setupFetchHeaders(GITHUB_API_TOKEN) });
-
-         // Check for rate limit before proceeding
-         if (checkRateLimit(response)) {
-             return;
-         }
-
-         if (response.ok) {
-             const data = await response.json();
-             if (data.length > 0) {
-                 const commitHash = data[0].sha;
-                 const content = await fetchFileContentFromCommit(GITHUB_API_TOKEN, xtref.owner, xtref.repo, commitHash, `${xtref.terms_dir}/${xtref.term.replace(/ /g, '-').toLowerCase()}.md`);
-                 return { commitHash, content };
-             }
-         } else {
-             console.error(`\n SPEC-UP-T: Failed to fetch commit hash for ${xtref.term}: ${response.statusText}\n`);
-             return { commitHash: null, content: null };
-         }
-     } catch (error) {
-         console.error(`\n SPEC-UP-T: Error fetching data for term ${xtref.term}: ${error.message}\n`);
-     }
-     return null;
- }
-
- // Function to fetch the content of a file from a specific commit in a GitHub repository.
- async function fetchFileContentFromCommit(GITHUB_API_TOKEN, owner, repo, commitHash, filePath) {
-     const MAX_RETRIES = 3;
-     let retries = 0;
-
-     while (retries < MAX_RETRIES) {
-         try {
-             const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${commitHash}?recursive=1`;
-             const treeResponse = await fetch(treeUrl, { headers: setupFetchHeaders(GITHUB_API_TOKEN) });
-
-             if (treeResponse.ok) {
-                 const treeData = await treeResponse.json();
-                 const file = treeData.tree.find(item => item.path === filePath);
-                 if (file) {
-                     const fileContentResponse = await fetch(file.url);
-                     const fileContentData = await fileContentResponse.json();
-                     if (fileContentData.content) {
-                         return Buffer.from(fileContentData.content, 'base64').toString('utf-8');
-                     } else {
-                         console.error('Error: fileContentData.content is undefined');
-                     }
-                 } else {
-                     console.error(`Error: File ${filePath} not found in commit ${commitHash}`);
-                 }
-             } else if (treeResponse.status === 403 && treeResponse.headers.get('X-RateLimit-Remaining') === '0') {
-                 const resetTime = treeResponse.headers.get('X-RateLimit-Reset');
-                 const waitTime = resetTime ? (resetTime - Math.floor(Date.now() / 1000)) * 1000 : 60000;
-                 console.warn(`Rate limit exceeded. Retrying in ${waitTime / 1000} seconds...`);
-                 await new Promise(resolve => setTimeout(resolve, waitTime));
-                 retries++;
-             } else {
-                 console.error(`Error: Failed to fetch tree from ${treeUrl}`);
-                 break;
-             }
-         } catch (error) {
-             console.error(`Error fetching file content: ${error.message}`);
-             break;
-         }
-     }
-
-     return null;
- }
-
  function updateXTrefs(GITHUB_API_TOKEN, skipExisting) {
      // Function to extend xtref objects with additional information, such as repository URL and directory information.
      function extendXTrefs(config, xtrefs) {
          if (config.specs[0].external_specs_repos) {
-             console.log("\n SPEC-UP-T: PLEASE NOTE: Your specs.json file is outdated (not your fault, we changed something). Use this one: https://github.com/trustoverip/spec-up-t-starter-pack/blob/main/spec-up-t-
+             console.log("\n SPEC-UP-T: PLEASE NOTE: Your specs.json file is outdated (not your fault, we changed something). Use this one: https://github.com/trustoverip/spec-up-t-starter-pack/blob/main/spec-up-t-boilerplate/specs.json or e-mail kor@dwarshuis.com for help. \n");
              return;
          }

@@ -174,6 +99,23 @@ function updateXTrefs(GITHUB_API_TOKEN, skipExisting) {
          });
      }

+     // Function to check if an xtref is in the markdown content
+     function isXTrefInMarkdown(xtref, markdownContent) {
+         // const regex = new RegExp(`\\[\\[xref:${xref.term}\\]\\]`, 'g');
+         const regex = new RegExp(`\\[\\[(?:x|t)ref:${xtref.term}\\]\\]`, 'g');
+         const result = regex.test(markdownContent);
+         return result;
+     }
+
+     // Function to process and clean up xref / tref strings found in the markdown file, returning an object with `externalSpec` and `term` properties.
+     function processXTref(xtref) {
+         let [externalSpec, term] = xtref.replace(/\[\[(?:xref|tref):/, '').replace(/\]\]/, '').trim().split(/,/, 2);
+         return {
+             externalSpec: externalSpec.trim(),
+             term: term.trim()
+         };
+     }
+
      // Initialize an object to store all xtrefs.
      let allXTrefs = { xtrefs: [] };

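A quick worked example of the two helpers added above, using an invented reference string:

// Illustrative input/output for processXTref (the reference string is an example, not from the package).
processXTref('[[tref: kmg-1, authentic chained data container]]');
// → { externalSpec: 'kmg-1', term: 'authentic chained data container' }

// isXTrefInMarkdown then tests the collected markdown for exact [[xref:term]] or [[tref:term]] occurrences of that term.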
@@ -183,17 +125,10 @@ function updateXTrefs(GITHUB_API_TOKEN, skipExisting) {
          allXTrefs = existingXTrefs && existingXTrefs.xtrefs ? existingXTrefs : { xtrefs: [] };
      }

-     // Function to check if an xtref is in the markdown content
-     function isXTrefInMarkdown(xtref, markdownContent) {
-         // const regex = new RegExp(`\\[\\[xref:${xref.term}\\]\\]`, 'g');
-         const regex = new RegExp(`\\[\\[(?:x|t)ref:${xtref.term}\\]\\]`, 'g');
-         const result = regex.test(markdownContent);
-         return result;
-     }
-
      // Collect all markdown content
      let allMarkdownContent = '';

+     // Read all main repo Markdown files from a list of directories and concatenate their content into a single string.
      specTermsDirectories.forEach(specDirectory => {
          fs.readdirSync(specDirectory).forEach(file => {
              if (file.endsWith('.md')) {
@@ -210,10 +145,13 @@ function updateXTrefs(GITHUB_API_TOKEN, skipExisting) {

      // Add new entries if they are in the markdown
      const regex = /\[\[(?:xref|tref):.*?\]\]/g;
+
+     // `regex` is the regular expression object, and `allMarkdownContent` is the string being tested. The test method returns a boolean value: true if the pattern is found within the string, and false otherwise.
      if (regex.test(allMarkdownContent)) {
          const xtrefs = allMarkdownContent.match(regex);
          xtrefs.forEach(xtref => {
              const newXTrefObj = processXTref(xtref);
+             // Ensure that newXTrefObj is only added to the xtrefs array if there isn't already an object with the same term and externalSpec properties. This helps maintain the uniqueness of entries in the array based on these two properties.
              if (!allXTrefs.xtrefs.some(existingXTref =>
                  existingXTref.term === newXTrefObj.term && existingXTref.externalSpec === newXTrefObj.externalSpec)) {
                  allXTrefs.xtrefs.push(newXTrefObj);
@@ -221,48 +159,62 @@ function updateXTrefs(GITHUB_API_TOKEN, skipExisting) {
              });
          };

-         //
-
-
-
-             externalSpec: externalSpec.trim(),
-             term: term.trim()
-         };
-     }
+     // Example at this point:
+     // allXTrefs.xtrefs: [
+     //     { externalSpec: 'kmg-1', term: 'authentic-chained-data-container' },
+     // ]

      // Extend each xref with additional data and fetch commit information from GitHub.
      extendXTrefs(config, allXTrefs.xtrefs);

-
-
-
-
-
-
-
-
-
-
-
-
-
+     // Example at this point:
+     // allXTrefs.xtrefs: [
+     //     {
+     //         externalSpec: 'kmg-1',
+     //         term: 'authentic-chained-data-container',
+     //         repoUrl: 'https://github.com/henkvancann/keri-main-glossary',
+     //         terms_dir: 'spec/terms-definitions',
+     //         owner: 'henkvancann',
+     //         repo: 'keri-main-glossary',
+     //         site: null
+     //     }
+     // ]
+
+     (async () => {
+         try {
+             for (let xtref of allXTrefs.xtrefs) {
+                 const fetchedData = await searchGitHubCode(GITHUB_API_TOKEN, xtref.term, xtref.owner, xtref.repo, xtref.terms_dir);
+                 if (fetchedData.data.items.length === 0) {
+                     xtref.commitHash = "not found";
+                     xtref.content = "This term was not found in the external repository.";
+                 } else {
+                     fetchedData.data.items.forEach(item => {
+                         // If the term is found according to the matchTerm function (in the first line, line should start with “[[def:), etc) add the commit hash and content to the xtref object
+                         if (matchTerm(item.content, xtref.term)) {
+                             xtref.commitHash = item.sha;
+                             xtref.content = item.content;
+                             console.log(`\n SPEC-UP-T: Match found for term: ${xtref.term} in ${xtref.externalSpec};`);
+                         } else {
+                             xtref.commitHash = "not found";
+                             xtref.content = "This term was not found in the external repository.";
+                             console.log(`\n SPEC-UP-T: No match found for term: ${xtref.term} in ${xtref.externalSpec};`);
+                         }
+                     });
                  }
-             }
-         }
+             }

-
-
-
-
-
-         fs.writeFileSync(outputPathJS, stringReadyForFileWrite, 'utf8');
-         fs.writeFileSync(outputPathJSTimeStamped, stringReadyForFileWrite, 'utf8');
+             const allXTrefsStr = JSON.stringify(allXTrefs, null, 2);
+             fs.writeFileSync(outputPathJSON, allXTrefsStr, 'utf8');
+             const stringReadyForFileWrite = `const allXTrefs = ${allXTrefsStr};`;
+             fs.writeFileSync(outputPathJS, stringReadyForFileWrite, 'utf8');
+             fs.writeFileSync(outputPathJSTimeStamped, stringReadyForFileWrite, 'utf8');

-
-
-
-
-
+             // Run the render function to update the HTML file
+             require('../index.js')({ nowatch: true });
+         } catch (error) {
+             console.error('An error occurred:', error);
+         }
+     })();
  }

  module.exports = {
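Putting the pieces together, the generated output/xtrefs-data.js would after this change look roughly as follows. The first seven fields mirror the example comments in the diff above; commitHash and content are shown with the fallback values the code assigns when no match is found, so the exact values here are illustrative.

// Approximate shape of output/xtrefs-data.js (values based on the example comments above; not a literal file from the package).
const allXTrefs = {
    xtrefs: [
        {
            externalSpec: 'kmg-1',
            term: 'authentic-chained-data-container',
            repoUrl: 'https://github.com/henkvancann/keri-main-glossary',
            terms_dir: 'spec/terms-definitions',
            owner: 'henkvancann',
            repo: 'keri-main-glossary',
            site: null,
            commitHash: 'not found',
            content: 'This term was not found in the external repository.'
        }
    ]
};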
package/src/init.js
ADDED
@@ -0,0 +1,36 @@
+ require('dotenv').config();
+ const fs = require('fs-extra');
+ const path = require('path');
+ const { exec } = require('child_process');
+ const outputDir = path.join(process.cwd(), 'output');
+ const initFlagPath = path.join(outputDir, 'init.flag');
+ let config = fs.readJsonSync(path.join(process.cwd(), 'specs.json'));
+
+ const updateXTrefs = require('./get-xtrefs-data.js').updateXTrefs;
+ const { prepareTref } = require('./prepare-tref');
+
+ async function initialize() {
+     try {
+         // Check if the init script has already run
+         if (await fs.pathExists(initFlagPath)) {
+             console.log('Initialization has already been completed.');
+             return;
+         }
+
+         // Place the init script here
+
+         updateXTrefs(process.env.GITHUB_API_TOKEN, false);
+         // prepareTref(path.join(config.specs[0].spec_directory, config.specs[0].spec_terms_directory));
+
+         // End of the init script
+
+         // Create the init flag file
+         await fs.writeFile(initFlagPath, 'Initialization completed.');
+
+         console.log('Initialization complete.');
+     } catch (error) {
+         console.error(`Initialization failed: ${error.message}`);
+     }
+ }
+
+ module.exports = { initialize };
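A hypothetical way to invoke the new script from a project; it assumes a specs.json and, optionally, a .env file with GITHUB_API_TOKEN in the working directory, and the require path is illustrative.

// Hypothetical usage sketch of the new init module (assumed path).
const { initialize } = require('./src/init.js');

initialize(); // runs once, then writes output/init.flag so later calls are skipped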
package/src/json-key-validator.js
CHANGED

@@ -34,7 +34,7 @@ function checkKeysSync(object, expectedKeys, parentKey = '') {
      } else if (typeof object === 'object') {
          // If the key is missing from the object, log an error
          if (!(key in object)) {
-             console.error(`\n SPEC-UP-T: Error: Missing key '${key}' in ${parentKey}\n We cannot guarantee that Spec-Up-T will work properly.\n Here is an example specs.json file:\n https://github.com/trustoverip/spec-up-t-starter-pack/blob/main/spec-up-t-
+             console.error(`\n SPEC-UP-T: Error: Missing key '${key}' in ${parentKey}\n We cannot guarantee that Spec-Up-T will work properly.\n Here is an example specs.json file:\n https://github.com/trustoverip/spec-up-t-starter-pack/blob/main/spec-up-t-boilerplate/specs.json` + "\n");
              errorFound = true;
              pauseForEnterSync(); // Pause synchronously to allow user to acknowledge the error
          }
package/src/prepare-tref.js
CHANGED
@@ -1,3 +1,13 @@
+ /**
+  * @file prepare-tref.js
+  * @description This script contains functions to process and prepare term references (trefs) from markdown files.
+  * It includes functionality to read local xtref content from a JSON file and process markdown files in a directory recursively. This script inserts the content of the local xtref (in JSON) into the markdown files. It also inserts a note that this content is temporary and can be removed.
+  *
+  * @module prepareTref
+  * @requires fs
+  * @requires path
+  */
+
  const fs = require('fs');
  const path = require('path');

@@ -70,15 +80,18 @@ function prepareTref(directory) {

      The g flag ensures that all occurrences in the string are replaced.
      */
-     const defPart = /\[\[def: .*?\]\]:/g;
+     // const defPart = /\[\[def: .*?\]\]:/g;
+     const defPart = /\[\[def: ([^,]+),.*?\]\]/g;
+
      localXTrefContent.content = localXTrefContent.content.replace(defPart, '');

-     fs.writeFileSync(itemPath, match[0] + '\n\n' + '<!-- This is a copy of the saved remote text. Remove it if you like. It is automatically (re)generated --><span class="transcluded-xref-term">transcluded xref</span>' + '\n\n~ Commit Hash: ' + localXTrefContent.commitHash + localXTrefContent.content, 'utf8');
+     fs.writeFileSync(itemPath, match[0] + '\n\n' + '<!-- This is a copy of the saved remote text. Remove it if you like. It is automatically (re)generated --><span class="transcluded-xref-term">transcluded xref</span>' + '\n\n~ Commit Hash: ' + localXTrefContent.commitHash + '\n\n\n' + localXTrefContent.content, 'utf8');
              }
          }
      }
  } catch (err) {
-
+     fs.writeFileSync(itemPath, match[0] + '\n\n' + '<!-- This is a copy of the saved remote text. Remove it if you like. It is automatically (re)generated --> Nothing found, so nothing to show.', 'utf8');
+     // console.error(`\n SPEC-UP-T: Error reading or writing file ${item.name}: ${err}` + "\n");
  }
  }
  });
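To illustrate the regex change above with an invented definition line: the old pattern required a colon directly after the closing brackets, while the new pattern matches def tags that list aliases and captures the primary term.

// Illustrative behaviour of the old vs. new defPart patterns (the sample line is an assumption, not from the package).
const sample = '[[def: authentic chained data container, acdc]]';
console.log(/\[\[def: .*?\]\]:/g.test(sample));        // false – old pattern needed a trailing colon
console.log(/\[\[def: ([^,]+),.*?\]\]/g.test(sample)); // true  – new pattern matches and captures the term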
package/src/utils/doesUrlExist.js
ADDED

@@ -0,0 +1,15 @@
+ async function doesUrlExist(repo, termsDir) {
+     const url = `${repo}/blob/main/${termsDir}`;
+     try {
+         const response = await fetch(url, { method: 'HEAD' });
+         if (response.ok) {
+             return true;
+         } else {
+             return false;
+         }
+     } catch (error) {
+         console.log('Error:', error);
+     }
+ }
+
+ exports.doesUrlExist = doesUrlExist;
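Finally, a hypothetical call to the new utility; the repository URL and terms directory are placeholders, and Node 18+ is assumed for the global fetch.

// Hypothetical usage sketch of doesUrlExist (assumed values, not part of the package).
const { doesUrlExist } = require('./src/utils/doesUrlExist.js');

doesUrlExist('https://github.com/example-owner/example-glossary', 'spec/terms-definitions')
    .then(exists => console.log(exists ? 'Terms directory found.' : 'Terms directory not found.'));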