@redpanda-data/docs-extensions-and-macros 4.13.3 → 4.13.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/doc-tools-mcp.js +3 -0
- package/bin/doc-tools.js +3 -0
- package/cli-utils/octokit-client.js +36 -0
- package/extensions/generate-rp-connect-info.js +3 -5
- package/package.json +2 -1
- package/tools/cloud-regions/generate-cloud-regions.js +2 -2
- package/tools/fetch-from-github.js +4 -17
- package/tools/get-console-version.js +2 -7
- package/tools/get-redpanda-version.js +2 -7
- package/tools/redpanda-connect/connector-binary-analyzer.js +1 -11
- package/tools/redpanda-connect/helpers/buildConfigYaml.js +8 -3
- package/tools/redpanda-connect/helpers/renderObjectField.js +6 -1
- package/tools/redpanda-connect/pr-summary-formatter.js +9 -6
- package/tools/redpanda-connect/rpcn-connector-docs-handler.js +177 -8
package/bin/doc-tools-mcp.js
CHANGED
@@ -15,6 +15,9 @@
  * - Telemetry: Usage tracking for adoption metrics
  */

+// Load environment variables from .env file if it exists
+require('dotenv').config();
+
 const fs = require('fs');
 const path = require('path');
 const { Server } = require('@modelcontextprotocol/sdk/server/index.js');
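
For context, a minimal sketch of why the dotenv call is added ahead of the other requires: modules loaded later read the token from process.env, so the .env file has to be parsed first. The .env contents and the GITHUB_TOKEN variable name are assumptions based on the token handling elsewhere in this diff, not code from the package.

```js
// Sketch only: load .env before anything that reads process.env.
// Assumes a local .env file such as:
//
//   GITHUB_TOKEN=ghp_xxxxxxxxxxxx
//
require('dotenv').config();

// Modules required after this point (for example, the shared Octokit client
// introduced in this release) can now see the variable.
console.log(process.env.GITHUB_TOKEN ? 'GitHub token loaded' : 'No token; running unauthenticated');
```
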
package/cli-utils/octokit-client.js
ADDED
@@ -0,0 +1,36 @@
+'use strict'
+
+const { Octokit } = require('@octokit/rest')
+const { getGitHubToken } = require('./github-token')
+
+/**
+ * Shared Octokit client instance for GitHub API access
+ * Configured with optional authentication and retry logic
+ *
+ * This singleton instance is shared across all doc-tools modules to:
+ * - Avoid redundant initialization
+ * - Share rate limit tracking
+ * - Centralize GitHub API configuration
+ */
+
+// Get authentication token from environment
+const token = getGitHubToken()
+
+// Configure Octokit options
+const octokitOptions = {
+  userAgent: 'redpanda-docs-tools',
+  retry: {
+    enabled: true,
+    retries: 3
+  }
+}
+
+// Only add auth if token is available
+if (token) {
+  octokitOptions.auth = token
+}
+
+// Create singleton instance
+const octokit = new Octokit(octokitOptions)
+
+module.exports = octokit
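
The rest of this diff replaces per-file Octokit setup with plain require calls against this singleton. A usage sketch, assuming @octokit/rest's standard repos endpoint methods; the release lookup shown here is illustrative, not code from the package, and the require path matches how modules under package/tools reference the client in this diff.

```js
// Reuse the shared, pre-configured client instead of constructing a new Octokit.
const octokit = require('../cli-utils/octokit-client');

async function latestConnectTag () {
  // Example call only; owner/repo mirror the values used elsewhere in this diff.
  const { data } = await octokit.repos.getLatestRelease({
    owner: 'redpanda-data',
    repo: 'connect'
  });
  return data.tag_name;
}

latestConnectTag()
  .then(tag => console.log(`Latest redpanda-data/connect release: ${tag}`))
  .catch(err => console.error(err.message));
```
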

package/extensions/generate-rp-connect-info.js
CHANGED
@@ -25,11 +25,9 @@ module.exports.register = function ({ config }) {
   // Use csvpath (legacy) or csvPath
   const localCsvPath = csvpath || null

-
-
-
-    const token = getGitHubToken()
-    return token ? new Octokit({ auth: token }) : new Octokit()
+  function loadOctokit () {
+    // Use shared Octokit client
+    return require('../cli-utils/octokit-client')
   }

   // Use 'on' and return the promise so Antora waits for async completion

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@redpanda-data/docs-extensions-and-macros",
-  "version": "4.13.3",
+  "version": "4.13.5",
   "description": "Antora extensions and macros developed for Redpanda documentation.",
   "keywords": [
     "antora",

@@ -103,6 +103,7 @@
     "chalk": "4.1.2",
     "cheerio": "^1.1.2",
     "commander": "^14.0.0",
+    "dotenv": "^16.6.1",
     "glob": "^11.0.0",
     "gulp": "^4.0.2",
     "gulp-connect": "^5.7.0",

package/tools/cloud-regions/generate-cloud-regions.js
CHANGED
@@ -70,8 +70,8 @@ function displayClusterType(ct) {
  */
 async function fetchYaml({ owner, repo, path, ref = 'main', token }) {
   try {
-
-    const octokit =
+    // Use shared Octokit client
+    const octokit = require('../../cli-utils/octokit-client');

     console.log(`[cloud-regions] INFO: Fetching ${owner}/${repo}/${path}@${ref} via GitHub API`);


package/tools/fetch-from-github.js
CHANGED
@@ -1,23 +1,10 @@
 const fs = require('fs');
 const path = require('path');

-
-
-
-
-    octokitInstance = process.env.VBOT_GITHUB_API_TOKEN
-      ? new Octokit({
-          auth: process.env.VBOT_GITHUB_API_TOKEN,
-        })
-      : new Octokit();
-
-    if (!process.env.VBOT_GITHUB_API_TOKEN) {
-      console.info(
-        'No GitHub token found (VBOT_GITHUB_API_TOKEN).'
-      );
-    }
-  }
-  return octokitInstance;
+// Use shared Octokit client
+function loadOctokit() {
+  const octokit = require('../cli-utils/octokit-client');
+  return octokit;
 }

 async function saveFile(content, saveDir, filename) {

package/tools/get-console-version.js
CHANGED
@@ -23,13 +23,8 @@ module.exports = async function getConsoleVersion({ beta = false, fromAntora = f
     useBeta = getPrereleaseFromAntora();
   }

-  //
-  const
-  const { Octokit } = await import('@octokit/rest');
-  const token = getGitHubToken();
-  const octokit = token
-    ? new Octokit({ auth: token })
-    : new Octokit();
+  // Use shared Octokit client
+  const octokit = require('../cli-utils/octokit-client');

   // Fetch latest release info
   let data;

package/tools/get-redpanda-version.js
CHANGED
@@ -19,13 +19,8 @@ module.exports = async function getRedpandaVersion({ beta = false, fromAntora =
     useBeta = getPrereleaseFromAntora();
   }

-  //
-  const
-  const { Octokit } = await import('@octokit/rest');
-  const token = getGitHubToken();
-  const octokit = token
-    ? new Octokit({ auth: token })
-    : new Octokit();
+  // Use shared Octokit client
+  const octokit = require('../cli-utils/octokit-client');

   // Fetch version data
   let data;

package/tools/redpanda-connect/connector-binary-analyzer.js
CHANGED
@@ -1,4 +1,4 @@
-const
+const octokit = require('../../cli-utils/octokit-client');
 const { execSync, spawnSync } = require('child_process');
 const fs = require('fs');
 const path = require('path');

@@ -13,16 +13,6 @@ const https = require('https');
  * - Which connectors are self-hosted only
  */

-// Initialize Octokit with optional authentication
-const octokit = new Octokit({
-  auth: process.env.GITHUB_TOKEN,
-  userAgent: 'redpanda-docs-tools',
-  retry: {
-    enabled: true,
-    retries: 3
-  }
-});
-
 const REPO_OWNER = 'redpanda-data';
 const REPO_NAME = 'connect';


package/tools/redpanda-connect/helpers/buildConfigYaml.js
CHANGED
@@ -36,11 +36,16 @@ module.exports = function buildConfigYaml(type, connectorName, children, include
       return; // skip deprecated fields
     }
     if (!includeAdvanced && field.is_advanced) {
-      return; // skip advanced fields in
+      return; // skip advanced fields in "common" mode
     }

-    if
-
+    // Check if this is an array-of-objects (e.g., client_certs[])
+    // These should render as empty arrays, not expanded object structures
+    if (field.kind === 'array' && field.type === 'object' && Array.isArray(field.children)) {
+      // Render as array leaf (e.g., "client_certs: []")
+      lines.push(renderLeafField(field, baseIndent));
+    } else if (field.type === 'object' && Array.isArray(field.children)) {
+      // Render nested object (plain object, not array)
       const nestedLines = renderObjectField(field, baseIndent);
       lines.push(...nestedLines);
     } else {
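
To make the intent of the new branch concrete, here is a small standalone sketch of how an array-of-objects field is now treated. The field shape is inferred from the conditions in the diff, and the renderer call is simplified to a console.log rather than the package's renderLeafField helper.

```js
// Hypothetical field descriptor in the shape the new check expects.
const field = {
  name: 'client_certs',
  kind: 'array',
  type: 'object',
  children: [{ name: 'cert', type: 'string' }, { name: 'key', type: 'string' }]
};

if (field.kind === 'array' && field.type === 'object' && Array.isArray(field.children)) {
  // Array-of-objects: emit an empty-array leaf instead of expanding the children.
  console.log(`${field.name}: []`);
} else if (field.type === 'object' && Array.isArray(field.children)) {
  // Plain object: expand children (handled by renderObjectField in the package).
  field.children.forEach(child => console.log(`${field.name}.${child.name}`));
}
// Output with the new branch: "client_certs: []"
```
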

package/tools/redpanda-connect/helpers/renderObjectField.js
CHANGED
@@ -28,7 +28,12 @@ module.exports = function renderObjectField(field, indentLevel) {
     if (child.is_deprecated) {
       return; // skip entirely
     }
-    if
+    // Check if this is an array-of-objects (e.g., client_certs[])
+    // These should render as empty arrays, not expanded object structures
+    if (child.kind === 'array' && child.type === 'object' && Array.isArray(child.children)) {
+      // Render as array leaf (e.g., "client_certs: []")
+      lines.push(renderLeafField(child, childIndent));
+    } else if (Array.isArray(child.children) && child.children.length > 0) {
       // Nested object → recurse
       lines.push(...renderObjectField(child, childIndent));
     } else {

package/tools/redpanda-connect/pr-summary-formatter.js
CHANGED
@@ -397,8 +397,9 @@ function generatePRSummary(diffData, binaryAnalysis = null, draftedConnectors =
     lines.push('');
     cloudSupportedNew.forEach(c => {
       lines.push(`- **${c.name}** (${c.type}, ${c.status})`);
-
-
+      const desc = c.summary || c.description;
+      if (desc) {
+        const shortDesc = truncateToSentence(desc, 2);
         lines.push(` - ${shortDesc}`);
       }
     });

@@ -410,8 +411,9 @@ function generatePRSummary(diffData, binaryAnalysis = null, draftedConnectors =
     lines.push('');
     selfHostedOnlyNew.forEach(c => {
       lines.push(`- **${c.name}** (${c.type}, ${c.status})`);
-
-
+      const desc = c.summary || c.description;
+      if (desc) {
+        const shortDesc = truncateToSentence(desc, 2);
         lines.push(` - ${shortDesc}`);
       }
     });

@@ -421,8 +423,9 @@ function generatePRSummary(diffData, binaryAnalysis = null, draftedConnectors =
     // No cloud support info, just list all
     diffData.details.newComponents.forEach(c => {
       lines.push(`- **${c.name}** (${c.type}, ${c.status})`);
-
-
+      const desc = c.summary || c.description;
+      if (desc) {
+        const shortDesc = truncateToSentence(desc, 2);
         lines.push(` - ${shortDesc}`);
       }
     });
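
The pattern repeated across these hunks is: prefer the component's short summary, fall back to its description, and cap the result at two sentences before emitting a sub-bullet. A self-contained sketch, with a stand-in for the formatter's truncateToSentence helper (the real implementation may differ) and a hypothetical component entry:

```js
// Stand-in helper: keep at most the first N sentences of a description.
function truncateToSentence (text, maxSentences) {
  const sentences = text.match(/[^.!?]+[.!?]+/g) || [text];
  return sentences.slice(0, maxSentences).map(s => s.trim()).join(' ');
}

// Hypothetical component entry illustrating the summary-then-description fallback.
const c = {
  name: 'example_input',
  summary: 'Reads records from an upstream service. Supports batching. Retries on failure.'
};

const desc = c.summary || c.description;
if (desc) {
  console.log(` - ${truncateToSentence(desc, 2)}`);
}
// Prints: " - Reads records from an upstream service. Supports batching."
```
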

package/tools/redpanda-connect/rpcn-connector-docs-handler.js
CHANGED
@@ -189,8 +189,9 @@ function updateWhatsNew ({ dataDir, oldVersion, newVersion, binaryAnalysis }) {
     for (const comp of comps) {
       section += `** xref:guides:bloblang/functions.adoc#${comp.name}[\`${comp.name}\`]`
       if (comp.status && comp.status !== 'stable') section += ` (${comp.status})`
-
-
+      const desc = comp.summary || comp.description
+      if (desc) {
+        section += `: ${capToTwoSentences(desc)}`
       } else {
         section += `\n+\n// TODO: Add description for ${comp.name} function`
       }
@@ -201,8 +202,9 @@ function updateWhatsNew ({ dataDir, oldVersion, newVersion, binaryAnalysis }) {
     for (const comp of comps) {
       section += `** xref:guides:bloblang/methods.adoc#${comp.name}[\`${comp.name}\`]`
       if (comp.status && comp.status !== 'stable') section += ` (${comp.status})`
-
-
+      const desc = comp.summary || comp.description
+      if (desc) {
+        section += `: ${capToTwoSentences(desc)}`
       } else {
         section += `\n+\n// TODO: Add description for ${comp.name} method`
       }
@@ -267,7 +269,8 @@ function updateWhatsNew ({ dataDir, oldVersion, newVersion, binaryAnalysis }) {

   for (const comp of diff.details.deprecatedComponents) {
     const typeLabel = comp.type.charAt(0).toUpperCase() + comp.type.slice(1)
-    const
+    const descText = comp.summary || comp.description
+    const desc = descText ? capToTwoSentences(descText) : '-'

     if (comp.type === 'bloblang-functions') {
       section += `|xref:guides:bloblang/functions.adoc#${comp.name}[${comp.name}]\n`
@@ -913,6 +916,9 @@ async function handleRpcnConnectorDocs (options) {
       fs.unlinkSync(oldestPath)
       console.log(`🧹 Deleted old version from docs-data: ${oldestFile}`)
     }
+
+    // Reload newIndex after augmentation so diff generation uses augmented data
+    newIndex = JSON.parse(fs.readFileSync(dataFile, 'utf8'))
   } catch (err) {
     console.error(`Warning: Failed to augment data file: ${err.message}`)
   }
@@ -1066,7 +1072,9 @@ async function handleRpcnConnectorDocs (options) {
       validConnectors.push({
         name: connector.name,
         type: type.replace(/s$/, ''),
-        status: connector.status || connector.type || 'stable'
+        status: connector.status || connector.type || 'stable',
+        cloudOnly: connector.cloudOnly === true,
+        requiresCgo: connector.requiresCgo === true
       })
     }
   })
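
The two new flags feed the directory routing added in the next hunk: a cloud-only connector is looked up only under the cloud-only partials root, while everything else is checked against the regular pages and partials roots. A condensed sketch of that filter, with a hypothetical connector entry and placeholder root paths (only the cloud-only path appears verbatim in the diff):

```js
const fs = require('fs');
const path = require('path');

// Hypothetical entry in the shape validConnectors now carries.
const connector = { name: 'example_output', type: 'output', status: 'stable', cloudOnly: true, requiresCgo: false };

// Root directories; only the cloud-only path is taken from the diff, the others are placeholders.
const roots = {
  pages: path.resolve(process.cwd(), 'modules/components/pages'),
  partials: path.resolve(process.cwd(), 'modules/components/partials/components'),
  cloudOnly: path.resolve(process.cwd(), 'modules/components/partials/components/cloud-only')
};

const relPath = path.join(`${connector.type}s`, `${connector.name}.adoc`);
const missing = connector.cloudOnly
  ? !fs.existsSync(path.join(roots.cloudOnly, relPath)) // cloud-only: check one location
  : ![roots.pages, roots.partials].some(root => fs.existsSync(path.join(root, relPath))); // regular: either location
console.log(missing ? `Missing docs for ${connector.name}` : `Docs found for ${connector.name}`);
```
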
@@ -1113,14 +1121,175 @@ async function handleRpcnConnectorDocs (options) {
     cloudOnly: path.resolve(process.cwd(), 'modules/components/partials/components/cloud-only')
   }

-
+  // Build a set of cloud-supported connectors (inCloud + cloudOnly, excluding self-hosted-only)
+  const cloudSupportedSet = new Set()
+  if (binaryAnalysis?.comparison) {
+    // inCloud = available in both OSS and Cloud
+    binaryAnalysis.comparison.inCloud?.forEach(c => {
+      cloudSupportedSet.add(`${c.type}:${c.name}`)
+    })
+    // cloudOnly = only available in Cloud (not in OSS)
+    binaryAnalysis.comparison.cloudOnly?.forEach(c => {
+      cloudSupportedSet.add(`${c.type}:${c.name}`)
+    })
+  } else {
+    // Fallback when binary analysis is unavailable:
+    // Check all connectors that have cloudSupported flag or assume all non-deprecated are cloud-supported
+    console.log(' ℹ️ Binary analysis unavailable - checking all non-deprecated connectors for cloud-docs')
+    const types = ['inputs', 'outputs', 'processors', 'caches', 'rate_limits', 'buffers', 'metrics', 'scanners', 'tracers']
+    types.forEach(type => {
+      if (Array.isArray(dataObj[type])) {
+        dataObj[type].forEach(connector => {
+          // Include if cloudSupported is explicitly true, or if it's null/undefined and not deprecated
+          const isCloudSupported = connector.cloudSupported === true ||
+            (connector.cloudSupported == null && connector.status !== 'deprecated')
+          if (isCloudSupported && connector.name) {
+            // Store type as plural to match binary analysis format
+            cloudSupportedSet.add(`${type}:${connector.name}`)
+          }
+        })
+      }
+    })
+  }
+
+  // Check for missing connector documentation in rp-connect-docs
+  const allMissing = validConnectors.filter(({ name, type, cloudOnly }) => {
     const relPath = path.join(`${type}s`, `${name}.adoc`)
-
+
+    // For cloud-only connectors, ONLY check the cloud-only directory
+    if (cloudOnly) {
+      return !fs.existsSync(path.join(roots.cloudOnly, relPath))
+    }
+
+    // For regular connectors, check pages and partials (not cloud-only)
+    const existsInAny = [roots.pages, roots.partials].some(root =>
       fs.existsSync(path.join(root, relPath))
     )
     return !existsInAny
   })

+  // Check for cloud-supported connectors missing from cloud-docs repo (via GitHub API)
+  const missingFromCloudDocs = []
+  const cloudDocsErrors = []
+  if (cloudSupportedSet.size > 0 && options.checkCloudDocs !== false) {
+    console.log('\n ℹ️ Checking cloud-docs repository for missing connector pages...')
+
+    // Use shared Octokit instance
+    const octokit = require('../../cli-utils/octokit-client')
+
+    try {
+      // Check each cloud-supported connector
+      // Filter to only check actual connector/component types that need individual pages
+      const connectorTypes = ['inputs', 'outputs', 'processors', 'caches', 'buffers', 'scanners', 'metrics', 'tracers']
+
+      for (const connectorKey of cloudSupportedSet) {
+        const [type, name] = connectorKey.split(':')
+
+        // Skip non-connector types (config, bloblang-functions, bloblang-methods, rate-limits)
+        if (!connectorTypes.includes(type)) {
+          continue
+        }
+
+        // Skip deprecated connectors - they don't need cloud-docs pages
+        if (Array.isArray(dataObj[type])) {
+          const connector = dataObj[type].find(c => c.name === name)
+          if (connector && connector.status === 'deprecated') {
+            continue
+          }
+        }
+
+        const cloudDocsPath = `modules/develop/pages/connect/components/${type}/${name}.adoc`
+
+        try {
+          await octokit.repos.getContent({
+            owner: 'redpanda-data',
+            repo: 'cloud-docs',
+            path: cloudDocsPath,
+            ref: 'main'
+          })
+          // File exists, no action needed
+        } catch (error) {
+          if (error.status === 404) {
+            // File doesn't exist in cloud-docs
+            missingFromCloudDocs.push({ type, name, path: cloudDocsPath })
+          } else {
+            // Non-404 error (auth, rate-limit, network, etc.)
+            // Try fallback: check raw URL without authentication
+            const rawUrl = `https://raw.githubusercontent.com/redpanda-data/cloud-docs/main/${cloudDocsPath}`
+            try {
+              const https = require('https')
+              const { URL } = require('url')
+              const parsedUrl = new URL(rawUrl)
+
+              await new Promise((resolve, reject) => {
+                const req = https.request({
+                  hostname: parsedUrl.hostname,
+                  path: parsedUrl.pathname,
+                  method: 'HEAD',
+                  timeout: 5000
+                }, (res) => {
+                  if (res.statusCode === 200) {
+                    resolve() // File exists
+                  } else if (res.statusCode === 404) {
+                    reject(new Error('404'))
+                  } else {
+                    reject(new Error(`Status ${res.statusCode}`))
+                  }
+                })
+                req.on('error', reject)
+                req.on('timeout', () => {
+                  req.destroy()
+                  reject(new Error('Timeout'))
+                })
+                req.end()
+              })
+              // Fallback succeeded - file exists, no action needed
+            } catch (fallbackError) {
+              if (fallbackError.message === '404') {
+                // Confirmed missing via fallback
+                missingFromCloudDocs.push({ type, name, path: cloudDocsPath })
+              } else {
+                // Both API and fallback failed
+                cloudDocsErrors.push({
+                  type,
+                  name,
+                  path: cloudDocsPath,
+                  status: error.status || 'unknown',
+                  message: `API: ${error.message}; Fallback: ${fallbackError.message}`
+                })
+              }
+            }
+          }
+        }
+      }
+
+      // Report results
+      if (cloudDocsErrors.length > 0) {
+        console.log(` ⚠️ Encountered ${cloudDocsErrors.length} error(s) while checking cloud-docs (check inconclusive):`)
+        cloudDocsErrors.forEach(({ type, name, status, message }) => {
+          console.log(` • ${type}/${name} - Status ${status}: ${message}`)
+        })
+        console.log(` ℹ️ Please resolve these errors (e.g., check GITHUB_TOKEN or VBOT_GITHUB_API_TOKEN, API rate limits, network connectivity)`)
+        if (missingFromCloudDocs.length > 0) {
+          console.log(` ℹ️ Additionally, ${missingFromCloudDocs.length} connector(s) confirmed missing from cloud-docs:`)
+          missingFromCloudDocs.forEach(({ type, name }) => {
+            console.log(` • ${type}/${name}`)
+          })
+        }
+      } else if (missingFromCloudDocs.length > 0) {
+        console.log(` ⚠️ Found ${missingFromCloudDocs.length} cloud-supported connector(s) missing from cloud-docs:`)
+        missingFromCloudDocs.forEach(({ type, name }) => {
+          console.log(` • ${type}/${name}`)
+        })
+        console.log(` ℹ️ These connectors need pages added to https://github.com/redpanda-data/cloud-docs`)
+      } else {
+        console.log(` ✓ All cloud-supported connectors have pages in cloud-docs`)
+      }
+    } catch (error) {
+      console.log(` ⚠️ Could not check cloud-docs: ${error.message}`)
+    }
+  }
+
   const missingConnectors = allMissing.filter(c =>
     !c.name.includes('sql_driver') &&
     c.status !== 'deprecated'