@redpanda-data/docs-extensions-and-macros 4.13.1 → 4.13.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/bin/doc-tools-mcp.js +16 -4
  2. package/bin/doc-tools.js +768 -2089
  3. package/bin/mcp-tools/generated-docs-review.js +2 -2
  4. package/bin/mcp-tools/mcp-validation.js +1 -1
  5. package/bin/mcp-tools/openapi.js +2 -2
  6. package/bin/mcp-tools/property-docs.js +18 -0
  7. package/bin/mcp-tools/rpcn-docs.js +28 -3
  8. package/cli-utils/antora-utils.js +53 -2
  9. package/cli-utils/dependencies.js +313 -0
  10. package/cli-utils/diff-utils.js +273 -0
  11. package/cli-utils/doc-tools-utils.js +54 -0
  12. package/extensions/algolia-indexer/generate-index.js +134 -102
  13. package/extensions/algolia-indexer/index.js +70 -38
  14. package/extensions/collect-bloblang-samples.js +2 -1
  15. package/extensions/generate-rp-connect-categories.js +125 -67
  16. package/extensions/generate-rp-connect-info.js +291 -137
  17. package/macros/rp-connect-components.js +34 -5
  18. package/package.json +4 -3
  19. package/tools/add-commercial-names.js +207 -0
  20. package/tools/bundle-openapi.js +1 -1
  21. package/tools/generate-cli-docs.js +6 -2
  22. package/tools/get-console-version.js +5 -0
  23. package/tools/get-redpanda-version.js +5 -0
  24. package/tools/property-extractor/compare-properties.js +3 -3
  25. package/tools/property-extractor/generate-handlebars-docs.js +14 -14
  26. package/tools/property-extractor/generate-pr-summary.js +46 -0
  27. package/tools/property-extractor/pr-summary-formatter.js +375 -0
  28. package/tools/redpanda-connect/README.adoc +403 -38
  29. package/tools/redpanda-connect/connector-binary-analyzer.js +588 -0
  30. package/tools/redpanda-connect/generate-rpcn-connector-docs.js +97 -34
  31. package/tools/redpanda-connect/parse-csv-connectors.js +1 -1
  32. package/tools/redpanda-connect/pr-summary-formatter.js +663 -0
  33. package/tools/redpanda-connect/report-delta.js +70 -2
  34. package/tools/redpanda-connect/rpcn-connector-docs-handler.js +1279 -0
  35. package/tools/redpanda-connect/templates/connector.hbs +38 -0
  36. package/tools/redpanda-connect/templates/intro.hbs +0 -20
  37. package/tools/redpanda-connect/update-nav.js +216 -0
package/tools/redpanda-connect/rpcn-connector-docs-handler.js (new file)
@@ -0,0 +1,1279 @@
+ 'use strict'
+
+ const { spawnSync } = require('child_process')
+ const path = require('path')
+ const fs = require('fs')
+ const { findRepoRoot } = require('../../cli-utils/doc-tools-utils')
+ const { getAntoraValue, setAntoraValue } = require('../../cli-utils/antora-utils')
+ const fetchFromGithub = require('../fetch-from-github.js')
+ const { generateRpcnConnectorDocs } = require('./generate-rpcn-connector-docs.js')
+ const { getRpkConnectVersion, printDeltaReport } = require('./report-delta')
+
+ /**
+  * Cap description to two sentences
+  * @param {string} description - Full description text
+  * @returns {string} Description capped to two sentences
+  */
+ function capToTwoSentences (description) {
+   if (!description) return ''
+
+   const hasProblematicContent = (text) => {
+     return /```[\s\S]*?```/.test(text) ||
+       /`[^`]+`/.test(text) ||
+       /^[=#]+\s+.+$/m.test(text) ||
+       /\n/.test(text)
+   }
+
+   const abbreviations = [
+     /\bv\d+\.\d+(?:\.\d+)?/gi,
+     /\d+\.\d+/g,
+     /\be\.g\./gi,
+     /\bi\.e\./gi,
+     /\betc\./gi,
+     /\bvs\./gi,
+     /\bDr\./gi,
+     /\bMr\./gi,
+     /\bMs\./gi,
+     /\bMrs\./gi,
+     /\bSt\./gi,
+     /\bNo\./gi
+   ]
+
+   let normalized = description
+   const placeholders = []
+
+   abbreviations.forEach((abbrevRegex, idx) => {
+     normalized = normalized.replace(abbrevRegex, (match) => {
+       const placeholder = `__ABBREV${idx}_${placeholders.length}__`
+       placeholders.push({ placeholder, original: match })
+       return placeholder
+     })
+   })
+
+   normalized = normalized.replace(/\.{3,}/g, (match) => {
+     const placeholder = `__ELLIPSIS_${placeholders.length}__`
+     placeholders.push({ placeholder, original: match })
+     return placeholder
+   })
+
+   const sentenceRegex = /[^.!?]+[.!?]+(?:\s|$)/g
+   const sentences = normalized.match(sentenceRegex)
+
+   if (!sentences || sentences.length === 0) {
+     let result = normalized
+     placeholders.forEach(({ placeholder, original }) => {
+       result = result.replace(placeholder, original)
+     })
+     return result
+   }
+
+   let maxSentences = 2
+
+   if (sentences.length >= 2) {
+     let secondSentence = sentences[1]
+     placeholders.forEach(({ placeholder, original }) => {
+       secondSentence = secondSentence.replace(new RegExp(placeholder, 'g'), original)
+     })
+
+     if (hasProblematicContent(secondSentence)) {
+       maxSentences = 1
+     }
+   }
+
+   let result = sentences.slice(0, maxSentences).join('')
+
+   placeholders.forEach(({ placeholder, original }) => {
+     result = result.replace(new RegExp(placeholder, 'g'), original)
+   })
+
+   return result.trim()
+ }
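
Reviewer note: a minimal usage sketch of capToTwoSentences. Only the function comes from the diff above; the require path and sample descriptions are illustrative.

    const { capToTwoSentences } = require('./rpcn-connector-docs-handler.js')

    // Abbreviations such as "e.g." are protected with placeholders, so the split
    // only happens on real sentence boundaries.
    capToTwoSentences('Caches results in memory, e.g. for deduplication. Evicts old keys. Not persistent.')
    // => 'Caches results in memory, e.g. for deduplication. Evicts old keys.'

    // If the second sentence contains inline code, code fences, headings, or
    // newlines, the result is capped to a single sentence instead.
    capToTwoSentences('Writes rows to a table. Use `batching` to tune throughput.')
    // => 'Writes rows to a table.'
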
+
+ /**
+  * Update whats-new.adoc with new release information
+  * @param {Object} params - Parameters
+  * @param {string} params.dataDir - Data directory path
+  * @param {string} params.oldVersion - Old version string
+  * @param {string} params.newVersion - New version string
+  * @param {Object} params.binaryAnalysis - Binary analysis data
+  */
+ function updateWhatsNew ({ dataDir, oldVersion, newVersion, binaryAnalysis }) {
+   try {
+     const whatsNewPath = path.join(findRepoRoot(), 'modules/get-started/pages/whats-new.adoc')
+     if (!fs.existsSync(whatsNewPath)) {
+       console.error(`Error: Unable to update release notes: 'whats-new.adoc' was not found at: ${whatsNewPath}`)
+       return
+     }
+
+     const diffPath = path.join(dataDir, `connect-diff-${oldVersion}_to_${newVersion}.json`)
+     if (!fs.existsSync(diffPath)) {
+       console.error(`Error: Unable to update release notes: The connector diff JSON was not found at: ${diffPath}`)
+       return
+     }
+
+     let diff
+     try {
+       diff = JSON.parse(fs.readFileSync(diffPath, 'utf8'))
+     } catch (jsonErr) {
+       console.error(`Error: Unable to parse connector diff JSON at ${diffPath}: ${jsonErr.message}`)
+       return
+     }
+
+     let whatsNew
+     try {
+       whatsNew = fs.readFileSync(whatsNewPath, 'utf8')
+     } catch (readErr) {
+       console.error(`Error: Unable to read whats-new.adoc at ${whatsNewPath}: ${readErr.message}`)
+       return
+     }
+
+     const versionRe = new RegExp(`^== Version ${diff.comparison.newVersion.replace(/[-.]/g, '\\$&')}(?:\\r?\\n|$)`, 'm')
+     const match = versionRe.exec(whatsNew)
+     let startIdx = match ? match.index : -1
+     let endIdx = -1
+     if (startIdx !== -1) {
+       const rest = whatsNew.slice(startIdx + 1)
+       const nextMatch = /^== Version /m.exec(rest)
+       endIdx = nextMatch ? startIdx + 1 + nextMatch.index : whatsNew.length
+     }
+
+     let releaseNotesLink = ''
+     if (diff.comparison && diff.comparison.newVersion) {
+       releaseNotesLink = `link:https://github.com/redpanda-data/connect/releases/tag/v${diff.comparison.newVersion}[See the full release notes^].\n\n`
+     }
+     let section = `\n== Version ${diff.comparison.newVersion}\n\n${releaseNotesLink}`
+
+     // Separate Bloblang and regular components
+     const bloblangComponents = []
+     const regularComponents = []
+
+     if (diff.details.newComponents && diff.details.newComponents.length) {
+       // Filter out cloud-only connectors - they don't go in whats-new.adoc
+       const nonCloudOnlyComponents = diff.details.newComponents.filter(comp => {
+         const isCloudOnly = diff.binaryAnalysis?.details?.cloudOnly?.some(cloudComp => {
+           return cloudComp.name === comp.name && cloudComp.type === comp.type
+         })
+         return !isCloudOnly
+       })
+
+       for (const comp of nonCloudOnlyComponents) {
+         if (comp.type === 'bloblang-functions' || comp.type === 'bloblang-methods') {
+           bloblangComponents.push(comp)
+         } else {
+           const isCgoOnly = diff.binaryAnalysis?.details?.cgoOnly?.some(cgo => {
+             return cgo.name === comp.name && cgo.type === comp.type
+           })
+
+           regularComponents.push({
+             ...comp,
+             requiresCgo: isCgoOnly
+           })
+         }
+       }
+     }
+
+     // Bloblang updates section
+     if (bloblangComponents.length > 0) {
+       section += '=== Bloblang updates\n\n'
+       section += 'This release adds the following new Bloblang capabilities:\n\n'
+
+       const byType = {}
+       for (const comp of bloblangComponents) {
+         if (!byType[comp.type]) byType[comp.type] = []
+         byType[comp.type].push(comp)
+       }
+
+       for (const [type, comps] of Object.entries(byType)) {
+         if (type === 'bloblang-functions') {
+           section += '* Functions:\n'
+           for (const comp of comps) {
+             section += `** xref:guides:bloblang/functions.adoc#${comp.name}[\`${comp.name}\`]`
+             if (comp.status && comp.status !== 'stable') section += ` (${comp.status})`
+             if (comp.description) {
+               section += `: ${capToTwoSentences(comp.description)}`
+             } else {
+               section += `\n+\n// TODO: Add description for ${comp.name} function`
+             }
+             section += '\n'
+           }
+         } else if (type === 'bloblang-methods') {
+           section += '* Methods:\n'
+           for (const comp of comps) {
+             section += `** xref:guides:bloblang/methods.adoc#${comp.name}[\`${comp.name}\`]`
+             if (comp.status && comp.status !== 'stable') section += ` (${comp.status})`
+             if (comp.description) {
+               section += `: ${capToTwoSentences(comp.description)}`
+             } else {
+               section += `\n+\n// TODO: Add description for ${comp.name} method`
+             }
+             section += '\n'
+           }
+         }
+       }
+       section += '\n'
+     }
+
+     // Component updates section
+     if (regularComponents.length > 0) {
+       section += '=== Component updates\n\n'
+       section += 'This release adds the following new components:\n\n'
+
+       section += '[cols="1m,1a,1a,3a"]\n'
+       section += '|===\n'
+       section += '|Component |Type |Status |Description\n\n'
+
+       for (const comp of regularComponents) {
+         const typeLabel = comp.type.charAt(0).toUpperCase() + comp.type.slice(1)
+         const statusLabel = comp.status || '-'
+         let desc = comp.summary || (comp.description ? capToTwoSentences(comp.description) : '// TODO: Add description')
+
+         if (comp.requiresCgo) {
+           const cgoNote = '\nNOTE: Requires a cgo-enabled binary. See the xref:install:index.adoc[installation guides] for details.'
+           desc = desc.startsWith('// TODO') ? cgoNote : `${desc}\n\n${cgoNote}`
+         }
+
+         const typePlural = comp.type.endsWith('s') ? comp.type : `${comp.type}s`
+         section += `|xref:components:${typePlural}/${comp.name}.adoc[${comp.name}]\n`
+         section += `|${typeLabel}\n`
+         section += `|${statusLabel}\n`
+         section += `|${desc}\n\n`
+       }
+
+       section += '|===\n\n'
+     }
+
+     // New fields section
+     if (diff.details.newFields && diff.details.newFields.length) {
+       const regularFields = diff.details.newFields.filter(field => {
+         const [type] = field.component.split(':')
+         return type !== 'bloblang-functions' && type !== 'bloblang-methods'
+       })
+
+       if (regularFields.length > 0) {
+         section += '\n=== New field support\n\n'
+         section += 'This release adds support for the following new fields:\n\n'
+         section += buildFieldsTable(regularFields, capToTwoSentences)
+       }
+     }
+
+     // Deprecated components section
+     if (diff.details.deprecatedComponents && diff.details.deprecatedComponents.length) {
+       section += '\n=== Deprecations\n\n'
+       section += 'The following components are now deprecated:\n\n'
+
+       section += '[cols="1m,1a,3a"]\n'
+       section += '|===\n'
+       section += '|Component |Type |Description\n\n'
+
+       for (const comp of diff.details.deprecatedComponents) {
+         const typeLabel = comp.type.charAt(0).toUpperCase() + comp.type.slice(1)
+         const desc = comp.description ? capToTwoSentences(comp.description) : '-'
+
+         if (comp.type === 'bloblang-functions') {
+           section += `|xref:guides:bloblang/functions.adoc#${comp.name}[${comp.name}]\n`
+         } else if (comp.type === 'bloblang-methods') {
+           section += `|xref:guides:bloblang/methods.adoc#${comp.name}[${comp.name}]\n`
+         } else {
+           section += `|xref:components:${comp.type}/${comp.name}.adoc[${comp.name}]\n`
+         }
+         section += `|${typeLabel}\n`
+         section += `|${desc}\n\n`
+       }
+
+       section += '|===\n\n'
+     }
+
+     // Deprecated fields section
+     if (diff.details.deprecatedFields && diff.details.deprecatedFields.length) {
+       const regularDeprecatedFields = diff.details.deprecatedFields.filter(field => {
+         const [type] = field.component.split(':')
+         return type !== 'bloblang-functions' && type !== 'bloblang-methods'
+       })
+
+       if (regularDeprecatedFields.length > 0) {
+         if (!diff.details.deprecatedComponents || diff.details.deprecatedComponents.length === 0) {
+           section += '\n=== Deprecations\n\n'
+         } else {
+           section += '\n'
+         }
+         section += 'The following fields are now deprecated:\n\n'
+         section += buildFieldsTable(regularDeprecatedFields, capToTwoSentences)
+       }
+     }
+
+     // Changed defaults section
+     if (diff.details.changedDefaults && diff.details.changedDefaults.length) {
+       const regularChangedDefaults = diff.details.changedDefaults.filter(change => {
+         const [type] = change.component.split(':')
+         return type !== 'bloblang-functions' && type !== 'bloblang-methods'
+       })
+
+       if (regularChangedDefaults.length > 0) {
+         section += '\n=== Default value changes\n\n'
+         section += 'This release includes the following default value changes:\n\n'
+         section += buildChangedDefaultsTable(regularChangedDefaults, capToTwoSentences)
+       }
+     }
+
+     // Update the file
+     let contentWithoutOldSection = whatsNew
+     if (startIdx !== -1) {
+       contentWithoutOldSection = whatsNew.slice(0, startIdx) + whatsNew.slice(endIdx)
+     }
+
+     const versionHeading = /^== Version /m
+     const firstMatch = versionHeading.exec(contentWithoutOldSection)
+     const insertIdx = firstMatch ? firstMatch.index : contentWithoutOldSection.length
+
+     const updated = contentWithoutOldSection.slice(0, insertIdx) + section + '\n' + contentWithoutOldSection.slice(insertIdx)
+
+     if (startIdx !== -1) {
+       console.log(`♻️ whats-new.adoc: replaced section for Version ${diff.comparison.newVersion}`)
+     } else {
+       console.log(`Done: whats-new.adoc updated with Version ${diff.comparison.newVersion}`)
+     }
+
+     fs.writeFileSync(whatsNewPath, updated, 'utf8')
+   } catch (err) {
+     console.error(`Error: Failed to update whats-new.adoc: ${err.message}`)
+   }
+ }
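
Reviewer note: a hedged invocation sketch for updateWhatsNew. The version numbers and directory are placeholders; the only inputs the function actually requires are a data directory containing the connector diff JSON and the old/new version strings.

    // The diff JSON connect-diff-<old>_to_<new>.json must already exist in dataDir.
    updateWhatsNew({
      dataDir: 'docs-data',        // placeholder path
      oldVersion: '4.65.0',        // placeholder versions
      newVersion: '4.66.0',
      binaryAnalysis: null         // accepted but unused here; cgo/cloud details are read from the diff JSON
    })
    // Rewrites modules/get-started/pages/whats-new.adoc, inserting (or replacing) a
    // '== Version 4.66.0' section with subsections such as '=== Bloblang updates',
    // '=== Component updates', '=== New field support', '=== Deprecations', and
    // '=== Default value changes', depending on what the diff contains.
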
+
+ /**
+  * Build a fields table for whats-new.adoc
+  * @param {Array} fields - Field data
+  * @param {Function} capFn - Caption function
+  * @returns {string} AsciiDoc table
+  */
+ function buildFieldsTable (fields, capFn) {
+   const byField = {}
+   for (const field of fields) {
+     const [type, compName] = field.component.split(':')
+     if (!byField[field.field]) {
+       byField[field.field] = {
+         description: field.description,
+         components: []
+       }
+     }
+     byField[field.field].components.push({ type, name: compName })
+   }
+
+   let section = '[cols="1m,3,2a"]\n'
+   section += '|===\n'
+   section += '|Field |Description |Affected components\n\n'
+
+   for (const [fieldName, info] of Object.entries(byField)) {
+     const byType = {}
+     for (const comp of info.components) {
+       if (!byType[comp.type]) byType[comp.type] = []
+       byType[comp.type].push(comp.name)
+     }
+
+     let componentList = ''
+     for (const [type, names] of Object.entries(byType)) {
+       if (componentList) componentList += '\n\n'
+
+       const typeLabel = names.length === 1
+         ? type.charAt(0).toUpperCase() + type.slice(1)
+         : type.charAt(0).toUpperCase() + type.slice(1) + (type.endsWith('s') ? '' : 's')
+
+       componentList += `*${typeLabel}:*\n\n`
+       names.forEach(name => {
+         componentList += `* xref:components:${type}/${name}.adoc#${fieldName}[${name}]\n`
+       })
+     }
+
+     const desc = info.description ? capFn(info.description) : '// TODO: Add description'
+
+     section += `|${fieldName}\n`
+     section += `|${desc}\n`
+     section += `|${componentList}\n\n`
+   }
+
+   section += '|===\n\n'
+   return section
+ }
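
Reviewer note: the field entries that buildFieldsTable groups have roughly this shape. It is inferred from the code above, not a documented schema, and the field and connector names are illustrative.

    const newFields = [
      // component is '<type>:<name>'; entries with the same field name are grouped into one row
      { component: 'inputs:kafka', field: 'auto_replay_nacks', description: 'Whether to replay rejected messages.' },
      { component: 'outputs:kafka', field: 'auto_replay_nacks', description: 'Whether to replay rejected messages.' }
    ]

    const table = buildFieldsTable(newFields, capToTwoSentences)
    // Produces an AsciiDoc table with one row per field name and an
    // "Affected components" cell of xref links grouped by component type.
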
+
+ /**
+  * Build changed defaults table for whats-new.adoc
+  * @param {Array} changedDefaults - Changed defaults data
+  * @param {Function} capFn - Caption function
+  * @returns {string} AsciiDoc table
+  */
+ function buildChangedDefaultsTable (changedDefaults, capFn) {
+   const byFieldAndDefaults = {}
+   for (const change of changedDefaults) {
+     const [type, compName] = change.component.split(':')
+     const compositeKey = `${change.field}|${String(change.oldDefault)}|${String(change.newDefault)}`
+     if (!byFieldAndDefaults[compositeKey]) {
+       byFieldAndDefaults[compositeKey] = {
+         field: change.field,
+         oldDefault: change.oldDefault,
+         newDefault: change.newDefault,
+         description: change.description,
+         components: []
+       }
+     }
+     byFieldAndDefaults[compositeKey].components.push({ type, name: compName })
+   }
+
+   let section = '[cols="1m,1,1,3,2a"]\n'
+   section += '|===\n'
+   section += '|Field |Old default |New default |Description |Affected components\n\n'
+
+   for (const [, info] of Object.entries(byFieldAndDefaults)) {
+     const formatDefault = (val) => {
+       if (val === undefined || val === null) return 'none'
+       if (typeof val === 'string') return val
+       if (typeof val === 'number' || typeof val === 'boolean') return String(val)
+       return JSON.stringify(val)
+     }
+
+     const oldVal = formatDefault(info.oldDefault)
+     const newVal = formatDefault(info.newDefault)
+     const desc = info.description ? capFn(info.description) : '// TODO: Add description'
+
+     const byType = {}
+     for (const comp of info.components) {
+       if (!byType[comp.type]) byType[comp.type] = []
+       byType[comp.type].push(comp.name)
+     }
+
+     let componentList = ''
+     for (const [type, names] of Object.entries(byType)) {
+       if (componentList) componentList += '\n\n'
+
+       const typeLabel = names.length === 1
+         ? type.charAt(0).toUpperCase() + type.slice(1)
+         : type.charAt(0).toUpperCase() + type.slice(1) + (type.endsWith('s') ? '' : 's')
+
+       componentList += `*${typeLabel}:*\n\n`
+       names.forEach(name => {
+         componentList += `* xref:components:${type}/${name}.adoc#${info.field}[${name}]\n`
+       })
+     }
+
+     section += `|${info.field}\n`
+     section += `|${oldVal}\n`
+     section += `|${newVal}\n`
+     section += `|${desc}\n`
+     section += `|${componentList}\n\n`
+   }
+
+   section += '|===\n\n'
+   return section
+ }
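
Reviewer note: the companion input shape for buildChangedDefaultsTable, again inferred from the code above with illustrative names and values.

    const changedDefaults = [
      {
        component: 'outputs:aws_s3',          // '<type>:<name>', illustrative
        field: 'max_in_flight',
        oldDefault: 1,
        newDefault: 64,
        description: 'Maximum number of in-flight message batches.'
      }
    ]

    const table = buildChangedDefaultsTable(changedDefaults, capToTwoSentences)
    // Each row shows the field, the old and new defaults (non-string values are
    // stringified, undefined/null become 'none'), a capped description, and the
    // affected components grouped by type.
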
+
+ /**
+  * Log a collapsed list of files
+  * @param {string} label - Label for the list
+  * @param {Array} filesArray - Array of file paths
+  * @param {number} maxToShow - Maximum items to show
+  */
+ function logCollapsed (label, filesArray, maxToShow = 10) {
+   console.log(` • ${label}: ${filesArray.length} total`)
+   const sample = filesArray.slice(0, maxToShow)
+   sample.forEach(fp => console.log(` – ${fp}`))
+   const remaining = filesArray.length - sample.length
+   if (remaining > 0) {
+     console.log(` … plus ${remaining} more`)
+   }
+   console.log('')
+ }
+
+ /**
+  * Main handler for rpcn-connector-docs command
+  * @param {Object} options - Command options
+  */
+ async function handleRpcnConnectorDocs (options) {
+   const dataDir = path.resolve(process.cwd(), options.dataDir)
+   fs.mkdirSync(dataDir, { recursive: true })
+
+   const timestamp = new Date().toISOString()
+
+   let newVersion
+   let dataFile
+   let binaryAnalysis = null
+   let draftsWritten = 0
+   let draftFiles = []
+   let needsAugmentation = false
+
+   if (options.fetchConnectors) {
+     try {
+       if (options.connectVersion) {
+         console.log(`Installing Redpanda Connect version ${options.connectVersion}...`)
+         const installResult = spawnSync('rpk', ['connect', 'install', '--connect-version', options.connectVersion, '--force'], {
+           stdio: 'inherit'
+         })
+         if (installResult.status !== 0) {
+           throw new Error(`Failed to install Connect version ${options.connectVersion}`)
+         }
+         console.log(`Done: Installed Redpanda Connect version ${options.connectVersion}`)
+         newVersion = options.connectVersion
+       } else {
+         newVersion = getRpkConnectVersion()
+       }
+       console.log(`Fetching connector data from Connect ${newVersion}...`)
+
+       const tmpFile = path.join(dataDir, `connect-${newVersion}.tmp.json`)
+       const finalFile = path.join(dataDir, `connect-${newVersion}.json`)
+
+       const fd = fs.openSync(tmpFile, 'w')
+       const r = spawnSync('rpk', ['connect', 'list', '--format', 'json-full'], { stdio: ['ignore', fd, 'inherit'] })
+       fs.closeSync(fd)
+
+       const rawJson = fs.readFileSync(tmpFile, 'utf8')
+       const parsed = JSON.parse(rawJson)
+       fs.writeFileSync(finalFile, JSON.stringify(parsed, null, 2))
+       fs.unlinkSync(tmpFile)
+       dataFile = finalFile
+       needsAugmentation = true
+       console.log(`Done: Fetched connector data for version ${newVersion}`)
+
+       // Fetch info.csv
+       try {
+         console.log(`Fetching info.csv for Connect v${newVersion}...`)
+         const csvFile = path.join(dataDir, `connect-info-${newVersion}.csv`)
+
+         if (!fs.existsSync(csvFile)) {
+           await fetchFromGithub(
+             'redpanda-data',
+             'connect',
+             'internal/plugins/info.csv',
+             dataDir,
+             `connect-info-${newVersion}.csv`,
+             `v${newVersion}`
+           )
+           console.log(`Done: Fetched info.csv for version ${newVersion}`)
+         } else {
+           console.log(`✓ CSV already exists: connect-info-${newVersion}.csv`)
+         }
+       } catch (csvErr) {
+         console.warn(`Warning: Failed to fetch info.csv: ${csvErr.message}`)
+       }
+
+       // Fetch Bloblang examples
+       try {
+         console.log(`Fetching Bloblang playground examples for Connect v${newVersion}...`)
+         const examplesFile = path.join(dataDir, `bloblang-samples-${newVersion}.json`)
+
+         if (!fs.existsSync(examplesFile)) {
+           const tempExamplesDir = path.join(dataDir, `temp-playground-${newVersion}`)
+           await fetchFromGithub(
+             'redpanda-data',
+             'connect',
+             'docs/guides/bloblang/playground',
+             tempExamplesDir,
+             null,
+             `v${newVersion}`
+           )
+
+           const yaml = require('js-yaml')
+           const bloblangSamples = {}
+           const files = fs.readdirSync(tempExamplesDir).filter(f => f.endsWith('.yaml') || f.endsWith('.yml'))
+
+           for (const file of files) {
+             try {
+               const content = fs.readFileSync(path.join(tempExamplesDir, file), 'utf8')
+               const parsedYaml = yaml.load(content)
+               if (parsedYaml.title && parsedYaml.input && parsedYaml.mapping) {
+                 bloblangSamples[file] = parsedYaml
+               }
+             } catch (err) {
+               console.warn(`Warning: Failed to parse ${file}: ${err.message}`)
+             }
+           }
+
+           fs.writeFileSync(examplesFile, JSON.stringify(bloblangSamples, null, 2))
+           fs.rmSync(tempExamplesDir, { recursive: true, force: true })
+           console.log(`Done: Fetched ${Object.keys(bloblangSamples).length} Bloblang examples`)
+         } else {
+           console.log(`✓ Bloblang samples already exist: bloblang-samples-${newVersion}.json`)
+         }
+       } catch (examplesErr) {
+         console.warn(`Warning: Failed to fetch Bloblang examples: ${examplesErr.message}`)
+       }
+     } catch (err) {
+       console.error(`Error: Failed to fetch connectors: ${err.message}`)
+       process.exit(1)
+     }
+   } else {
+     const candidates = fs.readdirSync(dataDir).filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
+     if (candidates.length === 0) {
+       console.error('Error: No connect-<version>.json found. Use --fetch-connectors.')
+       process.exit(1)
+     }
+     candidates.sort()
+     dataFile = path.join(dataDir, candidates[candidates.length - 1])
+     newVersion = candidates[candidates.length - 1].match(/connect-(\d+\.\d+\.\d+)\.json/)[1]
+   }
+
+   console.log('Generating connector partials...')
+   let partialsWritten, partialFiles
+
+   try {
+     const result = await generateRpcnConnectorDocs({
+       data: dataFile,
+       overrides: options.overrides,
+       template: options.templateMain,
+       templateIntro: options.templateIntro,
+       templateFields: options.templateFields,
+       templateExamples: options.templateExamples,
+       templateBloblang: options.templateBloblang,
+       writeFullDrafts: false,
+       includeBloblang: !!options.includeBloblang
+     })
+     partialsWritten = result.partialsWritten
+     partialFiles = result.partialFiles
+   } catch (err) {
+     console.error(`Error: Failed to generate partials: ${err.message}`)
+     process.exit(1)
+   }
+
+   let oldIndex = {}
+   let oldVersion = null
+   if (options.oldData && fs.existsSync(options.oldData)) {
+     oldIndex = JSON.parse(fs.readFileSync(options.oldData, 'utf8'))
+     const m = options.oldData.match(/connect-([\d.]+)\.json$/)
+     if (m) oldVersion = m[1]
+   } else {
+     const existingDataFiles = fs.readdirSync(dataDir)
+       .filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
+       .filter(f => f !== path.basename(dataFile))
+       .sort()
+
+     if (existingDataFiles.length > 0) {
+       const oldFile = existingDataFiles[existingDataFiles.length - 1]
+       oldVersion = oldFile.match(/connect-(\d+\.\d+\.\d+)\.json/)[1]
+       const oldPath = path.join(dataDir, oldFile)
+       oldIndex = JSON.parse(fs.readFileSync(oldPath, 'utf8'))
+       console.log(`📋 Using old version data: ${oldFile}`)
+     } else {
+       oldVersion = getAntoraValue('asciidoc.attributes.latest-connect-version')
+       if (oldVersion) {
+         const oldPath = path.join(dataDir, `connect-${oldVersion}.json`)
+         if (fs.existsSync(oldPath)) {
+           oldIndex = JSON.parse(fs.readFileSync(oldPath, 'utf8'))
+         }
+       }
+     }
+   }
+
+   let newIndex = JSON.parse(fs.readFileSync(dataFile, 'utf8'))
+
+   // Save a clean copy of OSS data for binary analysis (before augmentation)
+   // This ensures the binary analyzer compares actual binaries, not augmented data
+   const cleanOssDataPath = path.join(dataDir, `._connect-${newVersion}-clean.json`)
+
+   // Strip augmentation fields to create clean data for comparison
+   const cleanData = JSON.parse(JSON.stringify(newIndex))
+   const connectorTypes = ['inputs', 'outputs', 'processors', 'caches', 'rate_limits', 'buffers', 'metrics', 'scanners', 'tracers']
+
+   for (const type of connectorTypes) {
+     if (Array.isArray(cleanData[type])) {
+       cleanData[type] = cleanData[type].filter(c => !c.cloudOnly) // Remove cloud-only connectors added by augmentation
+       cleanData[type].forEach(c => {
+         delete c.cloudSupported
+         delete c.requiresCgo
+         delete c.cloudOnly
+       })
+     }
+   }
+
+   fs.writeFileSync(cleanOssDataPath, JSON.stringify(cleanData, null, 2), 'utf8')
+
+   const versionsMatch = oldVersion && newVersion && oldVersion === newVersion
+   if (versionsMatch) {
+     console.log(`\n✓ Already at version ${newVersion}`)
+     console.log(' Skipping diff generation, but will run binary analysis.\n')
+   }
+
+   // Publish merged version
+   if (options.overrides && fs.existsSync(options.overrides)) {
+     try {
+       const { mergeOverrides, resolveReferences } = require('./generate-rpcn-connector-docs.js')
+
+       const mergedData = JSON.parse(JSON.stringify(newIndex))
+       const ovRaw = fs.readFileSync(options.overrides, 'utf8')
+       const ovObj = JSON.parse(ovRaw)
+       const resolvedOverrides = resolveReferences(ovObj, ovObj)
+       mergeOverrides(mergedData, resolvedOverrides)
+
+       const attachmentsRoot = path.resolve(process.cwd(), 'modules/components/attachments')
+       fs.mkdirSync(attachmentsRoot, { recursive: true })
+
+       const existingFiles = fs.readdirSync(attachmentsRoot)
+         .filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
+         .sort()
+
+       for (const oldFile of existingFiles) {
+         const oldFilePath = path.join(attachmentsRoot, oldFile)
+         fs.unlinkSync(oldFilePath)
+         console.log(`🧹 Deleted old version: ${oldFile}`)
+       }
+
+       const destFile = path.join(attachmentsRoot, `connect-${newVersion}.json`)
+       fs.writeFileSync(destFile, JSON.stringify(mergedData, null, 2), 'utf8')
+       console.log(`Done: Published merged version to: ${path.relative(process.cwd(), destFile)}`)
+     } catch (err) {
+       console.error(`Error: Failed to publish merged version: ${err.message}`)
+     }
+   }
+
+   printDeltaReport(oldIndex, newIndex)
+
+   // Binary analysis
+   let oldBinaryAnalysis = null
+
+   if (oldVersion) {
+     const standalonePath = path.join(dataDir, `binary-analysis-${oldVersion}.json`)
+     if (fs.existsSync(standalonePath)) {
+       try {
+         oldBinaryAnalysis = JSON.parse(fs.readFileSync(standalonePath, 'utf8'))
+         console.log(`✓ Loaded old binary analysis from: binary-analysis-${oldVersion}.json`)
+       } catch (err) {
+         console.warn(`Warning: Failed to load ${standalonePath}: ${err.message}`)
+       }
+     }
+
+     if (!oldBinaryAnalysis) {
+       const diffFiles = fs.readdirSync(dataDir)
+         .filter(f => f.startsWith('connect-diff-') && f.endsWith(`_to_${oldVersion}.json`))
+         .sort()
+         .reverse()
+
+       for (const file of diffFiles) {
+         const diffPath = path.join(dataDir, file)
+         try {
+           const oldDiff = JSON.parse(fs.readFileSync(diffPath, 'utf8'))
+           if (oldDiff.binaryAnalysis) {
+             oldBinaryAnalysis = {
+               comparison: {
+                 inCloud: oldDiff.binaryAnalysis.details?.cloudSupported || [],
+                 notInCloud: oldDiff.binaryAnalysis.details?.selfHostedOnly || []
+               },
+               cgoOnly: oldDiff.binaryAnalysis.details?.cgoOnly || []
+             }
+             console.log(`✓ Loaded old binary analysis from: ${file}`)
+             break
+           }
+         } catch {
+           // Continue to next file
+         }
+       }
+     }
+   }
+
+   try {
+     console.log('\nAnalyzing connector binaries...')
+     const { analyzeAllBinaries } = require('./connector-binary-analyzer.js')
+
+     // Always use clean OSS data for comparison
+     // Temporarily rename the file so the analyzer finds it
+     const expectedPath = path.join(dataDir, `connect-${newVersion}.json`)
+     let tempRenamed = false
+
+     if (fs.existsSync(cleanOssDataPath)) {
+       if (fs.existsSync(expectedPath)) {
+         fs.renameSync(expectedPath, path.join(dataDir, `._connect-${newVersion}-augmented.json.tmp`))
+         tempRenamed = true
+       }
+       fs.copyFileSync(cleanOssDataPath, expectedPath)
+     }
+
+     const analysisOptions = {
+       skipCloud: false,
+       skipCgo: false,
+       cgoVersion: options.cgoVersion || null
+     }
+
+     binaryAnalysis = await analyzeAllBinaries(
+       newVersion,
+       options.cloudVersion || null,
+       dataDir,
+       analysisOptions
+     )
+
+     // Restore the augmented file
+     if (tempRenamed) {
+       const expectedPath = path.join(dataDir, `connect-${newVersion}.json`)
+       fs.unlinkSync(expectedPath)
+       fs.renameSync(path.join(dataDir, `._connect-${newVersion}-augmented.json.tmp`), expectedPath)
+     }
+
+     console.log('Done: Binary analysis complete:')
+     console.log(` • OSS version: ${binaryAnalysis.ossVersion}`)
+
+     if (binaryAnalysis.cloudVersion) {
+       console.log(` • Cloud version: ${binaryAnalysis.cloudVersion}`)
+     }
+
+     if (binaryAnalysis.comparison) {
+       console.log(` • Connectors in cloud: ${binaryAnalysis.comparison.inCloud.length}`)
+       console.log(` • Self-hosted only: ${binaryAnalysis.comparison.notInCloud.length}`)
+       if (binaryAnalysis.comparison.cloudOnly && binaryAnalysis.comparison.cloudOnly.length > 0) {
+         console.log(` • Cloud-only connectors: ${binaryAnalysis.comparison.cloudOnly.length}`)
+       }
+     }
+
+     if (binaryAnalysis.cgoOnly && binaryAnalysis.cgoOnly.length > 0) {
+       console.log(` • cgo-only connectors: ${binaryAnalysis.cgoOnly.length}`)
+     }
+   } catch (err) {
+     console.error(`Warning: Binary analysis failed: ${err.message}`)
+     console.error(' Continuing without binary analysis data...')
+   }
+
+   // Augment data file
+   if (needsAugmentation && binaryAnalysis) {
+     try {
+       console.log('\nAugmenting connector data with cloud/cgo fields...')
+
+       const connectorData = JSON.parse(fs.readFileSync(dataFile, 'utf8'))
+
+       const cloudSet = new Set(
+         (binaryAnalysis.comparison?.inCloud || []).map(c => `${c.type}:${c.name}`)
+       )
+       const cgoOnlySet = new Set(
+         (binaryAnalysis.cgoOnly || []).map(c => `${c.type}:${c.name}`)
+       )
+
+       let augmentedCount = 0
+       let addedCgoCount = 0
+       let addedCloudOnlyCount = 0
+
+       const connectorTypes = ['inputs', 'outputs', 'processors', 'caches', 'rate_limits',
+         'buffers', 'metrics', 'scanners', 'tracers']
+
+       for (const type of connectorTypes) {
+         if (!Array.isArray(connectorData[type])) {
+           connectorData[type] = []
+         }
+
+         for (const connector of connectorData[type]) {
+           const key = `${type}:${connector.name}`
+           connector.cloudSupported = cloudSet.has(key)
+           connector.requiresCgo = cgoOnlySet.has(key)
+           augmentedCount++
+         }
+
+         if (binaryAnalysis.cgoOnly) {
+           for (const cgoConn of binaryAnalysis.cgoOnly) {
+             if (cgoConn.type === type) {
+               const exists = connectorData[type].some(c => c.name === cgoConn.name)
+               if (!exists) {
+                 connectorData[type].push({
+                   ...cgoConn,
+                   type: cgoConn.type.replace(/s$/, ''),
+                   cloudSupported: false,
+                   requiresCgo: true
+                 })
+                 addedCgoCount++
+               }
+             }
+           }
+         }
+
+         if (binaryAnalysis.comparison?.cloudOnly) {
+           for (const cloudConn of binaryAnalysis.comparison.cloudOnly) {
+             if (cloudConn.type === type) {
+               const exists = connectorData[type].some(c => c.name === cloudConn.name)
+               if (!exists) {
+                 connectorData[type].push({
+                   ...cloudConn,
+                   type: cloudConn.type.replace(/s$/, ''),
+                   cloudSupported: true,
+                   requiresCgo: false,
+                   cloudOnly: true
+                 })
+                 addedCloudOnlyCount++
+               }
+             }
+           }
+         }
+       }
+
+       fs.writeFileSync(dataFile, JSON.stringify(connectorData, null, 2), 'utf8')
+       console.log(`Done: Augmented ${augmentedCount} connectors with cloud/cgo fields`)
+       if (addedCgoCount > 0) {
+         console.log(` • Added ${addedCgoCount} cgo-only connectors to data file`)
+       }
+       if (addedCloudOnlyCount > 0) {
+         console.log(` • Added ${addedCloudOnlyCount} cloud-only connectors to data file`)
+       }
+
+       // Keep only 2 most recent versions
+       const dataFiles = fs.readdirSync(dataDir)
+         .filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
+         .sort()
+
+       while (dataFiles.length > 2) {
+         const oldestFile = dataFiles.shift()
+         const oldestPath = path.join(dataDir, oldestFile)
+         fs.unlinkSync(oldestPath)
+         console.log(`🧹 Deleted old version from docs-data: ${oldestFile}`)
+       }
+     } catch (err) {
+       console.error(`Warning: Failed to augment data file: ${err.message}`)
+     }
+   }
+
+   // Generate diff JSON
+   let diffJson = null
+   if (!oldVersion) {
+     console.warn('Warning: Skipping diff generation: oldVersion not available')
+   } else if (versionsMatch) {
+     console.log(`⏭️ Skipping diff generation: versions match (${oldVersion} === ${newVersion})`)
+   } else {
+     const { generateConnectorDiffJson } = require('./report-delta.js')
+     diffJson = generateConnectorDiffJson(
+       oldIndex,
+       newIndex,
+       {
+         oldVersion: oldVersion,
+         newVersion,
+         timestamp,
+         binaryAnalysis,
+         oldBinaryAnalysis
+       }
+     )
+
+     // Filter out components that already have documentation
+     const docRoots = {
+       pages: path.resolve(process.cwd(), 'modules/components/pages'),
+       partials: path.resolve(process.cwd(), 'modules/components/partials/components'),
+       cloudOnly: path.resolve(process.cwd(), 'modules/components/partials/components/cloud-only')
+     }
+
+     if (diffJson.details && diffJson.details.newComponents) {
+       const originalCount = diffJson.details.newComponents.length
+       diffJson.details.newComponents = diffJson.details.newComponents.filter(comp => {
+         const typePlural = comp.type.endsWith('s') ? comp.type : `${comp.type}s`
+         const relPath = path.join(typePlural, `${comp.name}.adoc`)
+         const docsExist = Object.values(docRoots).some(root =>
+           fs.existsSync(path.join(root, relPath))
+         )
+         return !docsExist
+       })
+       const filteredCount = originalCount - diffJson.details.newComponents.length
+       if (filteredCount > 0) {
+         console.log(` ℹ️ Filtered out ${filteredCount} components that already have documentation`)
+       }
+       // Update summary count
+       if (diffJson.summary) {
+         diffJson.summary.newComponents = diffJson.details.newComponents.length
+       }
+     }
+
+     // Add new cgo-only components
+     if (binaryAnalysis && binaryAnalysis.cgoOnly && binaryAnalysis.cgoOnly.length > 0) {
+       // Define roots for checking if docs already exist
+       const docRoots = {
+         pages: path.resolve(process.cwd(), 'modules/components/pages'),
+         partials: path.resolve(process.cwd(), 'modules/components/partials/components'),
+         cloudOnly: path.resolve(process.cwd(), 'modules/components/partials/components/cloud-only')
+       }
+
+       let newCgoComponents
+
+       if (oldBinaryAnalysis) {
+         const oldCgoSet = new Set((oldBinaryAnalysis.cgoOnly || []).map(c => `${c.type}:${c.name}`))
+         newCgoComponents = binaryAnalysis.cgoOnly.filter(cgoComp => {
+           const wasInOldOss = oldIndex[cgoComp.type]?.some(c => c.name === cgoComp.name)
+           const wasInOldCgo = oldCgoSet.has(`${cgoComp.type}:${cgoComp.name}`)
+
+           // Check if docs already exist
+           const typePlural = cgoComp.type.endsWith('s') ? cgoComp.type : `${cgoComp.type}s`
+           const relPath = path.join(typePlural, `${cgoComp.name}.adoc`)
+           const docsExist = Object.values(docRoots).some(root =>
+             fs.existsSync(path.join(root, relPath))
+           )
+
+           return !wasInOldOss && !wasInOldCgo && !docsExist
+         })
+       } else {
+         newCgoComponents = binaryAnalysis.cgoOnly.filter(cgoComp => {
+           const wasInOldOss = oldIndex[cgoComp.type]?.some(c => c.name === cgoComp.name)
+
+           // Check if docs already exist
+           const typePlural = cgoComp.type.endsWith('s') ? cgoComp.type : `${cgoComp.type}s`
+           const relPath = path.join(typePlural, `${cgoComp.name}.adoc`)
+           const docsExist = Object.values(docRoots).some(root =>
+             fs.existsSync(path.join(root, relPath))
+           )
+
+           return !wasInOldOss && !docsExist
+         })
+         if (newCgoComponents.length > 0) {
+           console.log(` ℹ️ No old binary analysis found - treating ${newCgoComponents.length} cgo components not in old OSS data as new`)
+         }
+       }
+
+       if (newCgoComponents && newCgoComponents.length > 0) {
+         console.log(` • Found ${newCgoComponents.length} new cgo-only components`)
+         newCgoComponents.forEach(cgoComp => {
+           const typeSingular = cgoComp.type.replace(/s$/, '')
+           diffJson.details.newComponents.push({
+             name: cgoComp.name,
+             type: typeSingular,
+             status: cgoComp.status || '',
+             version: '',
+             description: cgoComp.description || '',
+             summary: cgoComp.summary || ''
+           })
+         })
+       }
+     }
+
+     const diffPath = path.join(dataDir, `connect-diff-${oldVersion}_to_${newVersion}.json`)
+     fs.writeFileSync(diffPath, JSON.stringify(diffJson, null, 2), 'utf8')
+     console.log(`Done: Connector diff JSON written to: ${diffPath}`)
+     if (diffJson.binaryAnalysis) {
+       console.log(` • Includes binary analysis: OSS ${diffJson.binaryAnalysis.versions.oss}, Cloud ${diffJson.binaryAnalysis.versions.cloud || 'N/A'}, cgo ${diffJson.binaryAnalysis.versions.cgo || 'N/A'}`)
+     }
+
+     // Cleanup old diff files
+     try {
+       const oldDiffFiles = fs.readdirSync(dataDir)
+         .filter(f => f.startsWith('connect-diff-') && f.endsWith('.json') && f !== path.basename(diffPath))
+
+       if (oldDiffFiles.length > 0) {
+         console.log(`🧹 Cleaning up ${oldDiffFiles.length} old diff files...`)
+         oldDiffFiles.forEach(f => {
+           const oldDiffPath = path.join(dataDir, f)
+           fs.unlinkSync(oldDiffPath)
+           console.log(` • Deleted: ${f}`)
+         })
+       }
+     } catch (err) {
+       console.warn(`Warning: Failed to clean up old diff files: ${err.message}`)
+     }
+   }
+
+   // Draft missing connectors
+   if (options.draftMissing) {
+     console.log('\nDrafting missing connectors…')
+     try {
+       const rawData = fs.readFileSync(dataFile, 'utf8')
+       const dataObj = JSON.parse(rawData)
+
+       const validConnectors = []
+       const types = ['inputs', 'outputs', 'processors', 'caches', 'rate_limits', 'buffers', 'metrics', 'scanners', 'tracers']
+       types.forEach(type => {
+         if (Array.isArray(dataObj[type])) {
+           dataObj[type].forEach(connector => {
+             if (connector.name) {
+               validConnectors.push({
+                 name: connector.name,
+                 type: type.replace(/s$/, ''),
+                 status: connector.status || connector.type || 'stable'
+               })
+             }
+           })
+         }
+       })
+
+       // Add cgo-only connectors
+       if (binaryAnalysis && binaryAnalysis.cgoOnly) {
+         binaryAnalysis.cgoOnly.forEach(cgoConn => {
+           const exists = validConnectors.some(c =>
+             c.name === cgoConn.name && c.type === cgoConn.type.replace(/s$/, '')
+           )
+           if (!exists) {
+             validConnectors.push({
+               name: cgoConn.name,
+               type: cgoConn.type.replace(/s$/, ''),
+               status: cgoConn.status || 'stable',
+               requiresCgo: true
+             })
+           }
+         })
+       }
+
+       // Add cloud-only connectors
+       if (binaryAnalysis && binaryAnalysis.comparison?.cloudOnly) {
+         binaryAnalysis.comparison.cloudOnly.forEach(cloudConn => {
+           const exists = validConnectors.some(c =>
+             c.name === cloudConn.name && c.type === cloudConn.type.replace(/s$/, '')
+           )
+           if (!exists) {
+             validConnectors.push({
+               name: cloudConn.name,
+               type: cloudConn.type.replace(/s$/, ''),
+               status: cloudConn.status || 'stable',
+               cloudOnly: true
+             })
+           }
+         })
+       }
+
+       const roots = {
+         pages: path.resolve(process.cwd(), 'modules/components/pages'),
+         partials: path.resolve(process.cwd(), 'modules/components/partials/components'),
+         cloudOnly: path.resolve(process.cwd(), 'modules/components/partials/components/cloud-only')
+       }
+
+       const allMissing = validConnectors.filter(({ name, type }) => {
+         const relPath = path.join(`${type}s`, `${name}.adoc`)
+         const existsInAny = Object.values(roots).some(root =>
+           fs.existsSync(path.join(root, relPath))
+         )
+         return !existsInAny
+       })
+
+       const missingConnectors = allMissing.filter(c =>
+         !c.name.includes('sql_driver') &&
+         c.status !== 'deprecated'
+       )
+
+       if (missingConnectors.length === 0) {
+         console.log('Done: All connectors (excluding sql_drivers) already have docs—nothing to draft.')
+       } else {
+         console.log(`Docs missing for ${missingConnectors.length} connectors:`)
+         missingConnectors.forEach(({ name, type }) => {
+           console.log(` • ${type}/${name}`)
+         })
+         console.log('')
+
+         const filteredDataObj = {}
+
+         for (const [key, arr] of Object.entries(dataObj)) {
+           if (!Array.isArray(arr)) {
+             filteredDataObj[key] = arr
+             continue
+           }
+           filteredDataObj[key] = arr.filter(component =>
+             missingConnectors.some(
+               m => m.name === component.name && `${m.type}s` === key
+             )
+           )
+         }
+
+         const cgoMissing = missingConnectors.filter(m => m.requiresCgo)
+         if (cgoMissing.length > 0 && binaryAnalysis && binaryAnalysis.cgoIndex) {
+           console.log('Fetching cgo-only connector schemas for drafting...')
+           cgoMissing.forEach(cgo => {
+             const typeKey = `${cgo.type}s`
+             if (binaryAnalysis.cgoIndex[typeKey]) {
+               const cgoConnector = binaryAnalysis.cgoIndex[typeKey].find(c => c.name === cgo.name)
+               if (cgoConnector) {
+                 if (!filteredDataObj[typeKey]) filteredDataObj[typeKey] = []
+                 filteredDataObj[typeKey].push(cgoConnector)
+                 console.log(` • Added cgo connector schema: ${cgo.type}/${cgo.name}`)
+               }
+             }
+           })
+         }
+
+         const cloudMissing = missingConnectors.filter(m => m.cloudOnly)
+         if (cloudMissing.length > 0 && binaryAnalysis && binaryAnalysis.cloudIndex) {
+           console.log('Fetching cloud-only connector schemas for drafting...')
+           cloudMissing.forEach(cloud => {
+             const typeKey = `${cloud.type}s`
+             if (binaryAnalysis.cloudIndex[typeKey]) {
+               const cloudConnector = binaryAnalysis.cloudIndex[typeKey].find(c => c.name === cloud.name)
+               if (cloudConnector) {
+                 if (!filteredDataObj[typeKey]) filteredDataObj[typeKey] = []
+                 filteredDataObj[typeKey].push(cloudConnector)
+                 console.log(` • Added cloud-only connector schema: ${cloud.type}/${cloud.name}`)
+               }
+             }
+           })
+         }
+
+         const tempDataPath = path.join(dataDir, '._filtered_connect_data.json')
+         fs.writeFileSync(tempDataPath, JSON.stringify(filteredDataObj, null, 2), 'utf8')
+
+         const draftResult = await generateRpcnConnectorDocs({
+           data: tempDataPath,
+           overrides: options.overrides,
+           template: options.templateMain,
+           templateFields: options.templateFields,
+           templateExamples: options.templateExamples,
+           templateIntro: options.templateIntro,
+           writeFullDrafts: true,
+           cgoOnly: binaryAnalysis?.cgoOnly || [],
+           cloudOnly: binaryAnalysis?.comparison?.cloudOnly || []
+         })
+
+         fs.unlinkSync(tempDataPath)
+         draftsWritten = draftResult.draftsWritten
+         draftFiles = draftResult.draftFiles
+       }
+     } catch (err) {
+       console.error(`Error: Could not draft missing: ${err.message}`)
+       process.exit(1)
+     }
+   }
+
+   // Update nav.adoc if drafts were generated
+   if (draftFiles && draftFiles.length > 0) {
+     try {
+       const { updateNavFromDrafts } = require('./update-nav.js')
+       const navResult = updateNavFromDrafts(draftFiles)
+
+       if (navResult.updated > 0) {
+         console.log(`\nDone: Updated nav.adoc: added ${navResult.updated} connector${navResult.updated !== 1 ? 's' : ''}`)
+         navResult.updates.forEach(u => {
+           console.log(` • ${u.type}/${u.name}`)
+         })
+       }
+
+       if (navResult.skippedCount > 0) {
+         console.log(`\nℹ️ Skipped ${navResult.skippedCount} connector${navResult.skippedCount !== 1 ? 's' : ''}:`)
+         navResult.skipped.forEach(s => {
+           console.log(` • ${s.type}/${s.name} (${s.reason})`)
+         })
+       }
+     } catch (err) {
+       console.error(`Warning: Failed to update nav.adoc: ${err.message}`)
+     }
+   }
+
+   // Generate PR summary
+   try {
+     const { printPRSummary } = require('./pr-summary-formatter.js')
+     printPRSummary(diffJson, binaryAnalysis, draftFiles)
+   } catch (err) {
+     console.error(`Warning: Failed to generate PR summary: ${err.message}`)
+   }
+
+   const wrote = setAntoraValue('asciidoc.attributes.latest-connect-version', newVersion)
+   if (wrote) {
+     console.log(`Done: Updated Antora version: ${newVersion}`)
+   }
+
+   console.log('Generation Report:')
+   console.log(` • Partial files: ${partialsWritten}`)
+   const fieldsPartials = partialFiles.filter(fp => fp.includes('/fields/'))
+   const examplesPartials = partialFiles.filter(fp => fp.includes('/examples/'))
+
+   logCollapsed('Fields partials', fieldsPartials, 10)
+   logCollapsed('Examples partials', examplesPartials, 10)
+
+   if (options.draftMissing) {
+     console.log(` • Full drafts: ${draftsWritten}`)
+     const draftFilePaths = draftFiles.map(df => typeof df === 'string' ? df : df.path)
+     logCollapsed('Draft files', draftFilePaths, 5)
+   }
+
+   // Update whats-new.adoc
+   if (options.updateWhatsNew) {
+     if (!oldVersion) {
+       console.warn('Warning: Skipping whats-new update: oldVersion not available')
+     } else {
+       updateWhatsNew({ dataDir, oldVersion, newVersion, binaryAnalysis })
+     }
+   }
+
+   console.log('\n📄 Summary:')
+   console.log(` • Run time: ${timestamp}`)
+   console.log(` • Version used: ${newVersion}`)
+   process.exit(0)
+ }
+
+ module.exports = {
+   handleRpcnConnectorDocs,
+   updateWhatsNew,
+   capToTwoSentences
+ }
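
Reviewer note: a hedged sketch of how the exported handler is likely invoked from the doc-tools CLI. Only option names that the handler reads above are shown; the require path, the CLI wiring, and the sample values are assumptions, not part of this diff. The handler is async and calls process.exit when it finishes.

    const { handleRpcnConnectorDocs } = require('./tools/redpanda-connect/rpcn-connector-docs-handler.js')

    handleRpcnConnectorDocs({
      dataDir: 'docs-data',        // where connect-<version>.json and the diff JSON are stored
      fetchConnectors: true,       // run `rpk connect list --format json-full` to refresh the data
      connectVersion: undefined,   // optionally pin a Connect version to install via rpk
      overrides: 'overrides.json', // optional overrides merged before publishing the attachment
      draftMissing: true,          // write full drafts for connectors without docs
      updateWhatsNew: true         // regenerate the whats-new.adoc section
    })
    // Other options read by the handler (template*, includeBloblang, oldData,
    // cloudVersion, cgoVersion) are omitted here for brevity.
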