@redpanda-data/docs-extensions-and-macros 4.13.1 → 4.13.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. package/bin/doc-tools-mcp.js +15 -3
  2. package/bin/doc-tools.js +767 -2088
  3. package/bin/mcp-tools/property-docs.js +18 -0
  4. package/bin/mcp-tools/rpcn-docs.js +28 -3
  5. package/cli-utils/antora-utils.js +53 -2
  6. package/cli-utils/dependencies.js +313 -0
  7. package/cli-utils/diff-utils.js +273 -0
  8. package/cli-utils/doc-tools-utils.js +54 -0
  9. package/extensions/algolia-indexer/generate-index.js +134 -102
  10. package/extensions/algolia-indexer/index.js +70 -38
  11. package/extensions/collect-bloblang-samples.js +2 -1
  12. package/extensions/generate-rp-connect-categories.js +126 -67
  13. package/extensions/generate-rp-connect-info.js +291 -137
  14. package/macros/rp-connect-components.js +34 -5
  15. package/package.json +4 -3
  16. package/tools/add-commercial-names.js +207 -0
  17. package/tools/generate-cli-docs.js +6 -2
  18. package/tools/get-console-version.js +5 -0
  19. package/tools/get-redpanda-version.js +5 -0
  20. package/tools/property-extractor/compare-properties.js +3 -3
  21. package/tools/property-extractor/generate-handlebars-docs.js +14 -14
  22. package/tools/property-extractor/generate-pr-summary.js +46 -0
  23. package/tools/property-extractor/pr-summary-formatter.js +375 -0
  24. package/tools/redpanda-connect/README.adoc +403 -38
  25. package/tools/redpanda-connect/connector-binary-analyzer.js +588 -0
  26. package/tools/redpanda-connect/generate-rpcn-connector-docs.js +97 -34
  27. package/tools/redpanda-connect/parse-csv-connectors.js +1 -1
  28. package/tools/redpanda-connect/pr-summary-formatter.js +601 -0
  29. package/tools/redpanda-connect/report-delta.js +69 -2
  30. package/tools/redpanda-connect/rpcn-connector-docs-handler.js +1180 -0
  31. package/tools/redpanda-connect/templates/connector.hbs +38 -0
  32. package/tools/redpanda-connect/templates/intro.hbs +0 -20
  33. package/tools/redpanda-connect/update-nav.js +205 -0
@@ -0,0 +1,1180 @@
1
+ 'use strict'
2
+
3
+ const { spawnSync } = require('child_process')
4
+ const path = require('path')
5
+ const fs = require('fs')
6
+ const { findRepoRoot } = require('../../cli-utils/doc-tools-utils')
7
+ const { getAntoraValue, setAntoraValue } = require('../../cli-utils/antora-utils')
8
+ const fetchFromGithub = require('../fetch-from-github.js')
9
+ const { generateRpcnConnectorDocs } = require('./generate-rpcn-connector-docs.js')
10
+ const { getRpkConnectVersion, printDeltaReport } = require('./report-delta')
11
+
12
+ /**
13
+ * Cap description to two sentences
14
+ * @param {string} description - Full description text
15
+ * @returns {string} Description capped to two sentences
16
+ */
17
+ function capToTwoSentences (description) {
18
+ if (!description) return ''
19
+
20
+ const hasProblematicContent = (text) => {
21
+ return /```[\s\S]*?```/.test(text) ||
22
+ /`[^`]+`/.test(text) ||
23
+ /^[=#]+\s+.+$/m.test(text) ||
24
+ /\n/.test(text)
25
+ }
26
+
27
+ const abbreviations = [
28
+ /\bv\d+\.\d+(?:\.\d+)?/gi,
29
+ /\d+\.\d+/g,
30
+ /\be\.g\./gi,
31
+ /\bi\.e\./gi,
32
+ /\betc\./gi,
33
+ /\bvs\./gi,
34
+ /\bDr\./gi,
35
+ /\bMr\./gi,
36
+ /\bMs\./gi,
37
+ /\bMrs\./gi,
38
+ /\bSt\./gi,
39
+ /\bNo\./gi
40
+ ]
41
+
42
+ let normalized = description
43
+ const placeholders = []
44
+
45
+ abbreviations.forEach((abbrevRegex, idx) => {
46
+ normalized = normalized.replace(abbrevRegex, (match) => {
47
+ const placeholder = `__ABBREV${idx}_${placeholders.length}__`
48
+ placeholders.push({ placeholder, original: match })
49
+ return placeholder
50
+ })
51
+ })
52
+
53
+ normalized = normalized.replace(/\.{3,}/g, (match) => {
54
+ const placeholder = `__ELLIPSIS_${placeholders.length}__`
55
+ placeholders.push({ placeholder, original: match })
56
+ return placeholder
57
+ })
58
+
59
+ const sentenceRegex = /[^.!?]+[.!?]+(?:\s|$)/g
60
+ const sentences = normalized.match(sentenceRegex)
61
+
62
+ if (!sentences || sentences.length === 0) {
63
+ let result = normalized
64
+ placeholders.forEach(({ placeholder, original }) => {
65
+ result = result.replace(placeholder, original)
66
+ })
67
+ return result
68
+ }
69
+
70
+ let maxSentences = 2
71
+
72
+ if (sentences.length >= 2) {
73
+ let secondSentence = sentences[1]
74
+ placeholders.forEach(({ placeholder, original }) => {
75
+ secondSentence = secondSentence.replace(new RegExp(placeholder, 'g'), original)
76
+ })
77
+
78
+ if (hasProblematicContent(secondSentence)) {
79
+ maxSentences = 1
80
+ }
81
+ }
82
+
83
+ let result = sentences.slice(0, maxSentences).join('')
84
+
85
+ placeholders.forEach(({ placeholder, original }) => {
86
+ result = result.replace(new RegExp(placeholder, 'g'), original)
87
+ })
88
+
89
+ return result.trim()
90
+ }
91
+
92
+ /**
93
+ * Update whats-new.adoc with new release information
94
+ * @param {Object} params - Parameters
95
+ * @param {string} params.dataDir - Data directory path
96
+ * @param {string} params.oldVersion - Old version string
97
+ * @param {string} params.newVersion - New version string
98
+ * @param {Object} params.binaryAnalysis - Binary analysis data
99
+ */
100
+ function updateWhatsNew ({ dataDir, oldVersion, newVersion, binaryAnalysis }) {
101
+ try {
102
+ const whatsNewPath = path.join(findRepoRoot(), 'modules/get-started/pages/whats-new.adoc')
103
+ if (!fs.existsSync(whatsNewPath)) {
104
+ console.error(`Error: Unable to update release notes: 'whats-new.adoc' was not found at: ${whatsNewPath}`)
105
+ return
106
+ }
107
+
108
+ const diffPath = path.join(dataDir, `connect-diff-${oldVersion}_to_${newVersion}.json`)
109
+ if (!fs.existsSync(diffPath)) {
110
+ console.error(`Error: Unable to update release notes: The connector diff JSON was not found at: ${diffPath}`)
111
+ return
112
+ }
113
+
114
+ let diff
115
+ try {
116
+ diff = JSON.parse(fs.readFileSync(diffPath, 'utf8'))
117
+ } catch (jsonErr) {
118
+ console.error(`Error: Unable to parse connector diff JSON at ${diffPath}: ${jsonErr.message}`)
119
+ return
120
+ }
121
+
122
+ let whatsNew
123
+ try {
124
+ whatsNew = fs.readFileSync(whatsNewPath, 'utf8')
125
+ } catch (readErr) {
126
+ console.error(`Error: Unable to read whats-new.adoc at ${whatsNewPath}: ${readErr.message}`)
127
+ return
128
+ }
129
+
130
+ const versionRe = new RegExp(`^== Version ${diff.comparison.newVersion.replace(/[-.]/g, '\\$&')}(?:\\r?\\n|$)`, 'm')
131
+ const match = versionRe.exec(whatsNew)
132
+ let startIdx = match ? match.index : -1
133
+ let endIdx = -1
134
+ if (startIdx !== -1) {
135
+ const rest = whatsNew.slice(startIdx + 1)
136
+ const nextMatch = /^== Version /m.exec(rest)
137
+ endIdx = nextMatch ? startIdx + 1 + nextMatch.index : whatsNew.length
138
+ }
139
+
140
+ let releaseNotesLink = ''
141
+ if (diff.comparison && diff.comparison.newVersion) {
142
+ releaseNotesLink = `link:https://github.com/redpanda-data/connect/releases/tag/v${diff.comparison.newVersion}[See the full release notes^].\n\n`
143
+ }
144
+ let section = `\n== Version ${diff.comparison.newVersion}\n\n${releaseNotesLink}`
145
+
146
+ // Separate Bloblang and regular components
147
+ const bloblangComponents = []
148
+ const regularComponents = []
149
+
150
+ if (diff.details.newComponents && diff.details.newComponents.length) {
151
+ for (const comp of diff.details.newComponents) {
152
+ if (comp.type === 'bloblang-functions' || comp.type === 'bloblang-methods') {
153
+ bloblangComponents.push(comp)
154
+ } else {
155
+ const isCgoOnly = binaryAnalysis?.cgoOnly?.some(cgo => {
156
+ const typeSingular = cgo.type.replace(/s$/, '')
157
+ return cgo.name === comp.name && typeSingular === comp.type
158
+ })
159
+
160
+ regularComponents.push({
161
+ ...comp,
162
+ requiresCgo: isCgoOnly
163
+ })
164
+ }
165
+ }
166
+ }
167
+
168
+ // Bloblang updates section
169
+ if (bloblangComponents.length > 0) {
170
+ section += '=== Bloblang updates\n\n'
171
+ section += 'This release adds the following new Bloblang capabilities:\n\n'
172
+
173
+ const byType = {}
174
+ for (const comp of bloblangComponents) {
175
+ if (!byType[comp.type]) byType[comp.type] = []
176
+ byType[comp.type].push(comp)
177
+ }
178
+
179
+ for (const [type, comps] of Object.entries(byType)) {
180
+ if (type === 'bloblang-functions') {
181
+ section += '* Functions:\n'
182
+ for (const comp of comps) {
183
+ section += `** xref:guides:bloblang/functions.adoc#${comp.name}[\`${comp.name}\`]`
184
+ if (comp.status && comp.status !== 'stable') section += ` (${comp.status})`
185
+ if (comp.description) {
186
+ section += `: ${capToTwoSentences(comp.description)}`
187
+ } else {
188
+ section += `\n+\n// TODO: Add description for ${comp.name} function`
189
+ }
190
+ section += '\n'
191
+ }
192
+ } else if (type === 'bloblang-methods') {
193
+ section += '* Methods:\n'
194
+ for (const comp of comps) {
195
+ section += `** xref:guides:bloblang/methods.adoc#${comp.name}[\`${comp.name}\`]`
196
+ if (comp.status && comp.status !== 'stable') section += ` (${comp.status})`
197
+ if (comp.description) {
198
+ section += `: ${capToTwoSentences(comp.description)}`
199
+ } else {
200
+ section += `\n+\n// TODO: Add description for ${comp.name} method`
201
+ }
202
+ section += '\n'
203
+ }
204
+ }
205
+ }
206
+ section += '\n'
207
+ }
208
+
209
+ // Component updates section
210
+ if (regularComponents.length > 0) {
211
+ section += '=== Component updates\n\n'
212
+ section += 'This release adds the following new components:\n\n'
213
+
214
+ section += '[cols="1m,1a,1a,3a"]\n'
215
+ section += '|===\n'
216
+ section += '|Component |Type |Status |Description\n\n'
217
+
218
+ for (const comp of regularComponents) {
219
+ const typeLabel = comp.type.charAt(0).toUpperCase() + comp.type.slice(1)
220
+ const statusLabel = comp.status || '-'
221
+ let desc = comp.summary || (comp.description ? capToTwoSentences(comp.description) : '// TODO: Add description')
222
+
223
+ if (comp.requiresCgo) {
224
+ const cgoNote = '\nNOTE: Requires a cgo-enabled binary. See the xref:install:index.adoc[installation guides] for details.'
225
+ desc = desc.startsWith('// TODO') ? cgoNote : `${desc}\n\n${cgoNote}`
226
+ }
227
+
228
+ const typePlural = comp.type.endsWith('s') ? comp.type : `${comp.type}s`
229
+ section += `|xref:components:${typePlural}/${comp.name}.adoc[${comp.name}]\n`
230
+ section += `|${typeLabel}\n`
231
+ section += `|${statusLabel}\n`
232
+ section += `|${desc}\n\n`
233
+ }
234
+
235
+ section += '|===\n\n'
236
+ }
237
+
238
+ // New fields section
239
+ if (diff.details.newFields && diff.details.newFields.length) {
240
+ const regularFields = diff.details.newFields.filter(field => {
241
+ const [type] = field.component.split(':')
242
+ return type !== 'bloblang-functions' && type !== 'bloblang-methods'
243
+ })
244
+
245
+ if (regularFields.length > 0) {
246
+ section += '\n=== New field support\n\n'
247
+ section += 'This release adds support for the following new fields:\n\n'
248
+ section += buildFieldsTable(regularFields, capToTwoSentences)
249
+ }
250
+ }
251
+
252
+ // Deprecated components section
253
+ if (diff.details.deprecatedComponents && diff.details.deprecatedComponents.length) {
254
+ section += '\n=== Deprecations\n\n'
255
+ section += 'The following components are now deprecated:\n\n'
256
+
257
+ section += '[cols="1m,1a,3a"]\n'
258
+ section += '|===\n'
259
+ section += '|Component |Type |Description\n\n'
260
+
261
+ for (const comp of diff.details.deprecatedComponents) {
262
+ const typeLabel = comp.type.charAt(0).toUpperCase() + comp.type.slice(1)
263
+ const desc = comp.description ? capToTwoSentences(comp.description) : '-'
264
+
265
+ if (comp.type === 'bloblang-functions') {
266
+ section += `|xref:guides:bloblang/functions.adoc#${comp.name}[${comp.name}]\n`
267
+ } else if (comp.type === 'bloblang-methods') {
268
+ section += `|xref:guides:bloblang/methods.adoc#${comp.name}[${comp.name}]\n`
269
+ } else {
270
+ section += `|xref:components:${comp.type}/${comp.name}.adoc[${comp.name}]\n`
271
+ }
272
+ section += `|${typeLabel}\n`
273
+ section += `|${desc}\n\n`
274
+ }
275
+
276
+ section += '|===\n\n'
277
+ }
278
+
279
+ // Deprecated fields section
280
+ if (diff.details.deprecatedFields && diff.details.deprecatedFields.length) {
281
+ const regularDeprecatedFields = diff.details.deprecatedFields.filter(field => {
282
+ const [type] = field.component.split(':')
283
+ return type !== 'bloblang-functions' && type !== 'bloblang-methods'
284
+ })
285
+
286
+ if (regularDeprecatedFields.length > 0) {
287
+ if (!diff.details.deprecatedComponents || diff.details.deprecatedComponents.length === 0) {
288
+ section += '\n=== Deprecations\n\n'
289
+ } else {
290
+ section += '\n'
291
+ }
292
+ section += 'The following fields are now deprecated:\n\n'
293
+ section += buildFieldsTable(regularDeprecatedFields, capToTwoSentences)
294
+ }
295
+ }
296
+
297
+ // Changed defaults section
298
+ if (diff.details.changedDefaults && diff.details.changedDefaults.length) {
299
+ const regularChangedDefaults = diff.details.changedDefaults.filter(change => {
300
+ const [type] = change.component.split(':')
301
+ return type !== 'bloblang-functions' && type !== 'bloblang-methods'
302
+ })
303
+
304
+ if (regularChangedDefaults.length > 0) {
305
+ section += '\n=== Default value changes\n\n'
306
+ section += 'This release includes the following default value changes:\n\n'
307
+ section += buildChangedDefaultsTable(regularChangedDefaults, capToTwoSentences)
308
+ }
309
+ }
310
+
311
+ // Update the file
312
+ let contentWithoutOldSection = whatsNew
313
+ if (startIdx !== -1) {
314
+ contentWithoutOldSection = whatsNew.slice(0, startIdx) + whatsNew.slice(endIdx)
315
+ }
316
+
317
+ const versionHeading = /^== Version /m
318
+ const firstMatch = versionHeading.exec(contentWithoutOldSection)
319
+ const insertIdx = firstMatch ? firstMatch.index : contentWithoutOldSection.length
320
+
321
+ const updated = contentWithoutOldSection.slice(0, insertIdx) + section + '\n' + contentWithoutOldSection.slice(insertIdx)
322
+
323
+ if (startIdx !== -1) {
324
+ console.log(`♻️ whats-new.adoc: replaced section for Version ${diff.comparison.newVersion}`)
325
+ } else {
326
+ console.log(`Done: whats-new.adoc updated with Version ${diff.comparison.newVersion}`)
327
+ }
328
+
329
+ fs.writeFileSync(whatsNewPath, updated, 'utf8')
330
+ } catch (err) {
331
+ console.error(`Error: Failed to update whats-new.adoc: ${err.message}`)
332
+ }
333
+ }
334
+
335
/**
 * Build an AsciiDoc table listing fields and the components they belong to.
 *
 * Entries are grouped by field name so each field occupies a single row;
 * the row's description comes from the first entry seen for that field.
 *
 * @param {Array} fields - Field entries ({ component: 'type:name', field, description })
 * @param {Function} capFn - Function that shortens a description to a summary
 * @returns {string} AsciiDoc table markup
 */
function buildFieldsTable (fields, capFn) {
  // Group by field name, collecting every affected component.
  const grouped = {}
  for (const { component, field, description } of fields) {
    const [type, name] = component.split(':')
    grouped[field] = grouped[field] || { description, components: [] }
    grouped[field].components.push({ type, name })
  }

  let table = '[cols="1m,3,2a"]\n'
  table += '|===\n'
  table += '|Field |Description |Affected components\n\n'

  for (const [fieldName, entry] of Object.entries(grouped)) {
    // Re-group this field's components by connector type.
    const namesByType = {}
    for (const { type, name } of entry.components) {
      (namesByType[type] = namesByType[type] || []).push(name)
    }

    // One bulleted sub-list per type, separated by blank lines.
    const listParts = []
    for (const [type, names] of Object.entries(namesByType)) {
      const label = type.charAt(0).toUpperCase() + type.slice(1) +
        (names.length === 1 || type.endsWith('s') ? '' : 's')
      const bullets = names
        .map((name) => `* xref:components:${type}/${name}.adoc#${fieldName}[${name}]\n`)
        .join('')
      listParts.push(`*${label}:*\n\n${bullets}`)
    }

    const desc = entry.description ? capFn(entry.description) : '// TODO: Add description'

    table += `|${fieldName}\n`
    table += `|${desc}\n`
    table += `|${listParts.join('\n\n')}\n\n`
  }

  table += '|===\n\n'
  return table
}
389
+
390
/**
 * Build an AsciiDoc table of default-value changes.
 *
 * Changes are grouped by (field, old default, new default) so an identical
 * change applied across several components shares a single row; the row's
 * description comes from the first entry seen for that group.
 *
 * @param {Array} changedDefaults - Entries ({ component: 'type:name', field, oldDefault, newDefault, description })
 * @param {Function} capFn - Function that shortens a description to a summary
 * @returns {string} AsciiDoc table markup
 */
function buildChangedDefaultsTable (changedDefaults, capFn) {
  // Group identical changes under a composite key.
  const rows = {}
  for (const change of changedDefaults) {
    const [type, name] = change.component.split(':')
    const key = `${change.field}|${String(change.oldDefault)}|${String(change.newDefault)}`
    if (!rows[key]) {
      rows[key] = {
        field: change.field,
        oldDefault: change.oldDefault,
        newDefault: change.newDefault,
        description: change.description,
        components: []
      }
    }
    rows[key].components.push({ type, name })
  }

  // Render a default value as plain text for a table cell.
  const formatDefault = (val) => {
    if (val === undefined || val === null) return 'none'
    if (typeof val === 'string') return val
    if (typeof val === 'number' || typeof val === 'boolean') return String(val)
    return JSON.stringify(val)
  }

  let table = '[cols="1m,1,1,3,2a"]\n'
  table += '|===\n'
  table += '|Field |Old default |New default |Description |Affected components\n\n'

  for (const info of Object.values(rows)) {
    // Re-group this row's components by connector type.
    const namesByType = {}
    for (const { type, name } of info.components) {
      (namesByType[type] = namesByType[type] || []).push(name)
    }

    // One bulleted sub-list per type, separated by blank lines.
    const listParts = []
    for (const [type, names] of Object.entries(namesByType)) {
      const label = type.charAt(0).toUpperCase() + type.slice(1) +
        (names.length === 1 || type.endsWith('s') ? '' : 's')
      const bullets = names
        .map((name) => `* xref:components:${type}/${name}.adoc#${info.field}[${name}]\n`)
        .join('')
      listParts.push(`*${label}:*\n\n${bullets}`)
    }

    const desc = info.description ? capFn(info.description) : '// TODO: Add description'

    table += `|${info.field}\n`
    table += `|${formatDefault(info.oldDefault)}\n`
    table += `|${formatDefault(info.newDefault)}\n`
    table += `|${desc}\n`
    table += `|${listParts.join('\n\n')}\n\n`
  }

  table += '|===\n\n'
  return table
}
459
+
460
/**
 * Print a labelled, truncated listing of file paths to the console.
 *
 * Shows at most maxToShow entries, then a summary line for the remainder,
 * and ends with a blank line.
 *
 * @param {string} label - Label for the list
 * @param {Array} filesArray - Array of file paths
 * @param {number} maxToShow - Maximum items to show (default 10)
 */
function logCollapsed (label, filesArray, maxToShow = 10) {
  console.log(` • ${label}: ${filesArray.length} total`)
  const shown = filesArray.slice(0, maxToShow)
  for (const entry of shown) {
    console.log(`   – ${entry}`)
  }
  const hidden = filesArray.length - shown.length
  if (hidden > 0) {
    console.log(`   … plus ${hidden} more`)
  }
  console.log('')
}
476
+
477
+ /**
478
+ * Main handler for rpcn-connector-docs command
479
+ * @param {Object} options - Command options
480
+ */
481
+ async function handleRpcnConnectorDocs (options) {
482
+ const dataDir = path.resolve(process.cwd(), options.dataDir)
483
+ fs.mkdirSync(dataDir, { recursive: true })
484
+
485
+ const timestamp = new Date().toISOString()
486
+
487
+ let newVersion
488
+ let dataFile
489
+ let binaryAnalysis = null
490
+ let draftsWritten = 0
491
+ let draftFiles = []
492
+ let needsAugmentation = false
493
+
494
+ if (options.fetchConnectors) {
495
+ try {
496
+ if (options.connectVersion) {
497
+ console.log(`Installing Redpanda Connect version ${options.connectVersion}...`)
498
+ const installResult = spawnSync('rpk', ['connect', 'install', '--connect-version', options.connectVersion, '--force'], {
499
+ stdio: 'inherit'
500
+ })
501
+ if (installResult.status !== 0) {
502
+ throw new Error(`Failed to install Connect version ${options.connectVersion}`)
503
+ }
504
+ console.log(`Done: Installed Redpanda Connect version ${options.connectVersion}`)
505
+ newVersion = options.connectVersion
506
+ } else {
507
+ newVersion = getRpkConnectVersion()
508
+ }
509
+ console.log(`Fetching connector data from Connect ${newVersion}...`)
510
+
511
+ const tmpFile = path.join(dataDir, `connect-${newVersion}.tmp.json`)
512
+ const finalFile = path.join(dataDir, `connect-${newVersion}.json`)
513
+
514
+ const fd = fs.openSync(tmpFile, 'w')
515
+ const r = spawnSync('rpk', ['connect', 'list', '--format', 'json-full'], { stdio: ['ignore', fd, 'inherit'] })
516
+ fs.closeSync(fd)
517
+
518
+ const rawJson = fs.readFileSync(tmpFile, 'utf8')
519
+ const parsed = JSON.parse(rawJson)
520
+ fs.writeFileSync(finalFile, JSON.stringify(parsed, null, 2))
521
+ fs.unlinkSync(tmpFile)
522
+ dataFile = finalFile
523
+ needsAugmentation = true
524
+ console.log(`Done: Fetched connector data for version ${newVersion}`)
525
+
526
+ // Fetch info.csv
527
+ try {
528
+ console.log(`Fetching info.csv for Connect v${newVersion}...`)
529
+ const csvFile = path.join(dataDir, `connect-info-${newVersion}.csv`)
530
+
531
+ if (!fs.existsSync(csvFile)) {
532
+ await fetchFromGithub(
533
+ 'redpanda-data',
534
+ 'connect',
535
+ 'internal/plugins/info.csv',
536
+ dataDir,
537
+ `connect-info-${newVersion}.csv`,
538
+ `v${newVersion}`
539
+ )
540
+ console.log(`Done: Fetched info.csv for version ${newVersion}`)
541
+ } else {
542
+ console.log(`✓ CSV already exists: connect-info-${newVersion}.csv`)
543
+ }
544
+ } catch (csvErr) {
545
+ console.warn(`Warning: Failed to fetch info.csv: ${csvErr.message}`)
546
+ }
547
+
548
+ // Fetch Bloblang examples
549
+ try {
550
+ console.log(`Fetching Bloblang playground examples for Connect v${newVersion}...`)
551
+ const examplesFile = path.join(dataDir, `bloblang-samples-${newVersion}.json`)
552
+
553
+ if (!fs.existsSync(examplesFile)) {
554
+ const tempExamplesDir = path.join(dataDir, `temp-playground-${newVersion}`)
555
+ await fetchFromGithub(
556
+ 'redpanda-data',
557
+ 'connect',
558
+ 'docs/guides/bloblang/playground',
559
+ tempExamplesDir,
560
+ null,
561
+ `v${newVersion}`
562
+ )
563
+
564
+ const yaml = require('js-yaml')
565
+ const bloblangSamples = {}
566
+ const files = fs.readdirSync(tempExamplesDir).filter(f => f.endsWith('.yaml') || f.endsWith('.yml'))
567
+
568
+ for (const file of files) {
569
+ try {
570
+ const content = fs.readFileSync(path.join(tempExamplesDir, file), 'utf8')
571
+ const parsedYaml = yaml.load(content)
572
+ if (parsedYaml.title && parsedYaml.input && parsedYaml.mapping) {
573
+ bloblangSamples[file] = parsedYaml
574
+ }
575
+ } catch (err) {
576
+ console.warn(`Warning: Failed to parse ${file}: ${err.message}`)
577
+ }
578
+ }
579
+
580
+ fs.writeFileSync(examplesFile, JSON.stringify(bloblangSamples, null, 2))
581
+ fs.rmSync(tempExamplesDir, { recursive: true, force: true })
582
+ console.log(`Done: Fetched ${Object.keys(bloblangSamples).length} Bloblang examples`)
583
+ } else {
584
+ console.log(`✓ Bloblang samples already exist: bloblang-samples-${newVersion}.json`)
585
+ }
586
+ } catch (examplesErr) {
587
+ console.warn(`Warning: Failed to fetch Bloblang examples: ${examplesErr.message}`)
588
+ }
589
+ } catch (err) {
590
+ console.error(`Error: Failed to fetch connectors: ${err.message}`)
591
+ process.exit(1)
592
+ }
593
+ } else {
594
+ const candidates = fs.readdirSync(dataDir).filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
595
+ if (candidates.length === 0) {
596
+ console.error('Error: No connect-<version>.json found. Use --fetch-connectors.')
597
+ process.exit(1)
598
+ }
599
+ candidates.sort()
600
+ dataFile = path.join(dataDir, candidates[candidates.length - 1])
601
+ newVersion = candidates[candidates.length - 1].match(/connect-(\d+\.\d+\.\d+)\.json/)[1]
602
+ }
603
+
604
+ console.log('Generating connector partials...')
605
+ let partialsWritten, partialFiles
606
+
607
+ try {
608
+ const result = await generateRpcnConnectorDocs({
609
+ data: dataFile,
610
+ overrides: options.overrides,
611
+ template: options.templateMain,
612
+ templateIntro: options.templateIntro,
613
+ templateFields: options.templateFields,
614
+ templateExamples: options.templateExamples,
615
+ templateBloblang: options.templateBloblang,
616
+ writeFullDrafts: false,
617
+ includeBloblang: !!options.includeBloblang
618
+ })
619
+ partialsWritten = result.partialsWritten
620
+ partialFiles = result.partialFiles
621
+ } catch (err) {
622
+ console.error(`Error: Failed to generate partials: ${err.message}`)
623
+ process.exit(1)
624
+ }
625
+
626
+ let oldIndex = {}
627
+ let oldVersion = null
628
+ if (options.oldData && fs.existsSync(options.oldData)) {
629
+ oldIndex = JSON.parse(fs.readFileSync(options.oldData, 'utf8'))
630
+ const m = options.oldData.match(/connect-([\d.]+)\.json$/)
631
+ if (m) oldVersion = m[1]
632
+ } else {
633
+ const existingDataFiles = fs.readdirSync(dataDir)
634
+ .filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
635
+ .filter(f => f !== path.basename(dataFile))
636
+ .sort()
637
+
638
+ if (existingDataFiles.length > 0) {
639
+ const oldFile = existingDataFiles[existingDataFiles.length - 1]
640
+ oldVersion = oldFile.match(/connect-(\d+\.\d+\.\d+)\.json/)[1]
641
+ const oldPath = path.join(dataDir, oldFile)
642
+ oldIndex = JSON.parse(fs.readFileSync(oldPath, 'utf8'))
643
+ console.log(`📋 Using old version data: ${oldFile}`)
644
+ } else {
645
+ oldVersion = getAntoraValue('asciidoc.attributes.latest-connect-version')
646
+ if (oldVersion) {
647
+ const oldPath = path.join(dataDir, `connect-${oldVersion}.json`)
648
+ if (fs.existsSync(oldPath)) {
649
+ oldIndex = JSON.parse(fs.readFileSync(oldPath, 'utf8'))
650
+ }
651
+ }
652
+ }
653
+ }
654
+
655
+ let newIndex = JSON.parse(fs.readFileSync(dataFile, 'utf8'))
656
+
657
+ const versionsMatch = oldVersion && newVersion && oldVersion === newVersion
658
+ if (versionsMatch) {
659
+ console.log(`\n✓ Already at version ${newVersion}`)
660
+ console.log(' Skipping diff generation, but will run binary analysis.\n')
661
+ }
662
+
663
+ // Publish merged version
664
+ if (options.overrides && fs.existsSync(options.overrides)) {
665
+ try {
666
+ const { mergeOverrides, resolveReferences } = require('./generate-rpcn-connector-docs.js')
667
+
668
+ const mergedData = JSON.parse(JSON.stringify(newIndex))
669
+ const ovRaw = fs.readFileSync(options.overrides, 'utf8')
670
+ const ovObj = JSON.parse(ovRaw)
671
+ const resolvedOverrides = resolveReferences(ovObj, ovObj)
672
+ mergeOverrides(mergedData, resolvedOverrides)
673
+
674
+ const attachmentsRoot = path.resolve(process.cwd(), 'modules/components/attachments')
675
+ fs.mkdirSync(attachmentsRoot, { recursive: true })
676
+
677
+ const existingFiles = fs.readdirSync(attachmentsRoot)
678
+ .filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
679
+ .sort()
680
+
681
+ for (const oldFile of existingFiles) {
682
+ const oldFilePath = path.join(attachmentsRoot, oldFile)
683
+ fs.unlinkSync(oldFilePath)
684
+ console.log(`🧹 Deleted old version: ${oldFile}`)
685
+ }
686
+
687
+ const destFile = path.join(attachmentsRoot, `connect-${newVersion}.json`)
688
+ fs.writeFileSync(destFile, JSON.stringify(mergedData, null, 2), 'utf8')
689
+ console.log(`Done: Published merged version to: ${path.relative(process.cwd(), destFile)}`)
690
+ } catch (err) {
691
+ console.error(`Error: Failed to publish merged version: ${err.message}`)
692
+ }
693
+ }
694
+
695
+ printDeltaReport(oldIndex, newIndex)
696
+
697
+ // Binary analysis
698
+ let oldBinaryAnalysis = null
699
+
700
+ if (oldVersion) {
701
+ const standalonePath = path.join(dataDir, `binary-analysis-${oldVersion}.json`)
702
+ if (fs.existsSync(standalonePath)) {
703
+ try {
704
+ oldBinaryAnalysis = JSON.parse(fs.readFileSync(standalonePath, 'utf8'))
705
+ console.log(`✓ Loaded old binary analysis from: binary-analysis-${oldVersion}.json`)
706
+ } catch (err) {
707
+ console.warn(`Warning: Failed to load ${standalonePath}: ${err.message}`)
708
+ }
709
+ }
710
+
711
+ if (!oldBinaryAnalysis) {
712
+ const diffFiles = fs.readdirSync(dataDir)
713
+ .filter(f => f.startsWith('connect-diff-') && f.endsWith(`_to_${oldVersion}.json`))
714
+ .sort()
715
+ .reverse()
716
+
717
+ for (const file of diffFiles) {
718
+ const diffPath = path.join(dataDir, file)
719
+ try {
720
+ const oldDiff = JSON.parse(fs.readFileSync(diffPath, 'utf8'))
721
+ if (oldDiff.binaryAnalysis) {
722
+ oldBinaryAnalysis = {
723
+ comparison: {
724
+ inCloud: oldDiff.binaryAnalysis.details?.cloudSupported || [],
725
+ notInCloud: oldDiff.binaryAnalysis.details?.selfHostedOnly || []
726
+ },
727
+ cgoOnly: oldDiff.binaryAnalysis.details?.cgoOnly || []
728
+ }
729
+ console.log(`✓ Loaded old binary analysis from: ${file}`)
730
+ break
731
+ }
732
+ } catch {
733
+ // Continue to next file
734
+ }
735
+ }
736
+ }
737
+ }
738
+
739
+ try {
740
+ console.log('\nAnalyzing connector binaries...')
741
+ const { analyzeAllBinaries } = require('./connector-binary-analyzer.js')
742
+
743
+ const analysisOptions = {
744
+ skipCloud: false,
745
+ skipCgo: false,
746
+ cgoVersion: options.cgoVersion || null
747
+ }
748
+
749
+ binaryAnalysis = await analyzeAllBinaries(
750
+ newVersion,
751
+ options.cloudVersion || null,
752
+ dataDir,
753
+ analysisOptions
754
+ )
755
+
756
+ console.log('Done: Binary analysis complete:')
757
+ console.log(` • OSS version: ${binaryAnalysis.ossVersion}`)
758
+
759
+ if (binaryAnalysis.cloudVersion) {
760
+ console.log(` • Cloud version: ${binaryAnalysis.cloudVersion}`)
761
+ }
762
+
763
+ if (binaryAnalysis.comparison) {
764
+ console.log(` • Connectors in cloud: ${binaryAnalysis.comparison.inCloud.length}`)
765
+ console.log(` • Self-hosted only: ${binaryAnalysis.comparison.notInCloud.length}`)
766
+ if (binaryAnalysis.comparison.cloudOnly && binaryAnalysis.comparison.cloudOnly.length > 0) {
767
+ console.log(` • Cloud-only connectors: ${binaryAnalysis.comparison.cloudOnly.length}`)
768
+ }
769
+ }
770
+
771
+ if (binaryAnalysis.cgoOnly && binaryAnalysis.cgoOnly.length > 0) {
772
+ console.log(` • cgo-only connectors: ${binaryAnalysis.cgoOnly.length}`)
773
+ }
774
+ } catch (err) {
775
+ console.error(`Warning: Binary analysis failed: ${err.message}`)
776
+ console.error(' Continuing without binary analysis data...')
777
+ }
778
+
779
+ // Augment data file
780
+ if (needsAugmentation && binaryAnalysis) {
781
+ try {
782
+ console.log('\nAugmenting connector data with cloud/cgo fields...')
783
+
784
+ const connectorData = JSON.parse(fs.readFileSync(dataFile, 'utf8'))
785
+
786
+ const cloudSet = new Set(
787
+ (binaryAnalysis.comparison?.inCloud || []).map(c => `${c.type}:${c.name}`)
788
+ )
789
+ const cgoOnlySet = new Set(
790
+ (binaryAnalysis.cgoOnly || []).map(c => `${c.type}:${c.name}`)
791
+ )
792
+
793
+ let augmentedCount = 0
794
+ let addedCgoCount = 0
795
+ let addedCloudOnlyCount = 0
796
+
797
+ const connectorTypes = ['inputs', 'outputs', 'processors', 'caches', 'rate_limits',
798
+ 'buffers', 'metrics', 'scanners', 'tracers']
799
+
800
+ for (const type of connectorTypes) {
801
+ if (!Array.isArray(connectorData[type])) {
802
+ connectorData[type] = []
803
+ }
804
+
805
+ for (const connector of connectorData[type]) {
806
+ const key = `${type}:${connector.name}`
807
+ connector.cloudSupported = cloudSet.has(key)
808
+ connector.requiresCgo = cgoOnlySet.has(key)
809
+ augmentedCount++
810
+ }
811
+
812
+ if (binaryAnalysis.cgoOnly) {
813
+ for (const cgoConn of binaryAnalysis.cgoOnly) {
814
+ if (cgoConn.type === type) {
815
+ const exists = connectorData[type].some(c => c.name === cgoConn.name)
816
+ if (!exists) {
817
+ connectorData[type].push({
818
+ ...cgoConn,
819
+ type: cgoConn.type.replace(/s$/, ''),
820
+ cloudSupported: false,
821
+ requiresCgo: true
822
+ })
823
+ addedCgoCount++
824
+ }
825
+ }
826
+ }
827
+ }
828
+
829
+ if (binaryAnalysis.comparison?.cloudOnly) {
830
+ for (const cloudConn of binaryAnalysis.comparison.cloudOnly) {
831
+ if (cloudConn.type === type) {
832
+ const exists = connectorData[type].some(c => c.name === cloudConn.name)
833
+ if (!exists) {
834
+ connectorData[type].push({
835
+ ...cloudConn,
836
+ type: cloudConn.type.replace(/s$/, ''),
837
+ cloudSupported: true,
838
+ requiresCgo: false,
839
+ cloudOnly: true
840
+ })
841
+ addedCloudOnlyCount++
842
+ }
843
+ }
844
+ }
845
+ }
846
+ }
847
+
848
+ fs.writeFileSync(dataFile, JSON.stringify(connectorData, null, 2), 'utf8')
849
+ console.log(`Done: Augmented ${augmentedCount} connectors with cloud/cgo fields`)
850
+ if (addedCgoCount > 0) {
851
+ console.log(` • Added ${addedCgoCount} cgo-only connector(s) to data file`)
852
+ }
853
+ if (addedCloudOnlyCount > 0) {
854
+ console.log(` • Added ${addedCloudOnlyCount} cloud-only connector(s) to data file`)
855
+ }
856
+
857
+ // Keep only 2 most recent versions
858
+ const dataFiles = fs.readdirSync(dataDir)
859
+ .filter(f => /^connect-\d+\.\d+\.\d+\.json$/.test(f))
860
+ .sort()
861
+
862
+ while (dataFiles.length > 2) {
863
+ const oldestFile = dataFiles.shift()
864
+ const oldestPath = path.join(dataDir, oldestFile)
865
+ fs.unlinkSync(oldestPath)
866
+ console.log(`🧹 Deleted old version from docs-data: ${oldestFile}`)
867
+ }
868
+ } catch (err) {
869
+ console.error(`Warning: Failed to augment data file: ${err.message}`)
870
+ }
871
+ }
872
+
873
+ // Generate diff JSON
874
+ let diffJson = null
875
+ if (!oldVersion) {
876
+ console.warn('Warning: Skipping diff generation: oldVersion not available')
877
+ } else if (versionsMatch) {
878
+ console.log(`⏭️ Skipping diff generation: versions match (${oldVersion} === ${newVersion})`)
879
+ } else {
880
+ const { generateConnectorDiffJson } = require('./report-delta.js')
881
+ diffJson = generateConnectorDiffJson(
882
+ oldIndex,
883
+ newIndex,
884
+ {
885
+ oldVersion: oldVersion,
886
+ newVersion,
887
+ timestamp,
888
+ binaryAnalysis,
889
+ oldBinaryAnalysis
890
+ }
891
+ )
892
+
893
+ // Add new cgo-only components
894
+ if (binaryAnalysis && binaryAnalysis.cgoOnly && binaryAnalysis.cgoOnly.length > 0) {
895
+ let newCgoComponents
896
+
897
+ if (oldBinaryAnalysis) {
898
+ const oldCgoSet = new Set((oldBinaryAnalysis.cgoOnly || []).map(c => `${c.type}:${c.name}`))
899
+ newCgoComponents = binaryAnalysis.cgoOnly.filter(cgoComp => {
900
+ const wasInOldOss = oldIndex[cgoComp.type]?.some(c => c.name === cgoComp.name)
901
+ const wasInOldCgo = oldCgoSet.has(`${cgoComp.type}:${cgoComp.name}`)
902
+ return !wasInOldOss && !wasInOldCgo
903
+ })
904
+ } else {
905
+ newCgoComponents = binaryAnalysis.cgoOnly.filter(cgoComp => {
906
+ const wasInOldOss = oldIndex[cgoComp.type]?.some(c => c.name === cgoComp.name)
907
+ return !wasInOldOss
908
+ })
909
+ if (newCgoComponents.length > 0) {
910
+ console.log(` ℹ️ No old binary analysis found - treating ${newCgoComponents.length} cgo component(s) not in old OSS data as new`)
911
+ }
912
+ }
913
+
914
+ if (newCgoComponents && newCgoComponents.length > 0) {
915
+ console.log(` • Found ${newCgoComponents.length} new cgo-only component(s)`)
916
+ newCgoComponents.forEach(cgoComp => {
917
+ const typeSingular = cgoComp.type.replace(/s$/, '')
918
+ diffJson.details.newComponents.push({
919
+ name: cgoComp.name,
920
+ type: typeSingular,
921
+ status: cgoComp.status || '',
922
+ version: '',
923
+ description: cgoComp.description || '',
924
+ summary: cgoComp.summary || ''
925
+ })
926
+ })
927
+ }
928
+ }
929
+
930
+ const diffPath = path.join(dataDir, `connect-diff-${oldVersion}_to_${newVersion}.json`)
931
+ fs.writeFileSync(diffPath, JSON.stringify(diffJson, null, 2), 'utf8')
932
+ console.log(`Done: Connector diff JSON written to: ${diffPath}`)
933
+ if (diffJson.binaryAnalysis) {
934
+ console.log(` • Includes binary analysis: OSS ${diffJson.binaryAnalysis.versions.oss}, Cloud ${diffJson.binaryAnalysis.versions.cloud || 'N/A'}, cgo ${diffJson.binaryAnalysis.versions.cgo || 'N/A'}`)
935
+ }
936
+
937
+ // Cleanup old diff files
938
+ try {
939
+ const oldDiffFiles = fs.readdirSync(dataDir)
940
+ .filter(f => f.startsWith('connect-diff-') && f.endsWith('.json') && f !== path.basename(diffPath))
941
+
942
+ if (oldDiffFiles.length > 0) {
943
+ console.log(`🧹 Cleaning up ${oldDiffFiles.length} old diff file(s)...`)
944
+ oldDiffFiles.forEach(f => {
945
+ const oldDiffPath = path.join(dataDir, f)
946
+ fs.unlinkSync(oldDiffPath)
947
+ console.log(` • Deleted: ${f}`)
948
+ })
949
+ }
950
+ } catch (err) {
951
+ console.warn(`Warning: Failed to clean up old diff files: ${err.message}`)
952
+ }
953
+ }
954
+
955
+ // Draft missing connectors
956
+ if (options.draftMissing) {
957
+ console.log('\nDrafting missing connectors…')
958
+ try {
959
+ const rawData = fs.readFileSync(dataFile, 'utf8')
960
+ const dataObj = JSON.parse(rawData)
961
+
962
+ const validConnectors = []
963
+ const types = ['inputs', 'outputs', 'processors', 'caches', 'rate_limits', 'buffers', 'metrics', 'scanners', 'tracers']
964
+ types.forEach(type => {
965
+ if (Array.isArray(dataObj[type])) {
966
+ dataObj[type].forEach(connector => {
967
+ if (connector.name) {
968
+ validConnectors.push({
969
+ name: connector.name,
970
+ type: type.replace(/s$/, ''),
971
+ status: connector.status || connector.type || 'stable'
972
+ })
973
+ }
974
+ })
975
+ }
976
+ })
977
+
978
+ // Add cgo-only connectors
979
+ if (binaryAnalysis && binaryAnalysis.cgoOnly) {
980
+ binaryAnalysis.cgoOnly.forEach(cgoConn => {
981
+ const exists = validConnectors.some(c =>
982
+ c.name === cgoConn.name && c.type === cgoConn.type.replace(/s$/, '')
983
+ )
984
+ if (!exists) {
985
+ validConnectors.push({
986
+ name: cgoConn.name,
987
+ type: cgoConn.type.replace(/s$/, ''),
988
+ status: cgoConn.status || 'stable',
989
+ requiresCgo: true
990
+ })
991
+ }
992
+ })
993
+ }
994
+
995
+ // Add cloud-only connectors
996
+ if (binaryAnalysis && binaryAnalysis.comparison?.cloudOnly) {
997
+ binaryAnalysis.comparison.cloudOnly.forEach(cloudConn => {
998
+ const exists = validConnectors.some(c =>
999
+ c.name === cloudConn.name && c.type === cloudConn.type.replace(/s$/, '')
1000
+ )
1001
+ if (!exists) {
1002
+ validConnectors.push({
1003
+ name: cloudConn.name,
1004
+ type: cloudConn.type.replace(/s$/, ''),
1005
+ status: cloudConn.status || 'stable',
1006
+ cloudOnly: true
1007
+ })
1008
+ }
1009
+ })
1010
+ }
1011
+
1012
+ const roots = {
1013
+ pages: path.resolve(process.cwd(), 'modules/components/pages'),
1014
+ partials: path.resolve(process.cwd(), 'modules/components/partials/components')
1015
+ }
1016
+
1017
+ const allMissing = validConnectors.filter(({ name, type }) => {
1018
+ const relPath = path.join(`${type}s`, `${name}.adoc`)
1019
+ const existsInAny = Object.values(roots).some(root =>
1020
+ fs.existsSync(path.join(root, relPath))
1021
+ )
1022
+ return !existsInAny
1023
+ })
1024
+
1025
+ const missingConnectors = allMissing.filter(c =>
1026
+ !c.name.includes('sql_driver') &&
1027
+ c.status !== 'deprecated'
1028
+ )
1029
+
1030
+ if (missingConnectors.length === 0) {
1031
+ console.log('Done: All connectors (excluding sql_drivers) already have docs—nothing to draft.')
1032
+ } else {
1033
+ console.log(`Docs missing for ${missingConnectors.length} connectors:`)
1034
+ missingConnectors.forEach(({ name, type }) => {
1035
+ console.log(` • ${type}/${name}`)
1036
+ })
1037
+ console.log('')
1038
+
1039
+ const filteredDataObj = {}
1040
+
1041
+ for (const [key, arr] of Object.entries(dataObj)) {
1042
+ if (!Array.isArray(arr)) {
1043
+ filteredDataObj[key] = arr
1044
+ continue
1045
+ }
1046
+ filteredDataObj[key] = arr.filter(component =>
1047
+ missingConnectors.some(
1048
+ m => m.name === component.name && `${m.type}s` === key
1049
+ )
1050
+ )
1051
+ }
1052
+
1053
+ const cgoMissing = missingConnectors.filter(m => m.requiresCgo)
1054
+ if (cgoMissing.length > 0 && binaryAnalysis && binaryAnalysis.cgoIndex) {
1055
+ console.log('Fetching cgo-only connector schemas for drafting...')
1056
+ cgoMissing.forEach(cgo => {
1057
+ const typeKey = `${cgo.type}s`
1058
+ if (binaryAnalysis.cgoIndex[typeKey]) {
1059
+ const cgoConnector = binaryAnalysis.cgoIndex[typeKey].find(c => c.name === cgo.name)
1060
+ if (cgoConnector) {
1061
+ if (!filteredDataObj[typeKey]) filteredDataObj[typeKey] = []
1062
+ filteredDataObj[typeKey].push(cgoConnector)
1063
+ console.log(` • Added cgo connector schema: ${cgo.type}/${cgo.name}`)
1064
+ }
1065
+ }
1066
+ })
1067
+ }
1068
+
1069
+ const cloudMissing = missingConnectors.filter(m => m.cloudOnly)
1070
+ if (cloudMissing.length > 0 && binaryAnalysis && binaryAnalysis.cloudIndex) {
1071
+ console.log('Fetching cloud-only connector schemas for drafting...')
1072
+ cloudMissing.forEach(cloud => {
1073
+ const typeKey = `${cloud.type}s`
1074
+ if (binaryAnalysis.cloudIndex[typeKey]) {
1075
+ const cloudConnector = binaryAnalysis.cloudIndex[typeKey].find(c => c.name === cloud.name)
1076
+ if (cloudConnector) {
1077
+ if (!filteredDataObj[typeKey]) filteredDataObj[typeKey] = []
1078
+ filteredDataObj[typeKey].push(cloudConnector)
1079
+ console.log(` • Added cloud-only connector schema: ${cloud.type}/${cloud.name}`)
1080
+ }
1081
+ }
1082
+ })
1083
+ }
1084
+
1085
+ const tempDataPath = path.join(dataDir, '._filtered_connect_data.json')
1086
+ fs.writeFileSync(tempDataPath, JSON.stringify(filteredDataObj, null, 2), 'utf8')
1087
+
1088
+ const draftResult = await generateRpcnConnectorDocs({
1089
+ data: tempDataPath,
1090
+ overrides: options.overrides,
1091
+ template: options.templateMain,
1092
+ templateFields: options.templateFields,
1093
+ templateExamples: options.templateExamples,
1094
+ templateIntro: options.templateIntro,
1095
+ writeFullDrafts: true,
1096
+ cgoOnly: binaryAnalysis?.cgoOnly || [],
1097
+ cloudOnly: binaryAnalysis?.comparison?.cloudOnly || []
1098
+ })
1099
+
1100
+ fs.unlinkSync(tempDataPath)
1101
+ draftsWritten = draftResult.draftsWritten
1102
+ draftFiles = draftResult.draftFiles
1103
+ }
1104
+ } catch (err) {
1105
+ console.error(`Error: Could not draft missing: ${err.message}`)
1106
+ process.exit(1)
1107
+ }
1108
+ }
1109
+
1110
+ // Update nav.adoc if drafts were generated
1111
+ if (draftFiles && draftFiles.length > 0) {
1112
+ try {
1113
+ const { updateNavFromDrafts } = require('./update-nav.js')
1114
+ const navResult = updateNavFromDrafts(draftFiles)
1115
+
1116
+ if (navResult.updated > 0) {
1117
+ console.log(`\nDone: Updated nav.adoc: added ${navResult.updated} connector${navResult.updated !== 1 ? 's' : ''}`)
1118
+ navResult.updates.forEach(u => {
1119
+ console.log(` • ${u.type}/${u.name}`)
1120
+ })
1121
+ }
1122
+
1123
+ if (navResult.skippedCount > 0) {
1124
+ console.log(`\nℹ️ Skipped ${navResult.skippedCount} connector${navResult.skippedCount !== 1 ? 's' : ''}:`)
1125
+ navResult.skipped.forEach(s => {
1126
+ console.log(` • ${s.type}/${s.name} (${s.reason})`)
1127
+ })
1128
+ }
1129
+ } catch (err) {
1130
+ console.error(`Warning: Failed to update nav.adoc: ${err.message}`)
1131
+ }
1132
+ }
1133
+
1134
+ // Generate PR summary
1135
+ try {
1136
+ const { printPRSummary } = require('./pr-summary-formatter.js')
1137
+ printPRSummary(diffJson, binaryAnalysis, draftFiles)
1138
+ } catch (err) {
1139
+ console.error(`Warning: Failed to generate PR summary: ${err.message}`)
1140
+ }
1141
+
1142
+ const wrote = setAntoraValue('asciidoc.attributes.latest-connect-version', newVersion)
1143
+ if (wrote) {
1144
+ console.log(`Done: Updated Antora version: ${newVersion}`)
1145
+ }
1146
+
1147
+ console.log('Generation Report:')
1148
+ console.log(` • Partial files: ${partialsWritten}`)
1149
+ const fieldsPartials = partialFiles.filter(fp => fp.includes('/fields/'))
1150
+ const examplesPartials = partialFiles.filter(fp => fp.includes('/examples/'))
1151
+
1152
+ logCollapsed('Fields partials', fieldsPartials, 10)
1153
+ logCollapsed('Examples partials', examplesPartials, 10)
1154
+
1155
+ if (options.draftMissing) {
1156
+ console.log(` • Full drafts: ${draftsWritten}`)
1157
+ const draftFilePaths = draftFiles.map(df => typeof df === 'string' ? df : df.path)
1158
+ logCollapsed('Draft files', draftFilePaths, 5)
1159
+ }
1160
+
1161
+ // Update whats-new.adoc
1162
+ if (options.updateWhatsNew) {
1163
+ if (!oldVersion) {
1164
+ console.warn('Warning: Skipping whats-new update: oldVersion not available')
1165
+ } else {
1166
+ updateWhatsNew({ dataDir, oldVersion, newVersion, binaryAnalysis })
1167
+ }
1168
+ }
1169
+
1170
+ console.log('\n📄 Summary:')
1171
+ console.log(` • Run time: ${timestamp}`)
1172
+ console.log(` • Version used: ${newVersion}`)
1173
+ process.exit(0)
1174
+ }
1175
+
1176
+ module.exports = {
1177
+ handleRpcnConnectorDocs,
1178
+ updateWhatsNew,
1179
+ capToTwoSentences
1180
+ }