specweave 0.18.1 → 0.20.0
This diff shows the content of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
- package/CLAUDE.md +229 -1817
- package/README.md +68 -0
- package/bin/specweave.js +62 -6
- package/dist/plugins/specweave/lib/hooks/sync-living-docs.d.ts.map +1 -1
- package/dist/plugins/specweave/lib/hooks/sync-living-docs.js +3 -0
- package/dist/plugins/specweave/lib/hooks/sync-living-docs.js.map +1 -1
- package/dist/plugins/specweave/lib/hooks/update-ac-status.d.ts +21 -0
- package/dist/plugins/specweave/lib/hooks/update-ac-status.d.ts.map +1 -0
- package/dist/plugins/specweave/lib/hooks/update-ac-status.js +162 -0
- package/dist/plugins/specweave/lib/hooks/update-ac-status.js.map +1 -0
- package/dist/plugins/specweave-ado/lib/ado-spec-content-sync.d.ts.map +1 -1
- package/dist/plugins/specweave-ado/lib/ado-spec-content-sync.js +65 -6
- package/dist/plugins/specweave-ado/lib/ado-spec-content-sync.js.map +1 -1
- package/dist/plugins/specweave-github/lib/completion-calculator.d.ts +112 -0
- package/dist/plugins/specweave-github/lib/completion-calculator.d.ts.map +1 -0
- package/dist/plugins/specweave-github/lib/completion-calculator.js +301 -0
- package/dist/plugins/specweave-github/lib/completion-calculator.js.map +1 -0
- package/dist/plugins/specweave-github/lib/duplicate-detector.d.ts +3 -3
- package/dist/plugins/specweave-github/lib/duplicate-detector.js +3 -3
- package/dist/plugins/specweave-github/lib/epic-content-builder.d.ts +7 -0
- package/dist/plugins/specweave-github/lib/epic-content-builder.d.ts.map +1 -1
- package/dist/plugins/specweave-github/lib/epic-content-builder.js +42 -0
- package/dist/plugins/specweave-github/lib/epic-content-builder.js.map +1 -1
- package/dist/plugins/specweave-github/lib/github-client-v2.d.ts +14 -0
- package/dist/plugins/specweave-github/lib/github-client-v2.d.ts.map +1 -1
- package/dist/plugins/specweave-github/lib/github-client-v2.js +51 -0
- package/dist/plugins/specweave-github/lib/github-client-v2.js.map +1 -1
- package/dist/plugins/specweave-github/lib/github-epic-sync.js +1 -1
- package/dist/plugins/specweave-github/lib/github-epic-sync.js.map +1 -1
- package/dist/plugins/specweave-github/lib/github-feature-sync.d.ts +87 -0
- package/dist/plugins/specweave-github/lib/github-feature-sync.d.ts.map +1 -0
- package/dist/plugins/specweave-github/lib/github-feature-sync.js +412 -0
- package/dist/plugins/specweave-github/lib/github-feature-sync.js.map +1 -0
- package/dist/plugins/specweave-github/lib/github-spec-content-sync.d.ts.map +1 -1
- package/dist/plugins/specweave-github/lib/github-spec-content-sync.js +64 -13
- package/dist/plugins/specweave-github/lib/github-spec-content-sync.js.map +1 -1
- package/dist/plugins/specweave-github/lib/progress-comment-builder.d.ts +78 -0
- package/dist/plugins/specweave-github/lib/progress-comment-builder.d.ts.map +1 -0
- package/dist/plugins/specweave-github/lib/progress-comment-builder.js +237 -0
- package/dist/plugins/specweave-github/lib/progress-comment-builder.js.map +1 -0
- package/dist/plugins/specweave-github/lib/user-story-content-builder.d.ts +97 -0
- package/dist/plugins/specweave-github/lib/user-story-content-builder.d.ts.map +1 -0
- package/dist/plugins/specweave-github/lib/user-story-content-builder.js +301 -0
- package/dist/plugins/specweave-github/lib/user-story-content-builder.js.map +1 -0
- package/dist/plugins/specweave-github/lib/user-story-issue-builder.d.ts +83 -0
- package/dist/plugins/specweave-github/lib/user-story-issue-builder.d.ts.map +1 -0
- package/dist/plugins/specweave-github/lib/user-story-issue-builder.js +386 -0
- package/dist/plugins/specweave-github/lib/user-story-issue-builder.js.map +1 -0
- package/dist/plugins/specweave-jira/lib/enhanced-jira-sync.d.ts +8 -6
- package/dist/plugins/specweave-jira/lib/enhanced-jira-sync.d.ts.map +1 -1
- package/dist/plugins/specweave-jira/lib/enhanced-jira-sync.js +78 -117
- package/dist/plugins/specweave-jira/lib/enhanced-jira-sync.js.map +1 -1
- package/dist/plugins/specweave-kafka/lib/cli/kcat-wrapper.d.ts +57 -0
- package/dist/plugins/specweave-kafka/lib/cli/kcat-wrapper.d.ts.map +1 -0
- package/dist/plugins/specweave-kafka/lib/cli/kcat-wrapper.js +248 -0
- package/dist/plugins/specweave-kafka/lib/cli/kcat-wrapper.js.map +1 -0
- package/dist/plugins/specweave-kafka/lib/cli/types.d.ts +82 -0
- package/dist/plugins/specweave-kafka/lib/cli/types.d.ts.map +1 -0
- package/dist/plugins/specweave-kafka/lib/cli/types.js +13 -0
- package/dist/plugins/specweave-kafka/lib/cli/types.js.map +1 -0
- package/dist/plugins/specweave-kafka/lib/mcp/detector.d.ts +49 -0
- package/dist/plugins/specweave-kafka/lib/mcp/detector.d.ts.map +1 -0
- package/dist/plugins/specweave-kafka/lib/mcp/detector.js +316 -0
- package/dist/plugins/specweave-kafka/lib/mcp/detector.js.map +1 -0
- package/dist/plugins/specweave-kafka/lib/mcp/types.d.ts +70 -0
- package/dist/plugins/specweave-kafka/lib/mcp/types.d.ts.map +1 -0
- package/dist/plugins/specweave-kafka/lib/mcp/types.js +23 -0
- package/dist/plugins/specweave-kafka/lib/mcp/types.js.map +1 -0
- package/dist/plugins/specweave-kafka/lib/utils/partitioning.d.ts +85 -0
- package/dist/plugins/specweave-kafka/lib/utils/partitioning.d.ts.map +1 -0
- package/dist/plugins/specweave-kafka/lib/utils/partitioning.js +281 -0
- package/dist/plugins/specweave-kafka/lib/utils/partitioning.js.map +1 -0
- package/dist/plugins/specweave-kafka/lib/utils/sizing.d.ts +75 -0
- package/dist/plugins/specweave-kafka/lib/utils/sizing.d.ts.map +1 -0
- package/dist/plugins/specweave-kafka/lib/utils/sizing.js +238 -0
- package/dist/plugins/specweave-kafka/lib/utils/sizing.js.map +1 -0
- package/dist/src/cli/commands/import-docs.js +4 -4
- package/dist/src/cli/commands/import-docs.js.map +1 -1
- package/dist/src/cli/commands/init-multiproject.d.ts.map +1 -1
- package/dist/src/cli/commands/init-multiproject.js +17 -18
- package/dist/src/cli/commands/init-multiproject.js.map +1 -1
- package/dist/src/cli/commands/migrate-to-multiproject.d.ts.map +1 -1
- package/dist/src/cli/commands/migrate-to-multiproject.js +8 -4
- package/dist/src/cli/commands/migrate-to-multiproject.js.map +1 -1
- package/dist/src/cli/commands/switch-project.d.ts.map +1 -1
- package/dist/src/cli/commands/switch-project.js +9 -26
- package/dist/src/cli/commands/switch-project.js.map +1 -1
- package/dist/src/cli/commands/sync-spec-content.js +3 -0
- package/dist/src/cli/commands/sync-spec-content.js.map +1 -1
- package/dist/src/core/deduplication/command-deduplicator.d.ts +166 -0
- package/dist/src/core/deduplication/command-deduplicator.d.ts.map +1 -0
- package/dist/src/core/deduplication/command-deduplicator.js +254 -0
- package/dist/src/core/deduplication/command-deduplicator.js.map +1 -0
- package/dist/src/core/increment/active-increment-manager.d.ts +42 -15
- package/dist/src/core/increment/active-increment-manager.d.ts.map +1 -1
- package/dist/src/core/increment/active-increment-manager.js +113 -46
- package/dist/src/core/increment/active-increment-manager.js.map +1 -1
- package/dist/src/core/increment/conflict-resolver.d.ts +40 -0
- package/dist/src/core/increment/conflict-resolver.d.ts.map +1 -0
- package/dist/src/core/increment/conflict-resolver.js +219 -0
- package/dist/src/core/increment/conflict-resolver.js.map +1 -0
- package/dist/src/core/increment/discipline-checker.d.ts.map +1 -1
- package/dist/src/core/increment/discipline-checker.js +7 -1
- package/dist/src/core/increment/discipline-checker.js.map +1 -1
- package/dist/src/core/increment/duplicate-detector.d.ts +52 -0
- package/dist/src/core/increment/duplicate-detector.d.ts.map +1 -0
- package/dist/src/core/increment/duplicate-detector.js +276 -0
- package/dist/src/core/increment/duplicate-detector.js.map +1 -0
- package/dist/src/core/increment/increment-archiver.d.ts +90 -0
- package/dist/src/core/increment/increment-archiver.d.ts.map +1 -0
- package/dist/src/core/increment/increment-archiver.js +368 -0
- package/dist/src/core/increment/increment-archiver.js.map +1 -0
- package/dist/src/core/increment/increment-reopener.d.ts +165 -0
- package/dist/src/core/increment/increment-reopener.d.ts.map +1 -0
- package/dist/src/core/increment/increment-reopener.js +390 -0
- package/dist/src/core/increment/increment-reopener.js.map +1 -0
- package/dist/src/core/increment/metadata-manager.d.ts +26 -1
- package/dist/src/core/increment/metadata-manager.d.ts.map +1 -1
- package/dist/src/core/increment/metadata-manager.js +143 -5
- package/dist/src/core/increment/metadata-manager.js.map +1 -1
- package/dist/src/core/increment/recent-work-scanner.d.ts +121 -0
- package/dist/src/core/increment/recent-work-scanner.d.ts.map +1 -0
- package/dist/src/core/increment/recent-work-scanner.js +303 -0
- package/dist/src/core/increment/recent-work-scanner.js.map +1 -0
- package/dist/src/core/increment/types.d.ts +1 -0
- package/dist/src/core/increment/types.d.ts.map +1 -1
- package/dist/src/core/increment-utils.d.ts +112 -0
- package/dist/src/core/increment-utils.d.ts.map +1 -0
- package/dist/src/core/increment-utils.js +210 -0
- package/dist/src/core/increment-utils.js.map +1 -0
- package/dist/src/core/living-docs/ac-project-specific-generator.d.ts +65 -0
- package/dist/src/core/living-docs/ac-project-specific-generator.d.ts.map +1 -0
- package/dist/src/core/living-docs/ac-project-specific-generator.js +175 -0
- package/dist/src/core/living-docs/ac-project-specific-generator.js.map +1 -0
- package/dist/src/core/living-docs/feature-archiver.d.ts +130 -0
- package/dist/src/core/living-docs/feature-archiver.d.ts.map +1 -0
- package/dist/src/core/living-docs/feature-archiver.js +549 -0
- package/dist/src/core/living-docs/feature-archiver.js.map +1 -0
- package/dist/src/core/living-docs/feature-id-manager.d.ts +81 -0
- package/dist/src/core/living-docs/feature-id-manager.d.ts.map +1 -0
- package/dist/src/core/living-docs/feature-id-manager.js +339 -0
- package/dist/src/core/living-docs/feature-id-manager.js.map +1 -0
- package/dist/src/core/living-docs/hierarchy-mapper.d.ts +144 -83
- package/dist/src/core/living-docs/hierarchy-mapper.d.ts.map +1 -1
- package/dist/src/core/living-docs/hierarchy-mapper.js +488 -270
- package/dist/src/core/living-docs/hierarchy-mapper.js.map +1 -1
- package/dist/src/core/living-docs/index.d.ts +6 -0
- package/dist/src/core/living-docs/index.d.ts.map +1 -1
- package/dist/src/core/living-docs/index.js +6 -0
- package/dist/src/core/living-docs/index.js.map +1 -1
- package/dist/src/core/living-docs/project-detector.d.ts +6 -0
- package/dist/src/core/living-docs/project-detector.d.ts.map +1 -1
- package/dist/src/core/living-docs/project-detector.js +35 -1
- package/dist/src/core/living-docs/project-detector.js.map +1 -1
- package/dist/src/core/living-docs/spec-distributor.d.ts +100 -26
- package/dist/src/core/living-docs/spec-distributor.d.ts.map +1 -1
- package/dist/src/core/living-docs/spec-distributor.js +1275 -258
- package/dist/src/core/living-docs/spec-distributor.js.map +1 -1
- package/dist/src/core/living-docs/task-project-specific-generator.d.ts +109 -0
- package/dist/src/core/living-docs/task-project-specific-generator.d.ts.map +1 -0
- package/dist/src/core/living-docs/task-project-specific-generator.js +221 -0
- package/dist/src/core/living-docs/task-project-specific-generator.js.map +1 -0
- package/dist/src/core/living-docs/types.d.ts +143 -0
- package/dist/src/core/living-docs/types.d.ts.map +1 -1
- package/dist/src/core/project-manager.d.ts +2 -17
- package/dist/src/core/project-manager.d.ts.map +1 -1
- package/dist/src/core/project-manager.js +68 -48
- package/dist/src/core/project-manager.js.map +1 -1
- package/dist/src/core/spec-content-sync.d.ts +1 -1
- package/dist/src/core/spec-content-sync.d.ts.map +1 -1
- package/dist/src/core/sync/enhanced-content-builder.d.ts.map +1 -1
- package/dist/src/core/sync/enhanced-content-builder.js +2 -1
- package/dist/src/core/sync/enhanced-content-builder.js.map +1 -1
- package/dist/src/core/sync/performance-optimizer.d.ts +153 -0
- package/dist/src/core/sync/performance-optimizer.d.ts.map +1 -0
- package/dist/src/core/sync/performance-optimizer.js +220 -0
- package/dist/src/core/sync/performance-optimizer.js.map +1 -0
- package/dist/src/core/sync/retry-handler.d.ts +98 -0
- package/dist/src/core/sync/retry-handler.d.ts.map +1 -0
- package/dist/src/core/sync/retry-handler.js +196 -0
- package/dist/src/core/sync/retry-handler.js.map +1 -0
- package/dist/src/core/types/config.d.ts +94 -0
- package/dist/src/core/types/config.d.ts.map +1 -1
- package/dist/src/core/types/config.js +16 -0
- package/dist/src/core/types/config.js.map +1 -1
- package/dist/src/core/types/increment-metadata.d.ts +6 -0
- package/dist/src/core/types/increment-metadata.d.ts.map +1 -1
- package/dist/src/core/types/increment-metadata.js +10 -1
- package/dist/src/core/types/increment-metadata.js.map +1 -1
- package/dist/src/integrations/jira/jira-incremental-mapper.d.ts.map +1 -1
- package/dist/src/integrations/jira/jira-incremental-mapper.js +4 -8
- package/dist/src/integrations/jira/jira-incremental-mapper.js.map +1 -1
- package/dist/src/integrations/jira/jira-mapper.d.ts.map +1 -1
- package/dist/src/integrations/jira/jira-mapper.js +4 -8
- package/dist/src/integrations/jira/jira-mapper.js.map +1 -1
- package/package.json +1 -1
- package/plugins/specweave/COMMANDS.md +13 -4
- package/plugins/specweave/commands/specweave-abandon.md +22 -20
- package/plugins/specweave/commands/specweave-archive-features.md +121 -0
- package/plugins/specweave/commands/specweave-archive-increments.md +82 -0
- package/plugins/specweave/commands/specweave-archive.md +363 -0
- package/plugins/specweave/commands/specweave-backlog.md +211 -0
- package/plugins/specweave/commands/specweave-fix-duplicates.md +517 -0
- package/plugins/specweave/commands/specweave-increment.md +4 -3
- package/plugins/specweave/commands/specweave-progress.md +176 -27
- package/plugins/specweave/commands/specweave-reopen.md +391 -0
- package/plugins/specweave/commands/specweave-restore-feature.md +90 -0
- package/plugins/specweave/commands/specweave-restore.md +309 -0
- package/plugins/specweave/commands/specweave-resume.md +51 -23
- package/plugins/specweave/commands/specweave-status.md +41 -7
- package/plugins/specweave/commands/specweave-sync-specs.md +425 -0
- package/plugins/specweave/hooks/hooks.json +4 -0
- package/plugins/specweave/hooks/lib/sync-spec-content.sh +2 -2
- package/plugins/specweave/hooks/post-task-completion.sh +39 -0
- package/plugins/specweave/hooks/pre-command-deduplication.sh +83 -0
- package/plugins/specweave/hooks/user-prompt-submit.sh +1 -1
- package/plugins/specweave/lib/hooks/sync-living-docs.js +2 -0
- package/plugins/specweave/lib/hooks/sync-living-docs.ts +4 -0
- package/plugins/specweave/lib/hooks/update-ac-status.js +102 -0
- package/plugins/specweave/lib/hooks/update-ac-status.ts +192 -0
- package/plugins/specweave/skills/archive-increments/SKILL.md +198 -0
- package/plugins/specweave/skills/increment-planner/scripts/feature-utils.js +14 -0
- package/plugins/specweave/skills/smart-reopen-detector/SKILL.md +244 -0
- package/plugins/specweave-ado/lib/ado-spec-content-sync.js +49 -5
- package/plugins/specweave-ado/lib/ado-spec-content-sync.ts +72 -6
- package/plugins/specweave-confluent/.claude-plugin/plugin.json +23 -0
- package/plugins/specweave-confluent/README.md +375 -0
- package/plugins/specweave-confluent/agents/confluent-architect/AGENT.md +306 -0
- package/plugins/specweave-confluent/skills/confluent-kafka-connect/SKILL.md +453 -0
- package/plugins/specweave-confluent/skills/confluent-ksqldb/SKILL.md +470 -0
- package/plugins/specweave-confluent/skills/confluent-schema-registry/SKILL.md +316 -0
- package/plugins/specweave-github/agents/github-task-splitter/AGENT.md +2 -2
- package/plugins/specweave-github/agents/user-story-updater/AGENT.md +148 -0
- package/plugins/specweave-github/commands/specweave-github-cleanup-duplicates.md +1 -1
- package/plugins/specweave-github/commands/specweave-github-update-user-story.md +156 -0
- package/plugins/specweave-github/hooks/post-task-completion.sh +10 -9
- package/plugins/specweave-github/lib/completion-calculator.js +262 -0
- package/plugins/specweave-github/lib/completion-calculator.ts +434 -0
- package/plugins/specweave-github/lib/duplicate-detector.js +3 -3
- package/plugins/specweave-github/lib/duplicate-detector.ts +4 -4
- package/plugins/specweave-github/lib/epic-content-builder.js +38 -0
- package/plugins/specweave-github/lib/epic-content-builder.ts +59 -0
- package/plugins/specweave-github/lib/github-client-v2.js +49 -0
- package/plugins/specweave-github/lib/github-client-v2.ts +59 -0
- package/plugins/specweave-github/lib/github-epic-sync.ts +1 -1
- package/plugins/specweave-github/lib/github-feature-sync.js +381 -0
- package/plugins/specweave-github/lib/github-feature-sync.ts +568 -0
- package/plugins/specweave-github/lib/github-spec-content-sync.js +40 -10
- package/plugins/specweave-github/lib/github-spec-content-sync.ts +82 -14
- package/plugins/specweave-github/lib/progress-comment-builder.js +229 -0
- package/plugins/specweave-github/lib/progress-comment-builder.ts +324 -0
- package/plugins/specweave-github/lib/user-story-content-builder.js +299 -0
- package/plugins/specweave-github/lib/user-story-content-builder.ts +413 -0
- package/plugins/specweave-github/lib/user-story-issue-builder.js +344 -0
- package/plugins/specweave-github/lib/user-story-issue-builder.ts +543 -0
- package/plugins/specweave-github/skills/github-issue-standard/SKILL.md +189 -0
- package/plugins/specweave-jira/lib/enhanced-jira-sync.js +134 -0
- package/plugins/specweave-jira/lib/{enhanced-jira-sync.ts.disabled → enhanced-jira-sync.ts} +26 -52
- package/plugins/specweave-kafka/.claude-plugin/plugin.json +26 -0
- package/plugins/specweave-kafka/IMPLEMENTATION-COMPLETE.md +483 -0
- package/plugins/specweave-kafka/README.md +242 -0
- package/plugins/specweave-kafka/agents/kafka-architect/AGENT.md +235 -0
- package/plugins/specweave-kafka/agents/kafka-devops/AGENT.md +209 -0
- package/plugins/specweave-kafka/agents/kafka-observability/AGENT.md +266 -0
- package/plugins/specweave-kafka/commands/deploy.md +99 -0
- package/plugins/specweave-kafka/commands/dev-env.md +176 -0
- package/plugins/specweave-kafka/commands/mcp-configure.md +101 -0
- package/plugins/specweave-kafka/commands/monitor-setup.md +96 -0
- package/plugins/specweave-kafka/docker/kafka-local/docker-compose.yml +187 -0
- package/plugins/specweave-kafka/docker/redpanda/docker-compose.yml +199 -0
- package/plugins/specweave-kafka/docker/templates/consumer-nodejs.js +225 -0
- package/plugins/specweave-kafka/docker/templates/consumer-python.py +220 -0
- package/plugins/specweave-kafka/docker/templates/producer-nodejs.js +168 -0
- package/plugins/specweave-kafka/docker/templates/producer-python.py +167 -0
- package/plugins/specweave-kafka/lib/adapters/apache-kafka-adapter.js +438 -0
- package/plugins/specweave-kafka/lib/adapters/apache-kafka-adapter.ts +541 -0
- package/plugins/specweave-kafka/lib/adapters/platform-adapter.js +47 -0
- package/plugins/specweave-kafka/lib/adapters/platform-adapter.ts +343 -0
- package/plugins/specweave-kafka/lib/cli/kcat-wrapper.js +258 -0
- package/plugins/specweave-kafka/lib/cli/kcat-wrapper.ts +298 -0
- package/plugins/specweave-kafka/lib/cli/types.js +10 -0
- package/plugins/specweave-kafka/lib/cli/types.ts +92 -0
- package/plugins/specweave-kafka/lib/connectors/connector-catalog.js +305 -0
- package/plugins/specweave-kafka/lib/connectors/connector-catalog.ts +528 -0
- package/plugins/specweave-kafka/lib/documentation/diagram-generator.js +114 -0
- package/plugins/specweave-kafka/lib/documentation/diagram-generator.ts +195 -0
- package/plugins/specweave-kafka/lib/documentation/exporter.js +210 -0
- package/plugins/specweave-kafka/lib/documentation/exporter.ts +338 -0
- package/plugins/specweave-kafka/lib/documentation/schema-catalog-generator.js +60 -0
- package/plugins/specweave-kafka/lib/documentation/schema-catalog-generator.ts +130 -0
- package/plugins/specweave-kafka/lib/documentation/topology-generator.js +143 -0
- package/plugins/specweave-kafka/lib/documentation/topology-generator.ts +290 -0
- package/plugins/specweave-kafka/lib/mcp/detector.js +298 -0
- package/plugins/specweave-kafka/lib/mcp/detector.ts +352 -0
- package/plugins/specweave-kafka/lib/mcp/types.js +21 -0
- package/plugins/specweave-kafka/lib/mcp/types.ts +77 -0
- package/plugins/specweave-kafka/lib/multi-cluster/cluster-config-manager.js +193 -0
- package/plugins/specweave-kafka/lib/multi-cluster/cluster-config-manager.ts +362 -0
- package/plugins/specweave-kafka/lib/multi-cluster/cluster-switcher.js +188 -0
- package/plugins/specweave-kafka/lib/multi-cluster/cluster-switcher.ts +359 -0
- package/plugins/specweave-kafka/lib/multi-cluster/health-aggregator.js +195 -0
- package/plugins/specweave-kafka/lib/multi-cluster/health-aggregator.ts +380 -0
- package/plugins/specweave-kafka/lib/observability/opentelemetry-kafka.js +209 -0
- package/plugins/specweave-kafka/lib/observability/opentelemetry-kafka.ts +358 -0
- package/plugins/specweave-kafka/lib/patterns/advanced-ksqldb-patterns.js +354 -0
- package/plugins/specweave-kafka/lib/patterns/advanced-ksqldb-patterns.ts +563 -0
- package/plugins/specweave-kafka/lib/patterns/circuit-breaker-resilience.js +259 -0
- package/plugins/specweave-kafka/lib/patterns/circuit-breaker-resilience.ts +516 -0
- package/plugins/specweave-kafka/lib/patterns/dead-letter-queue.js +233 -0
- package/plugins/specweave-kafka/lib/patterns/dead-letter-queue.ts +423 -0
- package/plugins/specweave-kafka/lib/patterns/exactly-once-semantics.js +266 -0
- package/plugins/specweave-kafka/lib/patterns/exactly-once-semantics.ts +445 -0
- package/plugins/specweave-kafka/lib/patterns/flink-kafka-integration.js +312 -0
- package/plugins/specweave-kafka/lib/patterns/flink-kafka-integration.ts +561 -0
- package/plugins/specweave-kafka/lib/patterns/multi-dc-replication.js +289 -0
- package/plugins/specweave-kafka/lib/patterns/multi-dc-replication.ts +607 -0
- package/plugins/specweave-kafka/lib/patterns/rate-limiting-backpressure.js +264 -0
- package/plugins/specweave-kafka/lib/patterns/rate-limiting-backpressure.ts +498 -0
- package/plugins/specweave-kafka/lib/patterns/stream-processing-optimization.js +263 -0
- package/plugins/specweave-kafka/lib/patterns/stream-processing-optimization.ts +549 -0
- package/plugins/specweave-kafka/lib/patterns/tiered-storage-compaction.js +205 -0
- package/plugins/specweave-kafka/lib/patterns/tiered-storage-compaction.ts +399 -0
- package/plugins/specweave-kafka/lib/performance/performance-optimizer.js +249 -0
- package/plugins/specweave-kafka/lib/performance/performance-optimizer.ts +427 -0
- package/plugins/specweave-kafka/lib/security/kafka-security.js +252 -0
- package/plugins/specweave-kafka/lib/security/kafka-security.ts +494 -0
- package/plugins/specweave-kafka/lib/utils/capacity-planner.js +203 -0
- package/plugins/specweave-kafka/lib/utils/capacity-planner.ts +469 -0
- package/plugins/specweave-kafka/lib/utils/config-validator.js +419 -0
- package/plugins/specweave-kafka/lib/utils/config-validator.ts +564 -0
- package/plugins/specweave-kafka/lib/utils/partitioning.js +329 -0
- package/plugins/specweave-kafka/lib/utils/partitioning.ts +473 -0
- package/plugins/specweave-kafka/lib/utils/sizing.js +221 -0
- package/plugins/specweave-kafka/lib/utils/sizing.ts +374 -0
- package/plugins/specweave-kafka/monitoring/grafana/dashboards/kafka-broker-metrics.json +628 -0
- package/plugins/specweave-kafka/monitoring/grafana/dashboards/kafka-cluster-overview.json +564 -0
- package/plugins/specweave-kafka/monitoring/grafana/dashboards/kafka-consumer-lag.json +509 -0
- package/plugins/specweave-kafka/monitoring/grafana/dashboards/kafka-jvm-metrics.json +674 -0
- package/plugins/specweave-kafka/monitoring/grafana/dashboards/kafka-topic-metrics.json +578 -0
- package/plugins/specweave-kafka/monitoring/grafana/provisioning/dashboards/kafka.yml +17 -0
- package/plugins/specweave-kafka/monitoring/grafana/provisioning/datasources/prometheus.yml +17 -0
- package/plugins/specweave-kafka/monitoring/prometheus/kafka-alerts.yml +415 -0
- package/plugins/specweave-kafka/monitoring/prometheus/kafka-jmx-exporter.yml +256 -0
- package/plugins/specweave-kafka/package.json +41 -0
- package/plugins/specweave-kafka/skills/kafka-architecture/SKILL.md +647 -0
- package/plugins/specweave-kafka/skills/kafka-cli-tools/SKILL.md +433 -0
- package/plugins/specweave-kafka/skills/kafka-iac-deployment/SKILL.md +449 -0
- package/plugins/specweave-kafka/skills/kafka-kubernetes/SKILL.md +667 -0
- package/plugins/specweave-kafka/skills/kafka-mcp-integration/SKILL.md +273 -0
- package/plugins/specweave-kafka/skills/kafka-observability/SKILL.md +576 -0
- package/plugins/specweave-kafka/templates/config/broker-production.properties +254 -0
- package/plugins/specweave-kafka/templates/config/consumer-low-latency.properties +112 -0
- package/plugins/specweave-kafka/templates/config/producer-high-throughput.properties +120 -0
- package/plugins/specweave-kafka/templates/migration/mirrormaker2-config.properties +234 -0
- package/plugins/specweave-kafka/templates/monitoring/grafana/multi-cluster-dashboard.json +686 -0
- package/plugins/specweave-kafka/terraform/apache-kafka/main.tf +347 -0
- package/plugins/specweave-kafka/terraform/apache-kafka/outputs.tf +107 -0
- package/plugins/specweave-kafka/terraform/apache-kafka/templates/kafka-broker-init.sh.tpl +216 -0
- package/plugins/specweave-kafka/terraform/apache-kafka/variables.tf +156 -0
- package/plugins/specweave-kafka/terraform/aws-msk/main.tf +362 -0
- package/plugins/specweave-kafka/terraform/aws-msk/outputs.tf +93 -0
- package/plugins/specweave-kafka/terraform/aws-msk/templates/server.properties.tpl +32 -0
- package/plugins/specweave-kafka/terraform/aws-msk/variables.tf +235 -0
- package/plugins/specweave-kafka/terraform/azure-event-hubs/main.tf +281 -0
- package/plugins/specweave-kafka/terraform/azure-event-hubs/outputs.tf +118 -0
- package/plugins/specweave-kafka/terraform/azure-event-hubs/variables.tf +148 -0
- package/plugins/specweave-kafka/tsconfig.json +21 -0
- package/plugins/specweave-kafka-streams/.claude-plugin/plugin.json +23 -0
- package/plugins/specweave-kafka-streams/README.md +310 -0
- package/plugins/specweave-kafka-streams/skills/kafka-streams-topology/SKILL.md +539 -0
- package/plugins/specweave-n8n/.claude-plugin/plugin.json +22 -0
- package/plugins/specweave-n8n/README.md +354 -0
- package/plugins/specweave-n8n/skills/n8n-kafka-workflows/SKILL.md +504 -0
- package/plugins/specweave-release/commands/specweave-release-platform.md +1 -1
- package/plugins/specweave-release/hooks/post-task-completion.sh +2 -2
- package/src/templates/AGENTS.md.template +601 -7
- package/src/templates/CLAUDE.md.template +188 -88
- package/plugins/specweave-ado/commands/specweave-ado-sync-spec.md +0 -255
- package/plugins/specweave-github/commands/specweave-github-sync-epic.md +0 -248
- package/plugins/specweave-github/commands/specweave-github-sync-from.md +0 -147
- package/plugins/specweave-github/commands/specweave-github-sync-spec.md +0 -208
- package/plugins/specweave-github/commands/specweave-github-sync-tasks.md +0 -530
- package/plugins/specweave-jira/commands/specweave-jira-sync-epic.md +0 -267
- package/plugins/specweave-jira/commands/specweave-jira-sync-spec.md +0 -240
New file `package/plugins/specweave-kafka/lib/connectors/connector-catalog.ts` (+528 lines):

@@ -0,0 +1,528 @@

````typescript
/**
 * Kafka Connect Connector Catalog
 *
 * Comprehensive collection of source and sink connectors with configuration templates
 *
 * @module connector-catalog
 */

/**
 * Connector Categories
 */
export enum ConnectorCategory {
  DATABASE = 'database',
  CLOUD_STORAGE = 'cloud-storage',
  MESSAGE_QUEUE = 'message-queue',
  DATA_WAREHOUSE = 'data-warehouse',
  SEARCH = 'search',
  MONITORING = 'monitoring',
  FILE_SYSTEM = 'file-system',
}

/**
 * Connector Configuration
 */
export interface ConnectorConfig {
  /** Connector name */
  name: string;
  /** Connector class */
  'connector.class': string;
  /** Max tasks */
  'tasks.max': string;
  /** Additional properties */
  [key: string]: any;
}

/**
 * Kafka Connect Connector Catalog
 *
 * Pre-configured connectors for common integrations
 */
export class ConnectorCatalog {
  /**
   * JDBC Source Connector (Database → Kafka)
   *
   * Use case: Stream database tables to Kafka (CDC alternative)
   */
  static jdbcSource(options: {
    connectionUrl: string;
    user: string;
    password: string;
    tableName: string;
    mode: 'bulk' | 'incrementing' | 'timestamp' | 'timestamp+incrementing';
    incrementingColumn?: string;
    timestampColumn?: string;
    topicPrefix: string;
  }): ConnectorConfig {
    const config: ConnectorConfig = {
      name: `jdbc-source-${options.tableName}`,
      'connector.class': 'io.confluent.connect.jdbc.JdbcSourceConnector',
      'tasks.max': '1',
      'connection.url': options.connectionUrl,
      'connection.user': options.user,
      'connection.password': options.password,
      'table.whitelist': options.tableName,
      'mode': options.mode,
      'topic.prefix': options.topicPrefix,
      'poll.interval.ms': '5000',
    };

    if (options.mode === 'incrementing' && options.incrementingColumn) {
      config['incrementing.column.name'] = options.incrementingColumn;
    }

    if (
      (options.mode === 'timestamp' || options.mode === 'timestamp+incrementing') &&
      options.timestampColumn
    ) {
      config['timestamp.column.name'] = options.timestampColumn;
    }

    return config;
  }

  /**
   * JDBC Sink Connector (Kafka → Database)
   *
   * Use case: Write Kafka events to database tables
   */
  static jdbcSink(options: {
    connectionUrl: string;
    user: string;
    password: string;
    topics: string[];
    autoCreate?: boolean;
    autoEvolve?: boolean;
    insertMode?: 'insert' | 'upsert' | 'update';
    pkMode?: 'none' | 'kafka' | 'record_key' | 'record_value';
    pkFields?: string[];
  }): ConnectorConfig {
    return {
      name: `jdbc-sink-${options.topics.join('-')}`,
      'connector.class': 'io.confluent.connect.jdbc.JdbcSinkConnector',
      'tasks.max': '1',
      'connection.url': options.connectionUrl,
      'connection.user': options.user,
      'connection.password': options.password,
      'topics': options.topics.join(','),
      'auto.create': options.autoCreate !== false ? 'true' : 'false',
      'auto.evolve': options.autoEvolve !== false ? 'true' : 'false',
      'insert.mode': options.insertMode || 'insert',
      'pk.mode': options.pkMode || 'none',
      'pk.fields': options.pkFields?.join(',') || '',
    };
  }

  /**
   * Debezium MySQL Source Connector (CDC)
   *
   * Use case: Capture all database changes in real-time
   */
  static debeziumMySQL(options: {
    hostname: string;
    port: number;
    user: string;
    password: string;
    databaseName: string;
    serverId: number;
    serverName: string;
    tableIncludeList?: string;
  }): ConnectorConfig {
    return {
      name: `debezium-mysql-${options.databaseName}`,
      'connector.class': 'io.debezium.connector.mysql.MySqlConnector',
      'tasks.max': '1',
      'database.hostname': options.hostname,
      'database.port': options.port.toString(),
      'database.user': options.user,
      'database.password': options.password,
      'database.server.id': options.serverId.toString(),
      'database.server.name': options.serverName,
      'database.include.list': options.databaseName,
      'table.include.list': options.tableIncludeList || `${options.databaseName}.*`,
      'database.history.kafka.bootstrap.servers': 'localhost:9092',
      'database.history.kafka.topic': `dbhistory.${options.databaseName}`,
    };
  }

  /**
   * Debezium PostgreSQL Source Connector (CDC)
   */
  static debeziumPostgreSQL(options: {
    hostname: string;
    port: number;
    user: string;
    password: string;
    databaseName: string;
    serverName: string;
    slotName: string;
    publicationName: string;
  }): ConnectorConfig {
    return {
      name: `debezium-postgres-${options.databaseName}`,
      'connector.class': 'io.debezium.connector.postgresql.PostgresConnector',
      'tasks.max': '1',
      'database.hostname': options.hostname,
      'database.port': options.port.toString(),
      'database.user': options.user,
      'database.password': options.password,
      'database.dbname': options.databaseName,
      'database.server.name': options.serverName,
      'slot.name': options.slotName,
      'publication.name': options.publicationName,
      'plugin.name': 'pgoutput',
    };
  }

  /**
   * S3 Sink Connector (Kafka → AWS S3)
   *
   * Use case: Archive Kafka data to S3 for analytics
   */
  static s3Sink(options: {
    topics: string[];
    s3BucketName: string;
    s3Region: string;
    format: 'json' | 'avro' | 'parquet';
    flushSize?: number;
    rotateIntervalMs?: number;
    partitionerClass?: string;
  }): ConnectorConfig {
    const formatClass =
      options.format === 'avro'
        ? 'io.confluent.connect.s3.format.avro.AvroFormat'
        : options.format === 'parquet'
          ? 'io.confluent.connect.s3.format.parquet.ParquetFormat'
          : 'io.confluent.connect.s3.format.json.JsonFormat';

    return {
      name: `s3-sink-${options.topics.join('-')}`,
      'connector.class': 'io.confluent.connect.s3.S3SinkConnector',
      'tasks.max': '1',
      'topics': options.topics.join(','),
      's3.bucket.name': options.s3BucketName,
      's3.region': options.s3Region,
      'format.class': formatClass,
      'flush.size': (options.flushSize || 1000).toString(),
      'rotate.interval.ms': (options.rotateIntervalMs || 3600000).toString(),
      'partitioner.class': options.partitionerClass || 'io.confluent.connect.storage.partitioner.TimeBasedPartitioner',
      'path.format': "'year'=YYYY/'month'=MM/'day'=dd/'hour'=HH",
      'locale': 'en-US',
      'timezone': 'UTC',
      'timestamp.extractor': 'Record',
    };
  }

  /**
   * Elasticsearch Sink Connector (Kafka → Elasticsearch)
   *
   * Use case: Index Kafka data for full-text search
   */
  static elasticsearchSink(options: {
    topics: string[];
    connectionUrl: string;
    indexName?: string;
    typeName?: string;
    batchSize?: number;
  }): ConnectorConfig {
    return {
      name: `elasticsearch-sink-${options.topics.join('-')}`,
      'connector.class': 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector',
      'tasks.max': '1',
      'topics': options.topics.join(','),
      'connection.url': options.connectionUrl,
      'type.name': options.typeName || '_doc',
      'key.ignore': 'true',
      'schema.ignore': 'false',
      'batch.size': (options.batchSize || 2000).toString(),
      'max.buffered.records': '20000',
      'linger.ms': '1000',
      'flush.timeout.ms': '10000',
      'max.in.flight.requests': '5',
      'retry.backoff.ms': '100',
      'max.retries': '10',
    };
  }

  /**
   * MongoDB Sink Connector (Kafka → MongoDB)
   *
   * Use case: Write Kafka events to MongoDB collections
   */
  static mongodbSink(options: {
    topics: string[];
    connectionUri: string;
    databaseName: string;
    collectionName?: string;
  }): ConnectorConfig {
    return {
      name: `mongodb-sink-${options.topics.join('-')}`,
      'connector.class': 'com.mongodb.kafka.connect.MongoSinkConnector',
      'tasks.max': '1',
      'topics': options.topics.join(','),
      'connection.uri': options.connectionUri,
      'database': options.databaseName,
      'collection': options.collectionName || 'kafka_data',
      'max.num.retries': '3',
      'retries.defer.timeout': '5000',
    };
  }

  /**
   * HTTP Sink Connector (Kafka → REST API)
   *
   * Use case: Send Kafka events to external APIs
   */
  static httpSink(options: {
    topics: string[];
    httpApiUrl: string;
    httpMethod?: 'POST' | 'PUT' | 'PATCH';
    headers?: Record<string, string>;
    batchSize?: number;
  }): ConnectorConfig {
    const config: ConnectorConfig = {
      name: `http-sink-${options.topics.join('-')}`,
      'connector.class': 'io.confluent.connect.http.HttpSinkConnector',
      'tasks.max': '1',
      'topics': options.topics.join(','),
      'http.api.url': options.httpApiUrl,
      'request.method': options.httpMethod || 'POST',
      'batch.max.size': (options.batchSize || 10).toString(),
      'retry.on.status.codes': '500-599',
      'max.retries': '3',
      'retry.backoff.ms': '1000',
    };

    if (options.headers) {
      Object.entries(options.headers).forEach(([key, value], index) => {
        config[`headers.${index}.name`] = key;
        config[`headers.${index}.value`] = value;
      });
    }

    return config;
  }

  /**
   * HDFS Sink Connector (Kafka → Hadoop HDFS)
   *
   * Use case: Archive Kafka data to Hadoop for batch processing
   */
  static hdfsSink(options: {
    topics: string[];
    hdfsUrl: string;
    format: 'avro' | 'parquet' | 'json';
    flushSize?: number;
    rotateIntervalMs?: number;
  }): ConnectorConfig {
    const formatClass =
      options.format === 'avro'
        ? 'io.confluent.connect.hdfs.avro.AvroFormat'
        : options.format === 'parquet'
          ? 'io.confluent.connect.hdfs.parquet.ParquetFormat'
          : 'io.confluent.connect.hdfs.json.JsonFormat';

    return {
      name: `hdfs-sink-${options.topics.join('-')}`,
      'connector.class': 'io.confluent.connect.hdfs.HdfsSinkConnector',
      'tasks.max': '1',
      'topics': options.topics.join(','),
      'hdfs.url': options.hdfsUrl,
      'format.class': formatClass,
      'flush.size': (options.flushSize || 1000).toString(),
      'rotate.interval.ms': (options.rotateIntervalMs || 3600000).toString(),
      'partitioner.class': 'io.confluent.connect.storage.partitioner.TimeBasedPartitioner',
      'path.format': "'year'=YYYY/'month'=MM/'day'=dd/'hour'=HH",
      'locale': 'en-US',
      'timezone': 'UTC',
    };
  }

  /**
   * Snowflake Sink Connector (Kafka → Snowflake)
   *
   * Use case: Stream Kafka data to Snowflake data warehouse
   */
  static snowflakeSink(options: {
    topics: string[];
    snowflakeUrl: string;
    snowflakeUser: string;
    snowflakePrivateKey: string;
    snowflakeDatabase: string;
    snowflakeSchema: string;
  }): ConnectorConfig {
    return {
      name: `snowflake-sink-${options.topics.join('-')}`,
      'connector.class': 'com.snowflake.kafka.connector.SnowflakeSinkConnector',
      'tasks.max': '8',
      'topics': options.topics.join(','),
      'snowflake.url.name': options.snowflakeUrl,
      'snowflake.user.name': options.snowflakeUser,
      'snowflake.private.key': options.snowflakePrivateKey,
      'snowflake.database.name': options.snowflakeDatabase,
      'snowflake.schema.name': options.snowflakeSchema,
      'buffer.count.records': '10000',
      'buffer.flush.time': '60',
      'buffer.size.bytes': '5000000',
    };
  }

  /**
   * BigQuery Sink Connector (Kafka → Google BigQuery)
   *
   * Use case: Stream Kafka data to BigQuery for analytics
   */
  static bigQuerySink(options: {
    topics: string[];
    projectId: string;
    datasetName: string;
    autoCreateTables?: boolean;
  }): ConnectorConfig {
    return {
      name: `bigquery-sink-${options.topics.join('-')}`,
      'connector.class': 'com.wepay.kafka.connect.bigquery.BigQuerySinkConnector',
      'tasks.max': '1',
      'topics': options.topics.join(','),
      'project': options.projectId,
      'defaultDataset': options.datasetName,
      'autoCreateTables': options.autoCreateTables !== false ? 'true' : 'false',
      'autoUpdateSchemas': 'true',
      'sanitizeTopics': 'true',
      'allowNewBigQueryFields': 'true',
      'allowBigQueryRequiredFieldRelaxation': 'true',
    };
  }
}

/**
 * Connector Management Utilities
 */
export class ConnectorManager {
  /**
   * Deploy connector via REST API
   */
  static async deployConnector(
    connectUrl: string,
    config: ConnectorConfig
  ): Promise<void> {
    const response = await fetch(`${connectUrl}/connectors`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ name: config.name, config }),
    });

    if (!response.ok) {
      throw new Error(`Failed to deploy connector: ${await response.text()}`);
    }

    console.log(`✅ Connector deployed: ${config.name}`);
  }

  /**
   * List all connectors
   */
  static async listConnectors(connectUrl: string): Promise<string[]> {
    const response = await fetch(`${connectUrl}/connectors`);
    return response.json();
  }

  /**
   * Get connector status
   */
  static async getConnectorStatus(
    connectUrl: string,
    connectorName: string
  ): Promise<any> {
    const response = await fetch(`${connectUrl}/connectors/${connectorName}/status`);
    return response.json();
  }

  /**
   * Delete connector
   */
  static async deleteConnector(
    connectUrl: string,
    connectorName: string
  ): Promise<void> {
    await fetch(`${connectUrl}/connectors/${connectorName}`, {
      method: 'DELETE',
    });
    console.log(`✅ Connector deleted: ${connectorName}`);
  }
}

/**
 * Example Usage: JDBC Source
 *
 * ```typescript
 * const jdbcSource = ConnectorCatalog.jdbcSource({
 *   connectionUrl: 'jdbc:postgresql://localhost:5432/mydb',
 *   user: 'postgres',
 *   password: 'password',
 *   tableName: 'users',
 *   mode: 'timestamp',
 *   timestampColumn: 'updated_at',
 *   topicPrefix: 'db-',
 * });
 *
 * await ConnectorManager.deployConnector('http://localhost:8083', jdbcSource);
 * ```
 */

/**
 * Example Usage: Debezium CDC
 *
 * ```typescript
 * const debezium = ConnectorCatalog.debeziumMySQL({
 *   hostname: 'mysql.example.com',
 *   port: 3306,
 *   user: 'debezium',
 *   password: 'dbz',
 *   databaseName: 'inventory',
 *   serverId: 12345,
 *   serverName: 'mysql-server',
 *   tableIncludeList: 'inventory.orders,inventory.customers',
 * });
 * ```
 */

/**
 * Connector Best Practices:
 *
 * **Connector Selection**:
 * - Use Debezium for real-time CDC (captures all changes)
 * - Use JDBC for bulk snapshots (initial load)
 * - Use S3/HDFS for long-term archival
 * - Use Elasticsearch for search/analytics
 *
 * **Performance Tuning**:
 * - tasks.max: Set to partition count for parallelism
 * - batch.size: Increase for higher throughput (1000-10000)
 * - flush.size: Balance between latency and throughput
 * - max.poll.records: 500 default, increase for large batches
 *
 * **Error Handling**:
 * - errors.tolerance: none (default, fail fast) vs all (skip bad records)
 * - errors.deadletterqueue.topic.name: Route bad records to DLQ
 * - max.retries: 3-10 for transient failures
 * - retry.backoff.ms: Exponential backoff (100-1000ms)
 *
 * **Monitoring**:
 * - Connector status (RUNNING, FAILED, PAUSED)
 * - Task status (per-task monitoring)
 * - Lag (source connectors)
 * - Throughput (records/sec)
 * - Error count
 *
 * **Security**:
 * - Use secrets management (HashiCorp Vault, AWS Secrets Manager)
 * - Never commit credentials to version control
 * - Use service accounts with minimal permissions
 * - Enable SSL/TLS for database connections
 */

export default {
  ConnectorCatalog,
  ConnectorManager,
  ConnectorCategory,
};
````
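For orientation, here is a minimal usage sketch of the catalog and manager above. The topic name, bucket, region, and Connect REST URL are illustrative placeholders rather than values shipped in the package, and a global `fetch` (Node 18+) is assumed, since the module itself relies on it.

```typescript
// Sketch only: topic, bucket, region, and the Connect REST URL are assumptions.
import { ConnectorCatalog, ConnectorManager } from './connector-catalog';

async function archiveOrdersToS3(): Promise<void> {
  const connectUrl = 'http://localhost:8083'; // assumed local Kafka Connect REST endpoint

  // Build an S3 sink that archives the `orders` topic as Parquet, flushing every 5000 records.
  const s3Sink = ConnectorCatalog.s3Sink({
    topics: ['orders'],
    s3BucketName: 'my-kafka-archive',
    s3Region: 'eu-west-1',
    format: 'parquet',
    flushSize: 5000,
  });

  // POST the configuration to the standard Kafka Connect REST API.
  await ConnectorManager.deployConnector(connectUrl, s3Sink);

  // Read back the connector state (RUNNING, FAILED, PAUSED) from /connectors/{name}/status.
  const status = await ConnectorManager.getConnectorStatus(connectUrl, s3Sink.name);
  console.log(`Connector ${s3Sink.name} state: ${status.connector?.state}`);
}

archiveOrdersToS3().catch(console.error);
```

Because deployment goes through the plain Kafka Connect REST API, the same sketch works against any reachable Connect cluster; only the URL and credentials handling would change.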
New file `package/plugins/specweave-kafka/lib/documentation/diagram-generator.js` (+114 lines):

@@ -0,0 +1,114 @@

````javascript
class DiagramGenerator {
  /**
   * Generate data flow diagram
   */
  static generateDataFlow(options) {
    const lines = [];
    lines.push("```mermaid");
    lines.push("graph LR");
    for (const producer of options.producers) {
      const producerId = producer.name.replace(/[^a-zA-Z0-9]/g, "_");
      lines.push(` ${producerId}[${producer.name}]:::producer`);
      for (const topic of producer.topics) {
        const topicId = topic.replace(/[^a-zA-Z0-9]/g, "_");
        lines.push(` ${producerId} -->|produce| T_${topicId}`);
      }
    }
    for (const topic of options.topics) {
      const topicId = topic.replace(/[^a-zA-Z0-9]/g, "_");
      lines.push(` T_${topicId}[(${topic})]:::topic`);
    }
    for (const consumer of options.consumers) {
      const consumerId = consumer.name.replace(/[^a-zA-Z0-9]/g, "_");
      lines.push(` ${consumerId}[${consumer.name}]:::consumer`);
      for (const topic of consumer.topics) {
        const topicId = topic.replace(/[^a-zA-Z0-9]/g, "_");
        lines.push(` T_${topicId} -->|consume| ${consumerId}`);
      }
    }
    lines.push("");
    lines.push(" classDef producer fill:#90EE90,stroke:#228B22,stroke-width:2px");
    lines.push(" classDef topic fill:#87CEEB,stroke:#4682B4,stroke-width:2px");
    lines.push(" classDef consumer fill:#FFB6C1,stroke:#FF69B4,stroke-width:2px");
    lines.push("```");
    return lines.join("\n");
  }
  /**
   * Generate architecture diagram
   */
  static generateArchitecture(options) {
    const lines = [];
    lines.push("```mermaid");
    lines.push("graph TB");
    lines.push(" subgraph Kafka Cluster");
    for (let i = 1; i <= options.brokers; i++) {
      lines.push(` B${i}[Broker ${i}]`);
    }
    lines.push(" end");
    if (options.zookeeper) {
      lines.push(" subgraph ZooKeeper Ensemble");
      lines.push(" ZK1[ZooKeeper 1]");
      lines.push(" ZK2[ZooKeeper 2]");
      lines.push(" ZK3[ZooKeeper 3]");
      lines.push(" end");
      for (let i = 1; i <= options.brokers; i++) {
        lines.push(` B${i} --> ZK1`);
      }
    }
    if (options.schemaRegistry) {
      lines.push(" SR[Schema Registry]:::schemaRegistry");
      lines.push(" SR --> B1");
    }
    if (options.connectCluster) {
      lines.push(" subgraph Kafka Connect Cluster");
      lines.push(" C1[Connect Worker 1]");
      lines.push(" C2[Connect Worker 2]");
      lines.push(" end");
      lines.push(" C1 --> B1");
      lines.push(" C2 --> B1");
    }
    if (options.ksqlDB) {
      lines.push(" KSQL[ksqlDB Server]:::ksqlDB");
      lines.push(" KSQL --> B1");
    }
    lines.push("");
    lines.push(" classDef schemaRegistry fill:#FFA500,stroke:#FF8C00,stroke-width:2px");
    lines.push(" classDef ksqlDB fill:#9370DB,stroke:#8B008B,stroke-width:2px");
    lines.push("```");
    return lines.join("\n");
  }
  /**
   * Generate multi-DC replication diagram
   */
  static generateMultiDCReplication(options) {
    const lines = [];
    lines.push("```mermaid");
    lines.push("graph LR");
    if (options.topology === "active-passive") {
      lines.push(` DC1[${options.dataCenters[0]}<br/>Primary]:::primary`);
      lines.push(` DC2[${options.dataCenters[1]}<br/>Standby]:::standby`);
      lines.push(" DC1 -->|MirrorMaker 2| DC2");
      lines.push(" Producers[Producers] --> DC1");
      lines.push(" Consumers[Consumers] --> DC1");
    } else {
      lines.push(` DC1[${options.dataCenters[0]}<br/>Active]:::active`);
      lines.push(` DC2[${options.dataCenters[1]}<br/>Active]:::active`);
      lines.push(" DC1 <-->|Bidirectional<br/>Replication| DC2");
      lines.push(" Producers1[Producers] --> DC1");
      lines.push(" Producers2[Producers] --> DC2");
      lines.push(" Consumers1[Consumers] --> DC1");
      lines.push(" Consumers2[Consumers] --> DC2");
    }
    lines.push("");
    lines.push(" classDef primary fill:#90EE90,stroke:#228B22,stroke-width:3px");
    lines.push(" classDef standby fill:#D3D3D3,stroke:#808080,stroke-width:2px");
    lines.push(" classDef active fill:#87CEEB,stroke:#4682B4,stroke-width:3px");
    lines.push("```");
    return lines.join("\n");
  }
}
var diagram_generator_default = DiagramGenerator;
export {
  DiagramGenerator,
  diagram_generator_default as default
};
````
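A quick sketch of how the generator above might be invoked. The service and topic names are invented for illustration, and the relative import path assumes the caller sits next to `diagram-generator.js`.

```typescript
// Illustrative only: names and the import path are assumptions, not package defaults.
import { DiagramGenerator } from './diagram-generator.js';

const mermaid = DiagramGenerator.generateDataFlow({
  producers: [{ name: 'checkout-service', topics: ['orders'] }],
  topics: ['orders', 'payments'],
  consumers: [{ name: 'billing-service', topics: ['orders', 'payments'] }],
});

// Returns a fenced Mermaid graph as a string: node IDs are sanitized
// ("checkout-service" becomes checkout_service) and edges look like
// checkout_service -->|produce| T_orders and T_orders -->|consume| billing_service.
console.log(mermaid);
```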