specweave 0.30.13 → 0.30.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116) hide show
  1. package/.claude-plugin/marketplace.json +0 -11
  2. package/CLAUDE.md +1 -1
  3. package/README.md +32 -0
  4. package/bin/fix-marketplace-errors.sh +1 -1
  5. package/bin/specweave.js +28 -0
  6. package/dist/src/cli/commands/commits.d.ts +7 -0
  7. package/dist/src/cli/commands/commits.d.ts.map +1 -0
  8. package/dist/src/cli/commands/commits.js +42 -0
  9. package/dist/src/cli/commands/commits.js.map +1 -0
  10. package/dist/src/cli/commands/living-docs.d.ts +29 -0
  11. package/dist/src/cli/commands/living-docs.d.ts.map +1 -0
  12. package/dist/src/cli/commands/living-docs.js +350 -0
  13. package/dist/src/cli/commands/living-docs.js.map +1 -0
  14. package/dist/src/cli/helpers/ado-area-selector.js +1 -1
  15. package/dist/src/cli/helpers/ado-area-selector.js.map +1 -1
  16. package/dist/src/core/background/index.d.ts +2 -2
  17. package/dist/src/core/background/index.d.ts.map +1 -1
  18. package/dist/src/core/background/index.js +1 -1
  19. package/dist/src/core/background/index.js.map +1 -1
  20. package/dist/src/core/living-docs/living-docs-sync.d.ts +34 -10
  21. package/dist/src/core/living-docs/living-docs-sync.d.ts.map +1 -1
  22. package/dist/src/core/living-docs/living-docs-sync.js +223 -32
  23. package/dist/src/core/living-docs/living-docs-sync.js.map +1 -1
  24. package/dist/src/importers/ado-importer.js +2 -2
  25. package/dist/src/importers/ado-importer.js.map +1 -1
  26. package/dist/src/importers/item-converter.d.ts +6 -1
  27. package/dist/src/importers/item-converter.d.ts.map +1 -1
  28. package/dist/src/importers/item-converter.js +15 -2
  29. package/dist/src/importers/item-converter.js.map +1 -1
  30. package/dist/src/integrations/ado/ado-pat-provider.d.ts +3 -3
  31. package/dist/src/integrations/ado/ado-pat-provider.js +3 -3
  32. package/dist/src/living-docs/epic-id-allocator.d.ts +1 -1
  33. package/dist/src/living-docs/epic-id-allocator.js +1 -1
  34. package/dist/src/living-docs/fs-id-allocator.d.ts +1 -1
  35. package/dist/src/living-docs/fs-id-allocator.js +1 -1
  36. package/dist/src/living-docs/smart-doc-organizer.js +1 -1
  37. package/dist/src/living-docs/smart-doc-organizer.js.map +1 -1
  38. package/dist/src/utils/auth-helpers.d.ts +23 -0
  39. package/dist/src/utils/auth-helpers.d.ts.map +1 -1
  40. package/dist/src/utils/auth-helpers.js +51 -0
  41. package/dist/src/utils/auth-helpers.js.map +1 -1
  42. package/dist/src/utils/feature-id-collision.d.ts +48 -5
  43. package/dist/src/utils/feature-id-collision.d.ts.map +1 -1
  44. package/dist/src/utils/feature-id-collision.js +251 -19
  45. package/dist/src/utils/feature-id-collision.js.map +1 -1
  46. package/dist/src/utils/validators/ado-validator.js +2 -2
  47. package/dist/src/utils/validators/ado-validator.js.map +1 -1
  48. package/package.json +12 -13
  49. package/plugins/PLUGINS-INDEX.md +2 -3
  50. package/plugins/specweave/commands/specweave-living-docs.md +321 -0
  51. package/plugins/specweave/commands/specweave-organize-docs.md +3 -3
  52. package/plugins/specweave/hooks/v2/handlers/github-sync-handler.sh +10 -1
  53. package/plugins/specweave/hooks/v2/handlers/living-docs-handler.sh +10 -1
  54. package/plugins/specweave-ado/agents/ado-manager/AGENT.md +58 -0
  55. package/plugins/specweave-ado/commands/{specweave-ado-close-workitem.md → close.md} +9 -5
  56. package/plugins/specweave-ado/commands/{specweave-ado-create-workitem.md → create.md} +9 -5
  57. package/plugins/specweave-ado/commands/pull.md +489 -0
  58. package/plugins/specweave-ado/commands/push.md +391 -0
  59. package/plugins/specweave-ado/commands/{specweave-ado-status.md → status.md} +12 -0
  60. package/plugins/specweave-ado/commands/{specweave-ado-sync.md → sync.md} +95 -3
  61. package/plugins/specweave-ado/hooks/README.md +1 -1
  62. package/plugins/specweave-docs/commands/generate.md +3 -3
  63. package/plugins/specweave-docs/commands/init.md +4 -4
  64. package/plugins/specweave-docs/commands/preview.md +5 -5
  65. package/plugins/specweave-github/agents/github-manager/AGENT.md +22 -0
  66. package/plugins/specweave-github/agents/user-story-updater/AGENT.md +1 -1
  67. package/plugins/specweave-github/commands/{specweave-github-close-issue.md → close.md} +2 -2
  68. package/plugins/specweave-github/commands/{specweave-github-create-issue.md → create.md} +2 -2
  69. package/plugins/specweave-github/commands/pull.md +142 -0
  70. package/plugins/specweave-github/commands/push.md +154 -0
  71. package/plugins/specweave-github/commands/{specweave-github-sync.md → sync.md} +19 -5
  72. package/plugins/specweave-github/commands/{specweave-github-update-user-story.md → update-user-story.md} +1 -1
  73. package/plugins/specweave-github/hooks/README.md +1 -1
  74. package/plugins/specweave-jira/agents/jira-manager/AGENT.md +30 -0
  75. package/plugins/specweave-jira/commands/pull.md +164 -0
  76. package/plugins/specweave-jira/commands/push.md +170 -0
  77. package/plugins/specweave-jira/commands/{specweave-jira-sync.md → sync.md} +18 -3
  78. package/plugins/specweave-jira/hooks/README.md +1 -1
  79. package/plugins/specweave-kafka/README.md +20 -0
  80. package/plugins/specweave-kafka/benchmarks/kafka-throughput.benchmark.ts +551 -0
  81. package/plugins/specweave-kafka/examples/README.md +191 -0
  82. package/plugins/specweave-kafka/examples/avro-schema-registry/.env.example +8 -0
  83. package/plugins/specweave-kafka/examples/avro-schema-registry/README.md +69 -0
  84. package/plugins/specweave-kafka/examples/avro-schema-registry/consumer.js +37 -0
  85. package/plugins/specweave-kafka/examples/avro-schema-registry/package.json +14 -0
  86. package/plugins/specweave-kafka/examples/avro-schema-registry/producer.js +57 -0
  87. package/plugins/specweave-kafka/examples/exactly-once-semantics/.env.example +5 -0
  88. package/plugins/specweave-kafka/examples/exactly-once-semantics/README.md +30 -0
  89. package/plugins/specweave-kafka/examples/exactly-once-semantics/eos-pipeline.js +79 -0
  90. package/plugins/specweave-kafka/examples/exactly-once-semantics/package.json +11 -0
  91. package/plugins/specweave-kafka/examples/kafka-streams-app/.env.example +4 -0
  92. package/plugins/specweave-kafka/examples/kafka-streams-app/README.md +30 -0
  93. package/plugins/specweave-kafka/examples/kafka-streams-app/package.json +11 -0
  94. package/plugins/specweave-kafka/examples/kafka-streams-app/windowed-aggregation.js +66 -0
  95. package/plugins/specweave-kafka/examples/n8n-workflow/README.md +54 -0
  96. package/plugins/specweave-kafka/examples/n8n-workflow/docker-compose.yml +19 -0
  97. package/plugins/specweave-kafka/examples/n8n-workflow/kafka-to-slack.json +50 -0
  98. package/plugins/specweave-kafka/examples/simple-producer-consumer/.env.example +15 -0
  99. package/plugins/specweave-kafka/examples/simple-producer-consumer/README.md +183 -0
  100. package/plugins/specweave-kafka/examples/simple-producer-consumer/consumer.js +60 -0
  101. package/plugins/specweave-kafka/examples/simple-producer-consumer/docker-compose.yml +30 -0
  102. package/plugins/specweave-kafka/examples/simple-producer-consumer/package.json +18 -0
  103. package/plugins/specweave-kafka/examples/simple-producer-consumer/producer.js +52 -0
  104. package/plugins/specweave-release/commands/specweave-release-npm.md +4 -4
  105. package/plugins/specweave-docs-preview/.claude-plugin/plugin.json +0 -21
  106. package/plugins/specweave-docs-preview/commands/build.md +0 -489
  107. package/plugins/specweave-docs-preview/commands/preview.md +0 -355
  108. package/plugins/specweave-docs-preview/skills/docs-preview/SKILL.md +0 -386
  109. /package/plugins/specweave-ado/commands/{specweave-ado-clone-repos.md → clone.md} +0 -0
  110. /package/plugins/specweave-ado/commands/{specweave-ado-import-areas.md → import-areas.md} +0 -0
  111. /package/plugins/specweave-ado/commands/{specweave-ado-import-projects.md → import-projects.md} +0 -0
  112. /package/plugins/specweave-github/commands/{specweave-github-cleanup-duplicates.md → cleanup-duplicates.md} +0 -0
  113. /package/plugins/specweave-github/commands/{specweave-github-reconcile.md → reconcile.md} +0 -0
  114. /package/plugins/specweave-github/commands/{specweave-github-status.md → status.md} +0 -0
  115. /package/plugins/specweave-jira/commands/{specweave-jira-import-boards.md → import-boards.md} +0 -0
  116. /package/plugins/specweave-jira/commands/{specweave-jira-import-projects.md → import-projects-full.md} +0 -0
@@ -0,0 +1,50 @@
1
+ {
2
+ "name": "Kafka to Slack Notification",
3
+ "nodes": [
4
+ {
5
+ "parameters": {
6
+ "topic": "user-events",
7
+ "brokers": "localhost:9092",
8
+ "groupId": "n8n-consumer"
9
+ },
10
+ "name": "Kafka Trigger",
11
+ "type": "n8n-nodes-base.kafkaTrigger",
12
+ "typeVersion": 1,
13
+ "position": [250, 300]
14
+ },
15
+ {
16
+ "parameters": {
17
+ "conditions": {
18
+ "string": [
19
+ {
20
+ "value1": "={{$json[\"event_type\"]}}",
21
+ "value2": "purchase"
22
+ }
23
+ ]
24
+ }
25
+ },
26
+ "name": "Filter Purchases",
27
+ "type": "n8n-nodes-base.if",
28
+ "typeVersion": 1,
29
+ "position": [450, 300]
30
+ },
31
+ {
32
+ "parameters": {
33
+ "channel": "#sales",
34
+ "text": "New purchase: {{$json[\"amount\"]}} from user {{$json[\"userId\"]}}"
35
+ },
36
+ "name": "Slack Notification",
37
+ "type": "n8n-nodes-base.slack",
38
+ "typeVersion": 1,
39
+ "position": [650, 300]
40
+ }
41
+ ],
42
+ "connections": {
43
+ "Kafka Trigger": {
44
+ "main": [[{"node": "Filter Purchases", "type": "main", "index": 0}]]
45
+ },
46
+ "Filter Purchases": {
47
+ "main": [[{"node": "Slack Notification", "type": "main", "index": 0}]]
48
+ }
49
+ }
50
+ }
@@ -0,0 +1,15 @@
1
+ # Kafka Configuration
2
+ KAFKA_BROKERS=localhost:9092
3
+ KAFKA_CLIENT_ID=simple-example
4
+ KAFKA_TOPIC=demo-topic
5
+ KAFKA_GROUP_ID=demo-group
6
+
7
+ # For multiple brokers, use a comma-separated list:
8
+ # KAFKA_BROKERS=broker1:9092,broker2:9092,broker3:9092
9
+
10
+ # For Confluent Cloud or secure clusters:
11
+ # KAFKA_BROKERS=pkc-xxxxx.us-east-1.aws.confluent.cloud:9092
12
+ # KAFKA_USERNAME=your-api-key
13
+ # KAFKA_PASSWORD=your-api-secret
14
+ # KAFKA_MECHANISM=plain
15
+ # KAFKA_SSL=true
@@ -0,0 +1,183 @@
1
+ # Simple Producer-Consumer Example
2
+
3
+ **Basic Kafka producer and consumer with Node.js**
4
+
5
+ This example demonstrates the simplest way to produce and consume messages with Kafka using KafkaJS.
6
+
7
+ ## Prerequisites
8
+
9
+ - Node.js 18+
10
+ - A Kafka cluster running (local or remote)
11
+ - Broker accessible at `localhost:9092` (or update `.env`)
12
+
13
+ ## Setup
14
+
15
+ ```bash
16
+ # Install dependencies
17
+ npm install
18
+
19
+ # Configure environment
20
+ cp .env.example .env
21
+ # Edit .env with your Kafka broker addresses
22
+
23
+ # Start Kafka locally (if needed)
24
+ docker-compose up -d
25
+ ```
26
+
27
+ ## Run
28
+
29
+ ### Terminal 1: Start Consumer
30
+
31
+ ```bash
32
+ npm run consumer
33
+ ```
34
+
35
+ Expected output:
36
+ ```
37
+ ✓ Consumer connected to Kafka
38
+ ✓ Subscribed to topic: demo-topic
39
+ ⏳ Waiting for messages...
40
+
41
+ 📥 Received message:
42
+ Topic: demo-topic
43
+ Partition: 0
44
+ Offset: 0
45
+ Key: user-123
46
+ Value: {"message":"Hello Kafka!","timestamp":"2025-11-15T..."}
47
+ ```
48
+
49
+ ### Terminal 2: Send Messages
50
+
51
+ ```bash
52
+ npm run producer
53
+ ```
54
+
55
+ Expected output:
56
+ ```
57
+ ✓ Producer connected to Kafka
58
+ 📤 Sending message...
59
+ ✅ Message sent successfully!
60
+ Topic: demo-topic
61
+ Partition: 0
62
+ Offset: 0
63
+ ```
64
+
65
+ ## Files
66
+
67
+ - **producer.js** - Sends a test message to Kafka
68
+ - **consumer.js** - Consumes messages from Kafka topic
69
+ - **package.json** - Dependencies and scripts
70
+ - **.env.example** - Environment variables template
71
+ - **docker-compose.yml** - Local Kafka cluster (optional)
72
+
73
+ ## Code Walkthrough
74
+
75
+ ### Producer (producer.js)
76
+
77
+ ```javascript
78
+ const { Kafka } = require('kafkajs');
79
+
80
+ // 1. Create Kafka client
81
+ const kafka = new Kafka({
82
+ clientId: 'my-producer',
83
+ brokers: ['localhost:9092']
84
+ });
85
+
86
+ // 2. Create producer
87
+ const producer = kafka.producer();
88
+
89
+ // 3. Connect and send
90
+ async function run() {
91
+ await producer.connect();
92
+
93
+ await producer.send({
94
+ topic: 'demo-topic',
95
+ messages: [{
96
+ key: 'user-123',
97
+ value: JSON.stringify({ message: 'Hello Kafka!', timestamp: new Date() })
98
+ }]
99
+ });
100
+
101
+ await producer.disconnect();
102
+ }
103
+ ```
104
+
105
+ ### Consumer (consumer.js)
106
+
107
+ ```javascript
108
+ const { Kafka } = require('kafkajs');
109
+
110
+ // 1. Create Kafka client
111
+ const kafka = new Kafka({
112
+ clientId: 'my-consumer',
113
+ brokers: ['localhost:9092']
114
+ });
115
+
116
+ // 2. Create consumer with group ID
117
+ const consumer = kafka.consumer({ groupId: 'demo-group' });
118
+
119
+ // 3. Subscribe and consume
120
+ async function run() {
121
+ await consumer.connect();
122
+ await consumer.subscribe({ topic: 'demo-topic', fromBeginning: true });
123
+
124
+ await consumer.run({
125
+ eachMessage: async ({ topic, partition, message }) => {
126
+ console.log({
127
+ topic,
128
+ partition,
129
+ offset: message.offset,
130
+ key: message.key?.toString(),
131
+ value: message.value?.toString()
132
+ });
133
+ }
134
+ });
135
+ }
136
+ ```
137
+
138
+ ## Troubleshooting
139
+
140
+ ### Error: Connection refused
141
+
142
+ ```bash
143
+ # Check if Kafka is running
144
+ docker ps | grep kafka
145
+
146
+ # If not running, start it
147
+ docker-compose up -d
148
+
149
+ # Wait for Kafka to be ready (~30 seconds)
150
+ docker logs kafka-broker -f
151
+ ```
152
+
153
+ ### Error: Topic does not exist
154
+
155
+ ```bash
156
+ # Create topic manually
157
+ kafka-topics --bootstrap-server localhost:9092 \
158
+ --create --topic demo-topic \
159
+ --partitions 3 --replication-factor 1
160
+ ```
161
+
162
+ ### Consumer not receiving messages
163
+
164
+ ```bash
165
+ # Reset consumer group offsets
166
+ kafka-consumer-groups --bootstrap-server localhost:9092 \
167
+ --group demo-group \
168
+ --topic demo-topic \
169
+ --reset-offsets --to-earliest \
170
+ --execute
171
+ ```
172
+
173
+ ## Next Steps
174
+
175
+ - Try [avro-schema-registry](../avro-schema-registry/) - Add schema validation
176
+ - Try [exactly-once-semantics](../exactly-once-semantics/) - Process each message exactly once (no loss, no duplicates)
177
+ - Try [kafka-streams-app](../kafka-streams-app/) - Real-time stream processing
178
+
179
+ ## Documentation
180
+
181
+ - [KafkaJS Documentation](https://kafka.js.org/)
182
+ - [Apache Kafka Documentation](https://kafka.apache.org/documentation/)
183
+ - [SpecWeave Getting Started Guide](../../.specweave/docs/public/guides/kafka-getting-started.md)
@@ -0,0 +1,60 @@
1
+ require('dotenv').config();
2
+ const { Kafka } = require('kafkajs');
3
+
4
+ const kafka = new Kafka({
5
+ clientId: process.env.KAFKA_CLIENT_ID || 'my-consumer',
6
+ brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
7
+ });
8
+
9
+ const consumer = kafka.consumer({
10
+ groupId: process.env.KAFKA_GROUP_ID || 'demo-group'
11
+ });
12
+
13
+ async function run() {
14
+ try {
15
+ // Connect consumer
16
+ await consumer.connect();
17
+ console.log('✓ Consumer connected to Kafka');
18
+
19
+ // Subscribe to topic
20
+ const topic = process.env.KAFKA_TOPIC || 'demo-topic';
21
+ await consumer.subscribe({ topic, fromBeginning: true });
22
+ console.log('✓ Subscribed to topic:', topic);
23
+ console.log('⏳ Waiting for messages...\n');
24
+
25
+ // Consume messages
26
+ await consumer.run({
27
+ eachMessage: async ({ topic, partition, message }) => {
28
+ console.log('📥 Received message:');
29
+ console.log(' Topic:', topic);
30
+ console.log(' Partition:', partition);
31
+ console.log(' Offset:', message.offset);
32
+ console.log(' Key:', message.key?.toString());
33
+ console.log(' Value:', message.value?.toString());
34
+
35
+ // Parse headers
36
+ if (message.headers) {
37
+ console.log(' Headers:');
38
+ Object.entries(message.headers).forEach(([key, value]) => {
39
+ console.log(` ${key}:`, value.toString());
40
+ });
41
+ }
42
+
43
+ console.log(''); // Blank line between messages
44
+ }
45
+ });
46
+
47
+ } catch (error) {
48
+ console.error('❌ Error:', error);
49
+ }
50
+ }
51
+
52
+ // Graceful shutdown
53
+ process.on('SIGINT', async () => {
54
+ console.log('\n⏸️ Shutting down consumer...');
55
+ await consumer.disconnect();
56
+ console.log('✓ Consumer disconnected');
57
+ process.exit(0);
58
+ });
59
+
60
+ run().catch(console.error);
@@ -0,0 +1,30 @@
1
+ version: '3.8'
2
+
3
+ services:
4
+ kafka-broker:
5
+ image: confluentinc/cp-kafka:7.5.0
6
+ hostname: kafka-broker
7
+ container_name: kafka-broker
8
+ ports:
9
+ - "9092:9092"
10
+ - "9101:9101"
11
+ environment:
12
+ KAFKA_NODE_ID: 1
13
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
14
+ KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafka-broker:29092,PLAINTEXT_HOST://localhost:9092'
15
+ KAFKA_PROCESS_ROLES: 'broker,controller'
16
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
17
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka-broker:29093'
18
+ KAFKA_LISTENERS: 'PLAINTEXT://kafka-broker:29092,CONTROLLER://kafka-broker:29093,PLAINTEXT_HOST://0.0.0.0:9092'
19
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
20
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
21
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
22
+ CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
23
+ KAFKA_JMX_PORT: 9101
24
+ KAFKA_JMX_HOSTNAME: localhost
25
+ volumes:
26
+ - kafka-data:/var/lib/kafka/data
27
+
28
+ volumes:
29
+ kafka-data:
30
+ driver: local
@@ -0,0 +1,18 @@
1
+ {
2
+ "name": "kafka-simple-producer-consumer",
3
+ "version": "1.0.0",
4
+ "description": "Simple Kafka producer and consumer example",
5
+ "main": "producer.js",
6
+ "scripts": {
7
+ "producer": "node producer.js",
8
+ "consumer": "node consumer.js",
9
+ "start": "npm run consumer"
10
+ },
11
+ "keywords": ["kafka", "producer", "consumer", "example"],
12
+ "author": "SpecWeave",
13
+ "license": "MIT",
14
+ "dependencies": {
15
+ "kafkajs": "^2.2.4",
16
+ "dotenv": "^16.3.1"
17
+ }
18
+ }
@@ -0,0 +1,52 @@
1
+ require('dotenv').config();
2
+ const { Kafka } = require('kafkajs');
3
+
4
+ const kafka = new Kafka({
5
+ clientId: process.env.KAFKA_CLIENT_ID || 'my-producer',
6
+ brokers: (process.env.KAFKA_BROKERS || 'localhost:9092').split(',')
7
+ });
8
+
9
+ const producer = kafka.producer();
10
+
11
+ async function run() {
12
+ try {
13
+ // Connect producer
14
+ await producer.connect();
15
+ console.log('✓ Producer connected to Kafka');
16
+
17
+ // Prepare message
18
+ const message = {
19
+ message: 'Hello Kafka!',
20
+ timestamp: new Date().toISOString(),
21
+ source: 'simple-producer'
22
+ };
23
+
24
+ console.log('\n📤 Sending message...');
25
+
26
+ // Send message
27
+ const result = await producer.send({
28
+ topic: process.env.KAFKA_TOPIC || 'demo-topic',
29
+ messages: [{
30
+ key: 'user-123',
31
+ value: JSON.stringify(message),
32
+ headers: {
33
+ 'correlation-id': '12345',
34
+ 'content-type': 'application/json'
35
+ }
36
+ }]
37
+ });
38
+
39
+ console.log('✅ Message sent successfully!');
40
+ console.log(' Topic:', result[0].topicName);
41
+ console.log(' Partition:', result[0].partition);
42
+ console.log(' Offset:', result[0].baseOffset);
43
+
44
+ } catch (error) {
45
+ console.error('❌ Error:', error);
46
+ } finally {
47
+ await producer.disconnect();
48
+ console.log('\n✓ Producer disconnected');
49
+ }
50
+ }
51
+
52
+ run().catch(console.error);
@@ -318,9 +318,9 @@ Show the user:
318
318
  ## Direct Mode Safety Rules
319
319
 
320
320
  - ✅ ALWAYS rebuild before publishing (`npm run rebuild`)
321
- - ✅ Use `--only` for emergency/quick releases or local testing
322
- - ✅ Default mode (GitHub Actions) is preferred for regular releases
323
- - ✅ Direct mode gives immediate feedback (no CI wait time)
321
+ - ✅ Use `--only` when you want to publish but push git later
322
+ - ✅ Default mode (no flags) is preferred - auto-commits, publishes, and pushes
323
+ - ✅ Direct mode gives control over git push timing
324
324
  - ⚠️ Remember to push git changes later to sync GitHub
325
325
 
326
326
  ## Success Criteria (Direct Mode)
@@ -415,7 +415,7 @@ Show the user:
415
415
  5. Publish: `npm publish --registry https://registry.npmjs.org`
416
416
  6. Push: `git push origin develop --follow-tags`
417
417
 
418
- **Or use**: `/specweave-release:npm --only --push` for full release
418
+ **Or use**: `/specweave-release:npm` (no flags) for full instant release
419
419
  ```
420
420
 
421
421
  ## Local Mode Safety Rules
@@ -1,21 +0,0 @@
1
- {
2
- "name": "specweave-docs-preview",
3
- "description": "Interactive documentation preview with Docusaurus. Launch local dev server to view living documentation in beautiful UI with hot reload, auto-generated sidebar, and Mermaid diagrams. Build static sites for deployment.",
4
- "version": "0.24.0",
5
- "author": {
6
- "name": "SpecWeave Team",
7
- "url": "https://spec-weave.com"
8
- },
9
- "homepage": "https://spec-weave.com",
10
- "repository": "https://github.com/anton-abyzov/specweave",
11
- "license": "MIT",
12
- "keywords": [
13
- "docusaurus",
14
- "documentation",
15
- "preview",
16
- "docs-server",
17
- "hot-reload",
18
- "static-site",
19
- "specweave"
20
- ]
21
- }