claude-flow-novice 2.15.1 → 2.15.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/cfn-extras/agents/google-sheets-specialist.md +614 -0
- package/.claude/commands/cfn/create-handoff.md +224 -0
- package/.claude/hooks/cfn-BACKUP_USAGE.md +243 -243
- package/.claude/hooks/cfn-invoke-security-validation.sh +69 -69
- package/.claude/hooks/cfn-post-edit-cfn-retrospective.sh +78 -78
- package/.claude/hooks/cfn-post-edit.config.json +44 -44
- package/.claude/skills/agent-lifecycle/SKILL.md +60 -0
- package/.claude/skills/agent-lifecycle/execute-lifecycle-hook.sh +573 -0
- package/.claude/skills/agent-lifecycle/simple-audit.sh +31 -0
- package/.claude/skills/cfn-hybrid-routing/check-dependencies.sh +51 -51
- package/.claude/skills/cfn-loop-validation/orchestrate-cfn-loop.sh +252 -252
- package/.claude/skills/cfn-redis-coordination/agent-recovery.sh +74 -74
- package/.claude/skills/cfn-redis-coordination/get-context.sh +112 -112
- package/.claude/skills/cfn-transparency-middleware/middleware-config.sh +28 -28
- package/.claude/skills/cfn-transparency-middleware/performance-benchmark.sh +78 -78
- package/.claude/skills/cfn-transparency-middleware/test-integration.sh +161 -161
- package/.claude/skills/cfn-transparency-middleware/test-transparency-skill.sh +367 -367
- package/.claude/skills/cfn-transparency-middleware/tests/input-validation.sh +92 -92
- package/.claude/skills/cfn-transparency-middleware/wrap-agent.sh +131 -131
- package/claude-assets/agents/cfn-dev-team/coordinators/handoff-coordinator.md +662 -0
- package/claude-assets/agents/cfn-dev-team/dev-ops/docker-specialist.md +29 -0
- package/claude-assets/cfn-extras/agents/google-sheets-specialist.md +614 -0
- package/claude-assets/commands/cfn/create-handoff.md +224 -0
- package/claude-assets/hooks/cfn-BACKUP_USAGE.md +243 -243
- package/claude-assets/hooks/cfn-invoke-security-validation.sh +69 -69
- package/claude-assets/hooks/cfn-post-edit-cfn-retrospective.sh +78 -78
- package/claude-assets/hooks/cfn-post-edit.config.json +44 -44
- package/claude-assets/hooks/cfn-post-execution/memory-cleanup.sh +19 -19
- package/claude-assets/hooks/cfn-pre-execution/memory-check.sh +19 -19
- package/claude-assets/skills/agent-lifecycle/execute-lifecycle-hook.sh +572 -572
- package/claude-assets/skills/agent-lifecycle/simple-audit.sh +30 -30
- package/claude-assets/skills/cfn-automatic-memory-persistence/persist-agent-output.sh +48 -48
- package/claude-assets/skills/cfn-automatic-memory-persistence/query-agent-history.sh +34 -34
- package/claude-assets/skills/cfn-deliverable-validation/confidence-calculator.sh +261 -261
- package/claude-assets/skills/cfn-expert-update/update-expert.sh +345 -345
- package/claude-assets/skills/cfn-hybrid-routing/check-dependencies.sh +51 -51
- package/claude-assets/skills/cfn-intervention-detector/detect-intervention.sh +110 -110
- package/claude-assets/skills/cfn-intervention-orchestrator/execute-intervention.sh +58 -58
- package/claude-assets/skills/cfn-loop-validation/orchestrate-cfn-loop.sh +252 -252
- package/claude-assets/skills/cfn-loop2-output-processing/process-validator-output.sh +275 -275
- package/claude-assets/skills/cfn-memory-management/check-memory.sh +159 -159
- package/claude-assets/skills/cfn-memory-management/cleanup-memory.sh +196 -196
- package/claude-assets/skills/cfn-node-heap-sizer/task-mode-heap-limiter.sh +325 -325
- package/claude-assets/skills/cfn-playbook-auto-update/auto-update-playbook.sh +85 -85
- package/claude-assets/skills/cfn-redis-coordination/agent-recovery.sh +74 -74
- package/claude-assets/skills/cfn-redis-coordination/get-context.sh +112 -112
- package/claude-assets/skills/cfn-scope-simplifier/simplify-scope.sh +67 -67
- package/claude-assets/skills/cfn-specialist-injection/recommend-specialist.sh +56 -56
- package/claude-assets/skills/cfn-standardized-error-handling/capture-agent-error.sh +86 -86
- package/claude-assets/skills/cfn-standardized-error-handling/test-error-handling.sh +165 -165
- package/claude-assets/skills/cfn-task-config-init/initialize-config.sh +264 -264
- package/claude-assets/skills/cfn-task-decomposition/task-decomposer.sh +278 -278
- package/claude-assets/skills/cfn-transparency-middleware/middleware-config.sh +28 -28
- package/claude-assets/skills/cfn-transparency-middleware/performance-benchmark.sh +78 -78
- package/claude-assets/skills/cfn-transparency-middleware/test-integration.sh +161 -161
- package/claude-assets/skills/cfn-transparency-middleware/test-transparency-skill.sh +367 -367
- package/claude-assets/skills/cfn-transparency-middleware/tests/input-validation.sh +92 -92
- package/claude-assets/skills/cfn-transparency-middleware/wrap-agent.sh +131 -131
- package/claude-assets/skills/docker-build/SKILL.md +96 -203
- package/claude-assets/skills/docker-build/build.sh +73 -73
- package/claude-assets/skills/integration/agent-handoff.sh +494 -0
- package/claude-assets/skills/integration/file-operations.sh +414 -0
- package/claude-assets/skills/workflow-codification/APPROVAL_WORKFLOW.md +806 -0
- package/claude-assets/skills/workflow-codification/COST_TRACKING.md +637 -0
- package/claude-assets/skills/workflow-codification/EDGE_CASE_TRACKING.md +404 -0
- package/claude-assets/skills/workflow-codification/README_PHASE4.md +457 -0
- package/claude-assets/skills/workflow-codification/SKILL.md +110 -0
- package/claude-assets/skills/workflow-codification/analyze-patterns.sh +899 -0
- package/claude-assets/skills/workflow-codification/approval-workflow.sh +514 -0
- package/claude-assets/skills/workflow-codification/generate-skill-update.sh +525 -0
- package/claude-assets/skills/workflow-codification/review-skill.sh +643 -0
- package/claude-assets/skills/workflow-codification/templates/email-notification.txt +114 -0
- package/claude-assets/skills/workflow-codification/templates/slack-notification.md +85 -0
- package/claude-assets/skills/workflow-codification/test-integration.sh +281 -0
- package/claude-assets/skills/workflow-codification/track-cost-savings.sh +445 -0
- package/claude-assets/skills/workflow-codification/track-edge-case.sh +323 -0
- package/dist/agents/agent-loader.js +165 -146
- package/dist/agents/agent-loader.js.map +1 -1
- package/dist/cli/config-manager.js +91 -109
- package/dist/cli/config-manager.js.map +1 -1
- package/dist/integration/DatabaseHandoff.js +507 -0
- package/dist/integration/DatabaseHandoff.js.map +1 -0
- package/dist/integration/StandardAdapter.js +291 -0
- package/dist/integration/StandardAdapter.js.map +1 -0
- package/dist/lib/agent-output-parser.js +518 -0
- package/dist/lib/agent-output-parser.js.map +1 -0
- package/dist/lib/agent-output-validator.js +950 -0
- package/dist/lib/agent-output-validator.js.map +1 -0
- package/dist/lib/artifact-registry.js +443 -0
- package/dist/lib/artifact-registry.js.map +1 -0
- package/dist/lib/config-validator.js +687 -0
- package/dist/lib/config-validator.js.map +1 -0
- package/dist/types/agent-output.js +44 -0
- package/dist/types/agent-output.js.map +1 -0
- package/dist/types/config.js +28 -0
- package/dist/types/config.js.map +1 -0
- package/package.json +2 -1
- package/scripts/artifact-cleanup.sh +392 -0
- package/scripts/build-linux.sh +78 -0
- package/scripts/deploy-production.sh +355 -355
- package/scripts/docker-playwright-fix.sh +311 -311
- package/scripts/docker-rebuild-all-agents.sh +127 -127
- package/scripts/memory-leak-prevention.sh +305 -305
- package/scripts/migrate-artifacts.sh +563 -0
- package/scripts/migrate-yaml-to-json.sh +465 -0
- package/scripts/run-marketing-tests.sh +42 -42
- package/scripts/update_paths.sh +46 -46
|
@@ -0,0 +1,465 @@
|
|
|
1
|
+
#!/bin/bash
set -euo pipefail

# YAML to JSON Configuration Migration Script
# Purpose: Safely migrate CFN YAML configuration files to standardized JSON format
# Usage: ./scripts/migrate-yaml-to-json.sh [--dry-run] [--verbose] [--config FILE]

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Default configuration (overridden by the CLI flags parsed below).
DRY_RUN=false
VERBOSE=false
SPECIFIC_CONFIG=""
# Timestamped backup dir so repeated runs never overwrite earlier backups.
BACKUP_DIR="${PROJECT_ROOT}/.backups/yaml-migration-$(date +%Y%m%d-%H%M%S)"

# Color output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    --verbose)
      VERBOSE=true
      shift
      ;;
    --config)
      # Guard against a missing value: under `set -u` a bare "$2" would
      # abort with an unhelpful "unbound variable" error instead of this
      # actionable message.
      if [[ $# -lt 2 ]]; then
        echo -e "${RED}--config requires a FILE argument${NC}" >&2
        exit 1
      fi
      SPECIFIC_CONFIG="$2"
      shift 2
      ;;
    *)
      echo -e "${RED}Unknown option: $1${NC}" >&2
      exit 1
      ;;
  esac
done
|
|
45
|
+
|
|
46
|
+
# Dependency checking
|
|
47
|
+
#######################################
# Verify the required external tools (yq, jq) are installed and compatible.
# Prints OS-appropriate installation instructions and exits 1 on failure.
# Globals: RED, YELLOW, NC (read), OSTYPE (read)
#######################################
check_dependencies() {
  local missing_deps=()

  # `command -v` is the portable "is this tool installed?" check.
  command -v yq >/dev/null 2>&1 || missing_deps+=("yq")
  command -v jq >/dev/null 2>&1 || missing_deps+=("jq")

  # If dependencies are missing, show error with installation instructions
  if [[ ${#missing_deps[@]} -gt 0 ]]; then
    echo -e "${RED}ERROR: Missing required dependencies${NC}"
    echo ""
    echo "The following tools are required but not installed:"
    local dep
    for dep in "${missing_deps[@]}"; do
      echo "  - $dep"
    done
    echo ""
    echo "Installation Instructions:"
    echo "=========================="
    echo ""

    # Detect OS and provide appropriate installation instructions
    if [[ "$OSTYPE" == "darwin"* ]]; then
      echo "macOS (using Homebrew):"
      for dep in "${missing_deps[@]}"; do
        echo "  brew install $dep"
      done
    elif [[ -f /etc/debian_version ]]; then
      echo "Debian/Ubuntu:"
      for dep in "${missing_deps[@]}"; do
        if [[ "$dep" == "yq" ]]; then
          _yq_binary_install_hint
        else
          echo "  sudo apt-get update && sudo apt-get install -y $dep"
        fi
      done
    elif [[ -f /etc/redhat-release ]]; then
      echo "RedHat/CentOS/Fedora:"
      for dep in "${missing_deps[@]}"; do
        if [[ "$dep" == "yq" ]]; then
          _yq_binary_install_hint
        else
          echo "  sudo yum install -y $dep"
        fi
      done
    elif [[ -f /etc/arch-release ]]; then
      echo "Arch Linux:"
      for dep in "${missing_deps[@]}"; do
        echo "  sudo pacman -S $dep"
      done
    else
      echo "Linux (generic):"
      for dep in "${missing_deps[@]}"; do
        if [[ "$dep" == "yq" ]]; then
          echo "  # Download yq binary from GitHub:"
          _yq_binary_install_hint
        else
          echo "  # Install $dep using your package manager"
        fi
      done
    fi

    echo ""
    echo "For more installation options, visit:"
    for dep in "${missing_deps[@]}"; do
      if [[ "$dep" == "yq" ]]; then
        echo "  - yq: https://github.com/mikefarah/yq#install"
      elif [[ "$dep" == "jq" ]]; then
        echo "  - jq: https://stedolan.github.io/jq/download/"
      fi
    done
    echo ""

    exit 1
  fi

  # Verify yq is the Go implementation (mikefarah/yq). The Python kislyuk/yq
  # has an incompatible CLI and would fail or produce wrong output.
  local yq_version
  yq_version=$(yq --version 2>&1)
  if [[ ! "$yq_version" =~ mikefarah ]]; then
    echo -e "${YELLOW}WARNING: Detected yq version may not be compatible${NC}"
    echo "This script requires mikefarah/yq, not kislyuk/yq (Python version)"
    echo "Current version: $yq_version"
    echo ""
    echo "To install the correct version:"
    if [[ "$OSTYPE" == "darwin"* ]]; then
      echo "  brew install yq"
    else
      _yq_binary_install_hint
    fi
    echo ""
    echo "More info: https://github.com/mikefarah/yq#install"
    echo ""
    exit 1
  fi
}

# Helper: the wget-based yq install commands that were repeated verbatim
# for every Linux flavor in the original.
_yq_binary_install_hint() {
  echo "  sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64"
  echo "  sudo chmod +x /usr/local/bin/yq"
}
|
|
158
|
+
|
|
159
|
+
# Logging functions
|
|
160
|
+
# --- Logging helpers -------------------------------------------------------
# Each prints a colorized severity tag followed by the message on stdout.

log_info() {
  echo -e "${BLUE}[INFO]${NC} $1"
}

log_success() {
  echo -e "${GREEN}[SUCCESS]${NC} $1"
}

log_warning() {
  echo -e "${YELLOW}[WARNING]${NC} $1"
}

log_error() {
  echo -e "${RED}[ERROR]${NC} $1"
}

# Emitted only when --verbose was given; otherwise a silent no-op
# (returns 0 either way so `set -e` is never tripped by a quiet call).
log_verbose() {
  if [[ "$VERBOSE" == "true" ]]; then
    echo -e "${BLUE}[VERBOSE]${NC} $1"
  fi
}
|
|
181
|
+
|
|
182
|
+
# Identify YAML config files
|
|
183
|
+
#######################################
# Collect the YAML config files to migrate.
# Honors --config FILE when given; otherwise scans the known CFN locations.
# Outputs the paths space-separated on stdout (consumed by main's `read -a`),
# so paths containing whitespace are not supported by this contract.
# Globals: SPECIFIC_CONFIG, PROJECT_ROOT (read)
#######################################
identify_yaml_configs() {
  local configs=()

  if [[ -n "$SPECIFIC_CONFIG" ]]; then
    if [[ -f "$SPECIFIC_CONFIG" ]]; then
      configs+=("$SPECIFIC_CONFIG")
    else
      log_error "Config file not found: $SPECIFIC_CONFIG"
      exit 1
    fi
  else
    # Team configurations. An unmatched glob leaves the literal pattern
    # behind; the -f check filters it out.
    local file
    for file in "${PROJECT_ROOT}"/docker/config/teams/*.yaml; do
      if [[ -f "$file" ]]; then
        configs+=("$file")
      fi
    done

    # Runtime contract
    if [[ -f "${PROJECT_ROOT}/docker/runtime/cfn-runtime.contract.yml" ]]; then
      configs+=("${PROJECT_ROOT}/docker/runtime/cfn-runtime.contract.yml")
    fi
  fi

  # ${configs[@]+...} avoids an "unbound variable" abort for an empty array
  # under `set -u` on bash < 4.4.
  echo ${configs[@]+"${configs[@]}"}
}
|
|
209
|
+
|
|
210
|
+
# Extract comments from YAML file
|
|
211
|
+
#######################################
# Extract full-line comments from a YAML file into a sidecar file inside
# BACKUP_DIR (JSON cannot carry comments, so they are preserved separately).
# Prints the sidecar path on stdout.
# Globals: BACKUP_DIR (read)
#######################################
extract_comments() {
  local yaml_file="$1"
  local temp_comments="${BACKUP_DIR}/$(basename "$yaml_file").comments"

  # The original relied on create_backup having created BACKUP_DIR first;
  # ensure it exists so this function is safe to call on its own.
  mkdir -p "$BACKUP_DIR"

  # Only lines *starting* with '#' are captured; trailing comments on data
  # lines are lost. `|| true` keeps `set -e` happy when there are none.
  grep "^#" "$yaml_file" > "$temp_comments" 2>/dev/null || true

  echo "$temp_comments"
}
|
|
220
|
+
|
|
221
|
+
# Convert YAML to JSON
|
|
222
|
+
#######################################
# Convert a YAML file to pretty-printed JSON.
# yq parses the YAML and emits JSON; jq normalizes the formatting and the
# script-level `set -o pipefail` makes a yq failure fail the pipeline.
# Returns non-zero (with a logged error) on conversion or validation failure.
#######################################
convert_yaml_to_json() {
  local yaml_file="$1"
  local json_file="$2"

  log_verbose "Converting: $yaml_file -> $json_file"

  # Two fixes vs. the original:
  #  - mikefarah yq v4 emits YAML by default, which jq cannot parse for any
  #    non-trivial document; `-o=json` requests JSON output explicitly.
  #  - checking `$?` after the pipeline was dead code under `set -e` (the
  #    script would already have exited); testing inside `if !` both works
  #    with -e and lets us report the failure.
  if ! yq -o=json . "$yaml_file" | jq '.' > "$json_file"; then
    log_error "Failed to convert $yaml_file to JSON"
    return 1
  fi

  # Validate JSON syntax
  if ! jq empty "$json_file" 2>/dev/null; then
    log_error "Generated JSON is invalid: $json_file"
    return 1
  fi

  log_verbose "Successfully converted to valid JSON"
  return 0
}
|
|
246
|
+
|
|
247
|
+
# Add metadata to JSON
|
|
248
|
+
#######################################
# Stamp a migrated JSON file with provenance metadata under "_migration":
# the original filename, a UTC migration timestamp, and a format version.
# Arguments: $1 - JSON file to stamp; $2 - the source YAML path.
#######################################
add_migration_metadata() {
  local target_json="$1"
  local source_yaml="$2"
  local scratch="${target_json}.tmp"

  # jq cannot edit in place, so write to a scratch file and swap it in.
  jq --arg source "$(basename "$source_yaml")" \
     --arg timestamp "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
     '. + {
    "_migration": {
      "source_file": $source,
      "migrated_at": $timestamp,
      "format_version": "1.0"
    }
  }' "$target_json" > "$scratch"

  mv "$scratch" "$target_json"
}
|
|
266
|
+
|
|
267
|
+
# Create backup
|
|
268
|
+
#######################################
# Copy a YAML file into BACKUP_DIR before migration.
# Prints ONLY the backup path on stdout — the caller captures it with
# $(create_backup ...). The verbose log line is routed to stderr; previously
# it went to stdout and, with --verbose, leaked into the command
# substitution, corrupting the captured path.
# Globals: BACKUP_DIR (read)
#######################################
create_backup() {
  local yaml_file="$1"
  local backup_file="${BACKUP_DIR}/$(basename "$yaml_file").backup"

  mkdir -p "$BACKUP_DIR"
  cp -- "$yaml_file" "$backup_file"

  log_verbose "Created backup: $backup_file" >&2
  echo "$backup_file"
}
|
|
278
|
+
|
|
279
|
+
# Migrate single file
|
|
280
|
+
#######################################
# Migrate one YAML file to JSON: back it up, preserve its comments, convert,
# stamp metadata, then delete the original. Honors --dry-run (preview only).
# Arguments: $1 - path to the YAML file
# Returns:   0 on success, 1 on failure
#######################################
migrate_file() {
  local yaml_file="$1"

  # Determine JSON filename (handle both .yaml and .yml extensions)
  local json_file
  if [[ "$yaml_file" =~ \.yaml$ ]]; then
    json_file="${yaml_file%.yaml}.json"
  elif [[ "$yaml_file" =~ \.yml$ ]]; then
    json_file="${yaml_file%.yml}.json"
  else
    log_error "File is not a YAML file: $yaml_file"
    return 1
  fi

  log_info "Migrating: $(basename "$yaml_file")"

  # Create backup
  local backup_file
  backup_file=$(create_backup "$yaml_file")

  # Extract comments into a sidecar file for documentation. The returned
  # path is not used further here; the extraction's side effect (the file
  # in BACKUP_DIR) is what matters.
  local comments_file
  comments_file=$(extract_comments "$yaml_file")

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would convert: $yaml_file -> $json_file"

    # Show a preview. If the conversion fails we must not fall through to
    # `head` on a file that was never written — the original did, and under
    # `set -e` the script aborted without a clear message.
    local preview_json="${BACKUP_DIR}/$(basename "$json_file").preview"
    if ! convert_yaml_to_json "$yaml_file" "$preview_json"; then
      log_error "[DRY-RUN] Conversion would fail for: $yaml_file"
      return 1
    fi

    log_info "[DRY-RUN] Preview (first 20 lines):"
    head -20 "$preview_json" | sed 's/^/  /'

    return 0
  fi

  # Actual conversion
  if ! convert_yaml_to_json "$yaml_file" "$json_file"; then
    log_error "Migration failed for: $yaml_file"
    return 1
  fi

  # Add metadata
  add_migration_metadata "$json_file" "$yaml_file"

  # Remove the original YAML only after a validated conversion; the backup
  # created above remains the recovery path.
  rm -- "$yaml_file"

  log_success "Migrated: $(basename "$yaml_file") -> $(basename "$json_file")"

  return 0
}
|
|
334
|
+
|
|
335
|
+
# Validate all migrated configs
|
|
336
|
+
#######################################
# Syntax-check every migrated JSON file with jq.
# Arguments: list of JSON file paths (missing files are skipped)
# Returns:   0 if all valid, 1 if any file fails validation
#######################################
validate_migrations() {
  local json_files=("$@")
  local failed=0

  log_info "Validating migrated JSON files..."

  local json_file
  for json_file in "${json_files[@]}"; do
    if [[ ! -f "$json_file" ]]; then
      continue
    fi

    if jq empty "$json_file" 2>/dev/null; then
      log_success "Valid JSON: $(basename "$json_file")"
    else
      log_error "Invalid JSON: $(basename "$json_file")"
      # NOT ((failed++)): that expression evaluates to 0 on the first
      # increment, returns exit status 1, and kills the whole script
      # under `set -e` before the failure summary is printed.
      failed=$((failed + 1))
    fi
  done

  if [[ $failed -gt 0 ]]; then
    log_error "$failed file(s) failed validation"
    return 1
  fi

  log_success "All JSON files validated successfully"
  return 0
}
|
|
363
|
+
|
|
364
|
+
# Generate migration report
|
|
365
|
+
#######################################
# Write a human-readable migration report into BACKUP_DIR and echo it.
# Arguments: list of the original YAML file paths
# Globals:   BACKUP_DIR (read)
#######################################
generate_report() {
  local yaml_files=("$@")
  local report_file="${BACKUP_DIR}/migration-report.txt"

  {
    echo "YAML to JSON Migration Report"
    echo "=============================="
    echo "Date: $(date)"
    echo "Backup Directory: $BACKUP_DIR"
    echo ""
    echo "Files Migrated:"
    local file json_file
    for file in "${yaml_files[@]}"; do
      # Strip whichever YAML extension applies, THEN append .json once.
      # The original appended ".json" unconditionally a second time,
      # reporting "foo.json.json" for every *.yaml input.
      json_file="${file%.yaml}"
      json_file="${json_file%.yml}.json"
      echo "  - $(basename "$file") -> $(basename "$json_file")"
    done
    echo ""
    echo "Rollback Instructions:"
    # The argument parser has no --rollback flag, so the original
    # instruction ("migrate-yaml-to-json.sh --rollback DIR") could never
    # work. Give commands that do.
    echo "  To restore the original YAML files, copy the backups back, e.g.:"
    echo "    for b in ${BACKUP_DIR}/*.backup; do"
    echo "      cp \"\$b\" \"<original-location>/\$(basename \"\$b\" .backup)\""
    echo "    done"
  } > "$report_file"

  cat "$report_file"
  log_success "Migration report saved: $report_file"
}
|
|
390
|
+
|
|
391
|
+
# Main migration process
|
|
392
|
+
#######################################
# Orchestrate the migration: check tools, discover YAML configs, migrate
# each one, validate the results, and emit a report plus summary.
# Globals: DRY_RUN, BACKUP_DIR (read)
#######################################
main() {
  log_info "CFN YAML to JSON Migration Tool"
  log_info "================================"

  # Check dependencies before proceeding
  check_dependencies

  if [[ "$DRY_RUN" == "true" ]]; then
    log_warning "DRY-RUN MODE: No files will be modified"
  fi

  # Identify files to migrate (space-separated on stdout — see
  # identify_yaml_configs; paths containing whitespace are unsupported).
  local yaml_files_str
  yaml_files_str=$(identify_yaml_configs)
  IFS=' ' read -r -a yaml_files <<< "$yaml_files_str"

  if [[ ${#yaml_files[@]} -eq 0 ]]; then
    log_error "No YAML configuration files found"
    exit 1
  fi

  log_info "Found ${#yaml_files[@]} YAML configuration file(s)"

  # Create backup directory
  mkdir -p "$BACKUP_DIR"

  # Migrate each file
  local migrated_files=()
  local failed=0

  local yaml_file json_file
  for yaml_file in "${yaml_files[@]}"; do
    if migrate_file "$yaml_file"; then
      # Strip whichever YAML extension applies, THEN append .json once.
      # (The original appended ".json" twice for *.yaml inputs, so the
      # validation step looked for "foo.json.json" and skipped the file.)
      json_file="${yaml_file%.yaml}"
      json_file="${json_file%.yml}.json"
      migrated_files+=("$json_file")
    else
      # NOT ((failed++)): it returns exit status 1 on the first increment
      # and aborts the whole script under `set -e`.
      failed=$((failed + 1))
    fi
  done

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info ""
    log_info "DRY-RUN COMPLETE"
    log_info "================"
    log_info "To execute migration, run without --dry-run flag"
    exit 0
  fi

  # Validate migrations
  if [[ ${#migrated_files[@]} -gt 0 ]]; then
    validate_migrations "${migrated_files[@]}"
  fi

  # Generate report
  generate_report "${yaml_files[@]}"

  # Summary
  log_info ""
  log_info "Migration Summary"
  log_info "================="
  log_success "Successfully migrated: $((${#yaml_files[@]} - failed)) file(s)"

  if [[ $failed -gt 0 ]]; then
    log_error "Failed migrations: $failed file(s)"
    exit 1
  fi

  log_info ""
  log_success "Migration completed successfully!"
  log_info "Backups saved to: $BACKUP_DIR"
}
|
|
463
|
+
|
|
464
|
+
# Execute main function
|
|
465
|
+
# Execute main function. Arguments were already parsed at top level; forward
# them anyway so a future refactor that parses inside main keeps working
# (main currently ignores its arguments, so behavior is unchanged).
main "$@"
|
|
@@ -1,43 +1,43 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# Run Marketing Infrastructure Integration Tests
|
|
3
|
-
|
|
4
|
-
set -e
|
|
5
|
-
|
|
6
|
-
# Ensure the correct working directory
|
|
7
|
-
cd "$(dirname "$0")/.." || exit 1
|
|
8
|
-
|
|
9
|
-
# Run all marketing test scripts
|
|
10
|
-
echo "🚀 Starting Marketing Infrastructure Integration Tests..."
|
|
11
|
-
|
|
12
|
-
# Function to run test and capture exit code
|
|
13
|
-
run_test() {
|
|
14
|
-
local test_script="$1"
|
|
15
|
-
local start_time=$(date +%s)
|
|
16
|
-
|
|
17
|
-
echo "Running $test_script..."
|
|
18
|
-
|
|
19
|
-
if bash "$test_script"; then
|
|
20
|
-
local end_time=$(date +%s)
|
|
21
|
-
local duration=$((end_time - start_time))
|
|
22
|
-
echo "✅ $test_script PASSED (${duration}s)"
|
|
23
|
-
else
|
|
24
|
-
echo "❌ $test_script FAILED"
|
|
25
|
-
exit 1
|
|
26
|
-
fi
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
# Array of test scripts
|
|
30
|
-
TEST_SCRIPTS=(
|
|
31
|
-
"tests/marketing-email-campaigns-test.sh"
|
|
32
|
-
"tests/marketing-social-publishing-test.sh"
|
|
33
|
-
"tests/marketing-analytics-data-test.sh"
|
|
34
|
-
"tests/marketing-crm-contacts-test.sh"
|
|
35
|
-
)
|
|
36
|
-
|
|
37
|
-
# Run each test script
|
|
38
|
-
for test in "${TEST_SCRIPTS[@]}"; do
|
|
39
|
-
run_test "$test"
|
|
40
|
-
done
|
|
41
|
-
|
|
42
|
-
echo "🎉 All Marketing Infrastructure Integration Tests PASSED!"
|
|
1
|
+
#!/bin/bash
# Run Marketing Infrastructure Integration Tests
#
# Runs each suite listed in TEST_SCRIPTS (below) in order and aborts on the
# first failure. Paths in TEST_SCRIPTS are relative to the repository root.

# Exit immediately if any unhandled command fails.
set -e

# Ensure the correct working directory: the repository root, one level above
# the directory containing this script.
cd "$(dirname "$0")/.." || exit 1

# Run all marketing test scripts
echo "🚀 Starting Marketing Infrastructure Integration Tests..."
|
|
11
|
+
|
|
12
|
+
# Function to run test and capture exit code
|
|
13
|
+
#######################################
# Run a single test script, timing it; aborts the whole runner on failure.
# Arguments: $1 - path to the test script (executed with bash)
# Outputs:   progress and pass/fail lines on stdout
#######################################
run_test() {
  local test_script="$1"
  local start_time
  # Split declaration from assignment so a failing $(date) isn't masked by
  # the always-zero exit status of `local`.
  start_time=$(date +%s)

  echo "Running $test_script..."

  if bash "$test_script"; then
    local end_time duration
    end_time=$(date +%s)
    duration=$((end_time - start_time))
    echo "✅ $test_script PASSED (${duration}s)"
  else
    # A single failure stops the entire run.
    echo "❌ $test_script FAILED"
    exit 1
  fi
}
|
|
28
|
+
|
|
29
|
+
# Array of test scripts
|
|
30
|
+
# Array of test scripts
# Suites run in this order; run_test exits the script on the first failure,
# so later suites are skipped once one fails.
TEST_SCRIPTS=(
  "tests/marketing-email-campaigns-test.sh"
  "tests/marketing-social-publishing-test.sh"
  "tests/marketing-analytics-data-test.sh"
  "tests/marketing-crm-contacts-test.sh"
)

# Run each test script
for test in "${TEST_SCRIPTS[@]}"; do
  run_test "$test"
done

# Reached only if every suite passed (run_test exits on failure).
echo "🎉 All Marketing Infrastructure Integration Tests PASSED!"
exit 0
|
package/scripts/update_paths.sh
CHANGED
|
@@ -1,47 +1,47 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
|
|
3
|
-
# Path replacements
|
|
4
|
-
SKILL_REPLACEMENTS=(
|
|
5
|
-
".claude/skills/redis-coordination/:\.claude/skills/cfn-redis-coordination/"
|
|
6
|
-
".claude/skills/agent-spawning/:\.claude/skills/cfn-agent-spawning/"
|
|
7
|
-
".claude/skills/playbook/:\.claude/skills/cfn-playbook/"
|
|
8
|
-
".claude/skills/:\.claude/skills/cfn-"
|
|
9
|
-
)
|
|
10
|
-
|
|
11
|
-
HOOK_REPLACEMENTS=(
|
|
12
|
-
".claude/hooks/invoke-post-edit.sh:\.claude/hooks/cfn-invoke-post-edit.sh"
|
|
13
|
-
".claude/hooks/post-edit.sh:\.claude/hooks/cfn-post-edit.sh"
|
|
14
|
-
".claude/hooks/:\.claude/hooks/cfn-"
|
|
15
|
-
)
|
|
16
|
-
|
|
17
|
-
DATA_REPLACEMENTS=(
|
|
18
|
-
".claude/data/:\.claude/cfn-data/"
|
|
19
|
-
)
|
|
20
|
-
|
|
21
|
-
# Function to perform replacements in a file
|
|
22
|
-
replace_paths() {
|
|
23
|
-
local file="$1"
|
|
24
|
-
local replacements=("${@:2}")
|
|
25
|
-
|
|
26
|
-
# Perform replacements for each pattern
|
|
27
|
-
for replacement in "${replacements[@]}"; do
|
|
28
|
-
IFS=':' read -r old_path new_path <<< "$replacement"
|
|
29
|
-
sed -i "s|${old_path}|${new_path}|g" "$file"
|
|
30
|
-
done
|
|
31
|
-
}
|
|
32
|
-
|
|
33
|
-
# Find and update markdown files
|
|
34
|
-
find .claude/agents/cfn-dev-team -name "*.md" | while read -r file; do
|
|
35
|
-
echo "Processing $file"
|
|
36
|
-
|
|
37
|
-
# Skill path replacements
|
|
38
|
-
replace_paths "$file" "${SKILL_REPLACEMENTS[@]}"
|
|
39
|
-
|
|
40
|
-
# Hook path replacements
|
|
41
|
-
replace_paths "$file" "${HOOK_REPLACEMENTS[@]}"
|
|
42
|
-
|
|
43
|
-
# Data path replacements
|
|
44
|
-
replace_paths "$file" "${DATA_REPLACEMENTS[@]}"
|
|
45
|
-
done
|
|
46
|
-
|
|
1
|
+
#!/bin/bash

# Path replacements
# Each entry is "OLD:NEW" (split on the first ':' by replace_paths). Entries
# are applied in order, so the specific directory renames must come before
# the generic catch-all prefix rules at the end of each list.
# NOTE(review): the NEW side contains literal backslashes (e.g. "\.claude");
# in a sed *replacement* a backslash before an ordinary character is
# dropped, so the written text is plain ".claude/..." — presumably
# intentional, but confirm against the affected markdown files.
SKILL_REPLACEMENTS=(
  ".claude/skills/redis-coordination/:\.claude/skills/cfn-redis-coordination/"
  ".claude/skills/agent-spawning/:\.claude/skills/cfn-agent-spawning/"
  ".claude/skills/playbook/:\.claude/skills/cfn-playbook/"
  ".claude/skills/:\.claude/skills/cfn-"
)

# Hook renames: specific hook scripts first, then the generic cfn- prefix.
HOOK_REPLACEMENTS=(
  ".claude/hooks/invoke-post-edit.sh:\.claude/hooks/cfn-invoke-post-edit.sh"
  ".claude/hooks/post-edit.sh:\.claude/hooks/cfn-post-edit.sh"
  ".claude/hooks/:\.claude/hooks/cfn-"
)

# Data directory rename.
DATA_REPLACEMENTS=(
  ".claude/data/:\.claude/cfn-data/"
)
|
|
20
|
+
|
|
21
|
+
# Function to perform replacements in a file
|
|
22
|
+
#######################################
# Apply a list of "OLD:NEW" sed substitutions to one file, in place.
# Arguments: $1 - target file; $2.. - replacement specs ("OLD:NEW").
#######################################
replace_paths() {
  local target="$1"
  shift

  local spec old_path new_path
  for spec in "$@"; do
    # Split the spec on the first ':' into the search and replace halves.
    IFS=':' read -r old_path new_path <<< "$spec"
    # '|' as the sed delimiter so the '/' in paths needs no escaping.
    sed -i "s|${old_path}|${new_path}|g" "$target"
  done
}
|
|
32
|
+
|
|
33
|
+
# Find and update markdown files
|
|
34
|
+
# Find and update markdown files under the dev-team agents tree.
# NUL-delimited find output with `IFS= read -r -d ''` survives filenames
# containing spaces or newlines; the original's plain `read -r` on
# newline-delimited output trimmed surrounding whitespace and broke on
# names with embedded newlines.
find .claude/agents/cfn-dev-team -name "*.md" -print0 |
  while IFS= read -r -d '' file; do
    echo "Processing $file"

    # Skill path replacements
    replace_paths "$file" "${SKILL_REPLACEMENTS[@]}"

    # Hook path replacements
    replace_paths "$file" "${HOOK_REPLACEMENTS[@]}"

    # Data path replacements
    replace_paths "$file" "${DATA_REPLACEMENTS[@]}"
  done

echo "Path replacements complete!"
|