claude-flow-novice 2.15.1 → 2.15.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/cfn-extras/agents/google-sheets-specialist.md +614 -0
- package/.claude/commands/cfn/create-handoff.md +224 -0
- package/.claude/hooks/cfn-BACKUP_USAGE.md +243 -243
- package/.claude/hooks/cfn-invoke-security-validation.sh +69 -69
- package/.claude/hooks/cfn-post-edit-cfn-retrospective.sh +78 -78
- package/.claude/hooks/cfn-post-edit.config.json +44 -44
- package/.claude/skills/agent-lifecycle/SKILL.md +60 -0
- package/.claude/skills/agent-lifecycle/execute-lifecycle-hook.sh +573 -0
- package/.claude/skills/agent-lifecycle/simple-audit.sh +31 -0
- package/.claude/skills/cfn-hybrid-routing/check-dependencies.sh +51 -51
- package/.claude/skills/cfn-loop-validation/orchestrate-cfn-loop.sh +252 -252
- package/.claude/skills/cfn-redis-coordination/agent-recovery.sh +74 -74
- package/.claude/skills/cfn-redis-coordination/get-context.sh +112 -112
- package/.claude/skills/cfn-transparency-middleware/middleware-config.sh +28 -28
- package/.claude/skills/cfn-transparency-middleware/performance-benchmark.sh +78 -78
- package/.claude/skills/cfn-transparency-middleware/test-integration.sh +161 -161
- package/.claude/skills/cfn-transparency-middleware/test-transparency-skill.sh +367 -367
- package/.claude/skills/cfn-transparency-middleware/tests/input-validation.sh +92 -92
- package/.claude/skills/cfn-transparency-middleware/wrap-agent.sh +131 -131
- package/claude-assets/agents/cfn-dev-team/coordinators/handoff-coordinator.md +662 -0
- package/claude-assets/agents/cfn-dev-team/dev-ops/docker-specialist.md +29 -0
- package/claude-assets/cfn-extras/agents/google-sheets-specialist.md +614 -0
- package/claude-assets/commands/cfn/create-handoff.md +224 -0
- package/claude-assets/hooks/cfn-BACKUP_USAGE.md +243 -243
- package/claude-assets/hooks/cfn-invoke-security-validation.sh +69 -69
- package/claude-assets/hooks/cfn-post-edit-cfn-retrospective.sh +78 -78
- package/claude-assets/hooks/cfn-post-edit.config.json +44 -44
- package/claude-assets/hooks/cfn-post-execution/memory-cleanup.sh +19 -19
- package/claude-assets/hooks/cfn-pre-execution/memory-check.sh +19 -19
- package/claude-assets/skills/agent-lifecycle/execute-lifecycle-hook.sh +572 -572
- package/claude-assets/skills/agent-lifecycle/simple-audit.sh +30 -30
- package/claude-assets/skills/cfn-automatic-memory-persistence/persist-agent-output.sh +48 -48
- package/claude-assets/skills/cfn-automatic-memory-persistence/query-agent-history.sh +34 -34
- package/claude-assets/skills/cfn-deliverable-validation/confidence-calculator.sh +261 -261
- package/claude-assets/skills/cfn-expert-update/update-expert.sh +345 -345
- package/claude-assets/skills/cfn-hybrid-routing/check-dependencies.sh +51 -51
- package/claude-assets/skills/cfn-intervention-detector/detect-intervention.sh +110 -110
- package/claude-assets/skills/cfn-intervention-orchestrator/execute-intervention.sh +58 -58
- package/claude-assets/skills/cfn-loop-validation/orchestrate-cfn-loop.sh +252 -252
- package/claude-assets/skills/cfn-loop2-output-processing/process-validator-output.sh +275 -275
- package/claude-assets/skills/cfn-memory-management/check-memory.sh +159 -159
- package/claude-assets/skills/cfn-memory-management/cleanup-memory.sh +196 -196
- package/claude-assets/skills/cfn-node-heap-sizer/task-mode-heap-limiter.sh +325 -325
- package/claude-assets/skills/cfn-playbook-auto-update/auto-update-playbook.sh +85 -85
- package/claude-assets/skills/cfn-redis-coordination/agent-recovery.sh +74 -74
- package/claude-assets/skills/cfn-redis-coordination/get-context.sh +112 -112
- package/claude-assets/skills/cfn-scope-simplifier/simplify-scope.sh +67 -67
- package/claude-assets/skills/cfn-specialist-injection/recommend-specialist.sh +56 -56
- package/claude-assets/skills/cfn-standardized-error-handling/capture-agent-error.sh +86 -86
- package/claude-assets/skills/cfn-standardized-error-handling/test-error-handling.sh +165 -165
- package/claude-assets/skills/cfn-task-config-init/initialize-config.sh +264 -264
- package/claude-assets/skills/cfn-task-decomposition/task-decomposer.sh +278 -278
- package/claude-assets/skills/cfn-transparency-middleware/middleware-config.sh +28 -28
- package/claude-assets/skills/cfn-transparency-middleware/performance-benchmark.sh +78 -78
- package/claude-assets/skills/cfn-transparency-middleware/test-integration.sh +161 -161
- package/claude-assets/skills/cfn-transparency-middleware/test-transparency-skill.sh +367 -367
- package/claude-assets/skills/cfn-transparency-middleware/tests/input-validation.sh +92 -92
- package/claude-assets/skills/cfn-transparency-middleware/wrap-agent.sh +131 -131
- package/claude-assets/skills/docker-build/SKILL.md +96 -203
- package/claude-assets/skills/docker-build/build.sh +73 -73
- package/claude-assets/skills/integration/agent-handoff.sh +494 -0
- package/claude-assets/skills/integration/file-operations.sh +414 -0
- package/claude-assets/skills/workflow-codification/APPROVAL_WORKFLOW.md +806 -0
- package/claude-assets/skills/workflow-codification/COST_TRACKING.md +637 -0
- package/claude-assets/skills/workflow-codification/EDGE_CASE_TRACKING.md +404 -0
- package/claude-assets/skills/workflow-codification/README_PHASE4.md +457 -0
- package/claude-assets/skills/workflow-codification/SKILL.md +110 -0
- package/claude-assets/skills/workflow-codification/analyze-patterns.sh +899 -0
- package/claude-assets/skills/workflow-codification/approval-workflow.sh +514 -0
- package/claude-assets/skills/workflow-codification/generate-skill-update.sh +525 -0
- package/claude-assets/skills/workflow-codification/review-skill.sh +643 -0
- package/claude-assets/skills/workflow-codification/templates/email-notification.txt +114 -0
- package/claude-assets/skills/workflow-codification/templates/slack-notification.md +85 -0
- package/claude-assets/skills/workflow-codification/test-integration.sh +281 -0
- package/claude-assets/skills/workflow-codification/track-cost-savings.sh +445 -0
- package/claude-assets/skills/workflow-codification/track-edge-case.sh +323 -0
- package/dist/agents/agent-loader.js +165 -146
- package/dist/agents/agent-loader.js.map +1 -1
- package/dist/cli/config-manager.js +91 -109
- package/dist/cli/config-manager.js.map +1 -1
- package/dist/integration/DatabaseHandoff.js +507 -0
- package/dist/integration/DatabaseHandoff.js.map +1 -0
- package/dist/integration/StandardAdapter.js +291 -0
- package/dist/integration/StandardAdapter.js.map +1 -0
- package/dist/lib/agent-output-parser.js +518 -0
- package/dist/lib/agent-output-parser.js.map +1 -0
- package/dist/lib/agent-output-validator.js +950 -0
- package/dist/lib/agent-output-validator.js.map +1 -0
- package/dist/lib/artifact-registry.js +443 -0
- package/dist/lib/artifact-registry.js.map +1 -0
- package/dist/lib/config-validator.js +687 -0
- package/dist/lib/config-validator.js.map +1 -0
- package/dist/types/agent-output.js +44 -0
- package/dist/types/agent-output.js.map +1 -0
- package/dist/types/config.js +28 -0
- package/dist/types/config.js.map +1 -0
- package/package.json +2 -1
- package/scripts/artifact-cleanup.sh +392 -0
- package/scripts/build-linux.sh +78 -0
- package/scripts/deploy-production.sh +355 -355
- package/scripts/docker-playwright-fix.sh +311 -311
- package/scripts/docker-rebuild-all-agents.sh +127 -127
- package/scripts/memory-leak-prevention.sh +305 -305
- package/scripts/migrate-artifacts.sh +563 -0
- package/scripts/migrate-yaml-to-json.sh +465 -0
- package/scripts/run-marketing-tests.sh +42 -42
- package/scripts/update_paths.sh +46 -46
|
@@ -0,0 +1,563 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# Artifact Migration Script
|
|
3
|
+
# Version: 1.0.0
|
|
4
|
+
# Purpose: Find scattered artifacts and migrate to centralized registry
|
|
5
|
+
#
|
|
6
|
+
# Usage:
|
|
7
|
+
# ./migrate-artifacts.sh [OPTIONS]
|
|
8
|
+
#
|
|
9
|
+
# Options:
|
|
10
|
+
# --dry-run Show what would be migrated without making changes
|
|
11
|
+
# --source-dirs <dirs> Comma-separated list of directories to scan (default: /tmp,docs,artifacts)
|
|
12
|
+
# --db-path <path> Path to SQLite database (default: ./artifacts/database/registry.db)
|
|
13
|
+
# --registry-path <path> Base path for centralized registry (default: ./artifacts/registry)
|
|
14
|
+
# --log-file <path> Path to log file (default: ./artifacts/logs/migration.log)
|
|
15
|
+
# --exclude-patterns <patterns> Exclude files matching patterns (comma-separated)
|
|
16
|
+
# --auto-detect-type Auto-detect artifact type from extension
|
|
17
|
+
# --verbose Enable verbose logging
|
|
18
|
+
# --help Show this help message
|
|
19
|
+
#
|
|
20
|
+
# Exit Codes:
|
|
21
|
+
# 0 - Success
|
|
22
|
+
# 1 - General error
|
|
23
|
+
# 2 - Database error
|
|
24
|
+
# 3 - Validation error
|
|
25
|
+
|
|
26
|
+
set -euo pipefail
|
|
27
|
+
|
|
28
|
+
# ============================================================================
|
|
29
|
+
# Configuration and Defaults
|
|
30
|
+
# ============================================================================
|
|
31
|
+
|
|
32
|
+
# Resolve the directory containing this script, and the project root one level up.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Default configuration (each overridable via the matching CLI flag)
DRY_RUN=false                                          # --dry-run: report only, change nothing
SOURCE_DIRS="/tmp,docs,artifacts"                      # --source-dirs: comma-separated scan roots
DB_PATH="${PROJECT_ROOT}/artifacts/database/registry.db"  # --db-path: SQLite registry database
REGISTRY_PATH="${PROJECT_ROOT}/artifacts/registry"     # --registry-path: centralized file store
LOG_FILE="${PROJECT_ROOT}/artifacts/logs/migration.log"   # --log-file: tee target for all log output
EXCLUDE_PATTERNS=".git,.DS_Store,node_modules,*.swp,*.tmp"  # --exclude-patterns: comma-separated skips
AUTO_DETECT_TYPE=false                                 # --auto-detect-type: classify by extension
VERBOSE=false                                          # --verbose: emit DEBUG-level log lines

# Counters (mutated by log_error, find_artifacts and migrate_file; reported by main)
FILES_FOUND=0
FILES_MIGRATED=0
FILES_SKIPPED=0
ERRORS=0
|
|
50
|
+
|
|
51
|
+
# ============================================================================
|
|
52
|
+
# Logging Functions
|
|
53
|
+
# ============================================================================
|
|
54
|
+
|
|
55
|
+
# ----------------------------------------------------------------------------
# Logging helpers: every message is echoed to stdout and appended to $LOG_FILE.
# ----------------------------------------------------------------------------

# Core logger. Usage: log <LEVEL> <message...>
# Prints "[timestamp] [LEVEL] message" and appends the same line to $LOG_FILE.
log() {
  local severity="$1"
  shift
  local text="$*"
  local now
  now="$(date '+%Y-%m-%d %H:%M:%S')"

  echo "[${now}] [${severity}] ${text}" | tee -a "$LOG_FILE"
}

# Informational message.
log_info() { log "INFO" "$@"; }

# Non-fatal warning.
log_warn() { log "WARN" "$@"; }

# Error message; also bumps the global ERRORS counter (main exits 1 if > 0).
log_error() {
  log "ERROR" "$@"
  ((ERRORS++)) || true
}

# Debug message; emitted only when --verbose was supplied.
log_debug() {
  [[ "$VERBOSE" == "true" ]] && log "DEBUG" "$@"
  return 0
}
|
|
83
|
+
|
|
84
|
+
# ============================================================================
|
|
85
|
+
# Utility Functions
|
|
86
|
+
# ============================================================================
|
|
87
|
+
|
|
88
|
+
# Print usage text by extracting the script's own '#' comment lines (minus the
# shebang), then exit 0.
# NOTE(review): grep '^#' "$0" matches EVERY comment line in the file, not only
# the top-of-file header block — confirm this renders acceptably as help output.
show_help() {
  grep '^#' "$0" | grep -v '#!/usr/bin/env' | sed 's/^# //; s/^#//'
  exit 0
}
|
|
92
|
+
|
|
93
|
+
# Create directory $1 (including parents) if it does not already exist.
ensure_directory() {
  local target="$1"
  [[ -d "$target" ]] && return 0
  mkdir -p "$target"
  log_debug "Created directory: $target"
}
|
|
100
|
+
|
|
101
|
+
# SQL-safe escaping: replace single quotes with two single quotes (SQL standard)
|
|
102
|
+
# This prevents SQL injection by ensuring user input cannot break out of string literals
|
|
103
|
+
# SQL-safe escaping: double every single quote (standard SQL escaping) so the
# value cannot break out of a single-quoted SQL string literal.
sql_escape() {
  local value="$1"
  printf '%s' "${value//"'"/"''"}"
}
|
|
106
|
+
|
|
107
|
+
# Verify the sqlite3 CLI is available; on failure, log install hints and exit 1.
check_dependencies() {
  if command -v sqlite3 &>/dev/null; then
    log_debug "Dependency check passed: sqlite3 found"
    return 0
  fi
  log_error "sqlite3 CLI tool is required but not installed"
  log_error "Install with one of the following commands:"
  log_error "  - Debian/Ubuntu: apt-get install sqlite3"
  log_error "  - RHEL/CentOS: yum install sqlite"
  log_error "  - macOS: brew install sqlite3"
  log_error "  - Alpine: apk add sqlite"
  exit 1
}
|
|
119
|
+
|
|
120
|
+
# Ensure the registry database exists and is queryable.
# If $DB_PATH is missing, create its directory and initialize it from the
# project schema file; exit 2 if the schema is absent or the 'artifacts'
# table cannot be queried afterwards.
validate_database() {
  if [[ ! -f "$DB_PATH" ]]; then
    log_warn "Database not found, will be created: $DB_PATH"
    ensure_directory "$(dirname "$DB_PATH")"

    # Initialize database with schema
    local schema_path="${PROJECT_ROOT}/src/database/artifact-registry-schema.sql"
    if [[ -f "$schema_path" ]]; then
      sqlite3 "$DB_PATH" < "$schema_path"
      log_info "Database initialized with schema"
    else
      log_error "Schema file not found: $schema_path"
      exit 2
    fi
  fi

  # Test database connectivity (also proves the artifacts table exists)
  if ! sqlite3 "$DB_PATH" "SELECT COUNT(*) FROM artifacts;" &>/dev/null; then
    log_error "Failed to query database or artifacts table does not exist"
    exit 2
  fi

  log_debug "Database validated: $DB_PATH"
}
|
|
144
|
+
|
|
145
|
+
# ============================================================================
|
|
146
|
+
# Type Detection Functions
|
|
147
|
+
# ============================================================================
|
|
148
|
+
|
|
149
|
+
# Classify an artifact from its filename.
# Arguments: $1 - file path
# Outputs:   one of: code, documentation, test, config, binary, data, model, other
detect_artifact_type() {
  local file_path="$1"
  local extension="${file_path##*.}"
  local basename
  basename="$(basename "$file_path")"

  # Convert to lowercase for comparison
  extension=$(echo "$extension" | tr '[:upper:]' '[:lower:]')

  # BUG FIX: the old code listed "test.js|test.ts|..." as arms of the
  # extension case, but ${file_path##*.} keeps only the text after the LAST
  # dot, so those arms could never match and foo.test.js was classified as
  # "code". Match test-suffix patterns against the full basename first.
  case "$basename" in
    *.test.js|*.test.ts|*.spec.js|*.spec.ts|*.test.py)
      echo "test"
      return 0
      ;;
  esac

  case "$extension" in
    # Code
    js|ts|jsx|tsx|py|rb|go|rs|java|c|cpp|h|hpp|sh|bash)
      echo "code"
      ;;
    # Documentation
    md|txt|pdf|doc|docx|rst|adoc)
      echo "documentation"
      ;;
    # Config
    json|yaml|yml|toml|ini|conf|cfg|env)
      echo "config"
      ;;
    # Binary
    bin|exe|dll|so|dylib|o|a)
      echo "binary"
      ;;
    # Data
    csv|tsv|parquet|arrow|db|sqlite|sqlite3)
      echo "data"
      ;;
    # Model
    h5|pb|onnx|pkl|pth|pt)
      echo "model"
      ;;
    *)
      # Unknown extension: fall back to a test-ish name heuristic
      if [[ "$basename" =~ test|spec|Test|Spec ]]; then
        echo "test"
      else
        echo "other"
      fi
      ;;
  esac
}
|
|
197
|
+
|
|
198
|
+
# Print the lowercase extension of $1 (the whole name, lowercased, if there is
# no dot — mirrors ${name##*.} semantics).
detect_format() {
  local ext="${1##*.}"
  printf '%s\n' "$ext" | tr '[:upper:]' '[:lower:]'
}
|
|
203
|
+
|
|
204
|
+
# ============================================================================
|
|
205
|
+
# File Discovery Functions
|
|
206
|
+
# ============================================================================
|
|
207
|
+
|
|
208
|
+
# Decide whether $1 should be skipped.
# Returns 0 (exclude) when any comma-separated entry of EXCLUDE_PATTERNS
# appears as a substring of the full path, or glob-matches the basename;
# returns 1 otherwise.
should_exclude_file() {
  local candidate="$1"
  local base
  base="$(basename "$candidate")"

  IFS=',' read -ra PATTERNS <<< "$EXCLUDE_PATTERNS"
  local pat
  for pat in "${PATTERNS[@]}"; do
    pat=$(echo "$pat" | xargs) # trim whitespace
    # unquoted RHS is intentional: lets entries like *.tmp act as globs
    if [[ "$candidate" == *"$pat"* || "$base" == $pat ]]; then
      return 0
    fi
  done

  return 1
}
|
|
223
|
+
|
|
224
|
+
# Scan each comma-separated directory in $1 and call migrate_file for every
# regular file that is neither excluded nor already inside $REGISTRY_PATH.
# Side effects: increments the global FILES_FOUND / FILES_SKIPPED counters.
find_artifacts() {
  local source_dirs="$1"

  IFS=',' read -ra DIRS <<< "$source_dirs"
  for dir in "${DIRS[@]}"; do
    dir=$(echo "$dir" | xargs) # trim whitespace

    # Convert relative to absolute path (anchored at the project root)
    if [[ ! "$dir" =~ ^/ ]]; then
      dir="${PROJECT_ROOT}/${dir}"
    fi

    if [[ ! -d "$dir" ]]; then
      log_warn "Directory not found, skipping: $dir"
      continue
    fi

    log_info "Scanning directory: $dir"

    # Find all regular files; NUL-delimited so paths with spaces/newlines
    # survive. The process substitution keeps counter updates in this shell.
    while IFS= read -r -d '' file; do
      ((FILES_FOUND++)) || true

      # Check exclusion patterns
      if should_exclude_file "$file"; then
        log_debug "Excluded: $file"
        ((FILES_SKIPPED++)) || true
        continue
      fi

      # Check if already in registry (avoid re-migrating registry contents)
      if [[ "$file" == "${REGISTRY_PATH}"* ]]; then
        log_debug "Already in registry: $file"
        ((FILES_SKIPPED++)) || true
        continue
      fi

      migrate_file "$file"
    done < <(find "$dir" -type f -print0 2>/dev/null || true)
  done
}
|
|
265
|
+
|
|
266
|
+
# ============================================================================
|
|
267
|
+
# Migration Functions
|
|
268
|
+
# ============================================================================
|
|
269
|
+
|
|
270
|
+
# Print the SHA-256 hex digest of file $1.
# Portability fix: sha256sum is a GNU coreutils tool absent on stock
# macOS/BSD; fall back to `shasum -a 256` there (matches the BSD/GNU `stat`
# fallback used by migrate_file). Both tools print "<digest>  <name>".
calculate_checksum() {
  local file_path="$1"
  if command -v sha256sum &>/dev/null; then
    sha256sum "$file_path" | awk '{print $1}'
  else
    shasum -a 256 "$file_path" | awk '{print $1}'
  fi
}
|
|
274
|
+
|
|
275
|
+
# Emit a unique artifact id of the form: artifact-<unix-epoch>-<random-uint32>.
# Randomness comes from /dev/urandom (4 bytes, rendered as an unsigned int).
generate_artifact_id() {
  local now rand
  now=$(date +%s)
  rand=$(od -An -N4 -tu4 /dev/urandom | tr -d ' ')
  printf 'artifact-%s-%s\n' "$now" "$rand"
}
|
|
282
|
+
|
|
283
|
+
# Compute the registry-relative destination for an artifact:
# "<type>s/<artifact-id>.<original-extension>".
# Arguments: $1 - source file path, $2 - artifact type, $3 - artifact id
get_relative_registry_path() {
  local file_path="$1"
  local artifact_type="$2"
  local artifact_id="$3"
  local extension="${file_path##*.}"

  # Create type-based subdirectory: code -> codes, test -> tests, etc.
  local subdir="${artifact_type}s"
  local filename="${artifact_id}.${extension}"

  # BUG FIX: previously echoed "${subdir}/$(unknown)" — a command substitution
  # of a nonexistent command — instead of the computed $filename, so every
  # destination path was broken.
  echo "${subdir}/${filename}"
}
|
|
295
|
+
|
|
296
|
+
# Migrate one file into the centralized registry:
# gather metadata (size, checksum, type, format), compute a destination path,
# copy the file there, and register it in the SQLite database. Honors
# $DRY_RUN (report only) and $AUTO_DETECT_TYPE. Increments FILES_MIGRATED on
# success; returns 1 (after logging and cleanup) on copy or registration
# failure.
migrate_file() {
  local source_file="$1"

  # Get file metadata; stat -f%z is BSD/macOS, -c%s is GNU — try both,
  # defaulting to 0 if neither succeeds
  local size_bytes
  size_bytes=$(stat -f%z "$source_file" 2>/dev/null || stat -c%s "$source_file" 2>/dev/null || echo "0")

  local checksum
  checksum=$(calculate_checksum "$source_file")

  local artifact_type
  if [[ "$AUTO_DETECT_TYPE" == "true" ]]; then
    artifact_type=$(detect_artifact_type "$source_file")
  else
    artifact_type="other"
  fi

  local format
  format=$(detect_format "$source_file")

  local artifact_id
  artifact_id=$(generate_artifact_id)

  local rel_path
  rel_path=$(get_relative_registry_path "$source_file" "$artifact_type" "$artifact_id")

  local dest_file="${REGISTRY_PATH}/${rel_path}"
  local dest_dir
  dest_dir="$(dirname "$dest_file")"

  local name
  name="$(basename "$source_file")"

  log_debug "Migrating: $source_file -> $dest_file"

  # Dry-run mode: log what would happen, bump the counter, and stop
  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "[DRY-RUN] Would migrate: $source_file"
    log_debug "  - ID: $artifact_id"
    log_debug "  - Type: $artifact_type"
    log_debug "  - Size: $size_bytes bytes"
    log_debug "  - Destination: $dest_file"
    ((FILES_MIGRATED++)) || true
    return 0
  fi

  # Create destination directory
  ensure_directory "$dest_dir"

  # Copy file to registry (-p preserves mode/timestamps)
  if ! cp -p "$source_file" "$dest_file"; then
    log_error "Failed to copy file: $source_file -> $dest_file"
    return 1
  fi

  # Register in database
  if ! register_artifact "$artifact_id" "$name" "$artifact_type" "$format" "$dest_file" "$checksum" "$size_bytes"; then
    log_error "Failed to register artifact: $artifact_id"
    # Cleanup copied file so the registry and the database stay in sync
    rm -f "$dest_file"
    return 1
  fi

  log_info "Migrated: $name (Type: $artifact_type, Size: $size_bytes bytes)"
  ((FILES_MIGRATED++)) || true
  return 0
}
|
|
362
|
+
|
|
363
|
+
# Insert one artifact row into the registry database.
# Arguments: $1 id, $2 name, $3 type, $4 format, $5 storage_location,
#            $6 checksum, $7 size_bytes (numeric)
# Returns:   sqlite3's exit status (the function's implicit return value).
register_artifact() {
  local id="$1"
  local name="$2"
  local type="$3"
  local format="$4"
  local storage_location="$5"
  local checksum="$6"
  local size_bytes="$7"

  # Escape all string values to prevent SQL injection (single quotes are
  # doubled, the SQL standard escaping).
  # Declarations are separated from the $(...) assignments so that a
  # sql_escape failure is not masked by `local`'s own exit status.
  local safe_id safe_name safe_type safe_format safe_location safe_checksum
  safe_id=$(sql_escape "$id")
  safe_name=$(sql_escape "$name")
  safe_type=$(sql_escape "$type")
  safe_format=$(sql_escape "$format")
  safe_location=$(sql_escape "$storage_location")
  safe_checksum=$(sql_escape "$checksum")

  # Insert with properly escaped values.
  # Note: size_bytes is numeric and doesn't need escaping.
  sqlite3 "$DB_PATH" <<EOF
INSERT INTO artifacts (
    id, name, type, format, storage_location, checksum, size_bytes,
    version, acl_level, retention_policy, retention_days, status, is_compressed
) VALUES (
    '$safe_id', '$safe_name', '$safe_type', '$safe_format', '$safe_location', '$safe_checksum', $size_bytes,
    1, 2, 'standard', 30, 'active', 0
);
EOF
}
|
|
395
|
+
|
|
396
|
+
# ============================================================================
|
|
397
|
+
# Argument Parsing
|
|
398
|
+
# ============================================================================
|
|
399
|
+
|
|
400
|
+
# Parse CLI options into the script's configuration globals.
# Exits 3 on an unknown option or a value-taking option with no value;
# --help delegates to show_help (which exits 0).
parse_arguments() {
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --dry-run)
        DRY_RUN=true
        shift
        ;;
      --source-dirs|--db-path|--registry-path|--log-file|--exclude-patterns)
        # BUG FIX: the old code did `shift 2` unconditionally; with a missing
        # value, shift fails, $# never changes, and the loop spins forever.
        if [[ $# -lt 2 ]]; then
          printf 'ERROR: option %s requires a value\n' "$1" >&2
          exit 3
        fi
        case "$1" in
          --source-dirs)      SOURCE_DIRS="$2" ;;
          --db-path)          DB_PATH="$2" ;;
          --registry-path)    REGISTRY_PATH="$2" ;;
          --log-file)         LOG_FILE="$2" ;;
          --exclude-patterns) EXCLUDE_PATTERNS="$2" ;;
        esac
        shift 2
        ;;
      --auto-detect-type)
        AUTO_DETECT_TYPE=true
        shift
        ;;
      --verbose)
        VERBOSE=true
        shift
        ;;
      --help)
        show_help
        ;;
      *)
        # BUG FIX: this arm used to call show_help, which does `exit 0`, so
        # the intended `exit 3` was unreachable and invalid arguments made
        # the script report success. Print to stderr directly (the log file's
        # directory may not exist yet at parse time) and exit 3.
        printf 'ERROR: Unknown option: %s\n' "$1" >&2
        exit 3
        ;;
    esac
  done
}
|
|
446
|
+
|
|
447
|
+
# ============================================================================
|
|
448
|
+
# Security Testing
|
|
449
|
+
# ============================================================================
|
|
450
|
+
|
|
451
|
+
# Self-test of sql_escape run at startup: verifies quote doubling on a
# classic injection payload, pre-doubled quotes, the empty string, and a
# plain string. Returns 0 when all four checks pass, 1 on the first failure
# (main aborts with exit 3 in that case).
test_sql_injection() {
  log_info "Running SQL injection protection tests..."

  # Test 1: Single quote escaping
  local malicious_id="'; DROP TABLE artifacts; --"
  local escaped=$(sql_escape "$malicious_id")

  if [[ "$escaped" == "''; DROP TABLE artifacts; --" ]]; then
    log_info "  ✓ SQL injection protection: Single quote escaping PASS"
  else
    log_error "  ✗ SQL injection protection: Single quote escaping FAIL (expected: ''; DROP TABLE artifacts; --, got: $escaped)"
    return 1
  fi

  # Test 2: Multiple single quotes (already-doubled quotes double again)
  local multi_quotes="test''value"
  local escaped_multi=$(sql_escape "$multi_quotes")

  if [[ "$escaped_multi" == "test''''value" ]]; then
    log_info "  ✓ SQL injection protection: Multiple quotes PASS"
  else
    log_error "  ✗ SQL injection protection: Multiple quotes FAIL (expected: test''''value, got: $escaped_multi)"
    return 1
  fi

  # Test 3: Empty string passes through unchanged
  local empty=""
  local escaped_empty=$(sql_escape "$empty")

  if [[ "$escaped_empty" == "" ]]; then
    log_info "  ✓ SQL injection protection: Empty string PASS"
  else
    log_error "  ✗ SQL injection protection: Empty string FAIL"
    return 1
  fi

  # Test 4: String with no quotes is untouched
  local normal="normal_value"
  local escaped_normal=$(sql_escape "$normal")

  if [[ "$escaped_normal" == "normal_value" ]]; then
    log_info "  ✓ SQL injection protection: Normal string PASS"
  else
    log_error "  ✗ SQL injection protection: Normal string FAIL"
    return 1
  fi

  log_info "All SQL injection protection tests passed!"
  return 0
}
|
|
501
|
+
|
|
502
|
+
# ============================================================================
|
|
503
|
+
# Main Execution
|
|
504
|
+
# ============================================================================
|
|
505
|
+
|
|
506
|
+
# Entry point: parse arguments, validate the environment (sqlite3, log dir,
# database, registry dir), run the sql_escape self-test, migrate artifacts
# from the configured source directories, and print a summary.
# Exits 1 if any errors were logged, 3 if security self-tests fail, 0 otherwise.
main() {
  parse_arguments "$@"

  # Check required dependencies
  check_dependencies

  # Ensure log directory exists (must happen before the first log_* call
  # so tee -a has a writable target)
  ensure_directory "$(dirname "$LOG_FILE")"

  log_info "Artifact Migration Script - Version 1.0.0"
  log_info "Starting at $(date '+%Y-%m-%d %H:%M:%S')"

  # Run security tests to validate SQL injection protection
  test_sql_injection || {
    log_error "Security tests failed - aborting migration"
    exit 3
  }

  log_info "Configuration:"
  log_info "  - Source Directories: $SOURCE_DIRS"
  log_info "  - Registry Path: $REGISTRY_PATH"
  log_info "  - Database Path: $DB_PATH"
  log_info "  - Dry Run: $DRY_RUN"
  log_info "  - Auto-Detect Type: $AUTO_DETECT_TYPE"

  # Validate database (creates and initializes it if missing)
  validate_database

  # Ensure registry path exists
  ensure_directory "$REGISTRY_PATH"

  # Find and migrate artifacts
  find_artifacts "$SOURCE_DIRS"

  # Summary
  log_info "=== Migration Summary ==="
  log_info "  - Files Found: $FILES_FOUND"
  log_info "  - Files Migrated: $FILES_MIGRATED"
  log_info "  - Files Skipped: $FILES_SKIPPED"
  log_info "  - Errors: $ERRORS"

  if [[ "$DRY_RUN" == "true" ]]; then
    log_info "  - Mode: DRY RUN (no changes made)"
  fi

  log_info "Migration completed at $(date '+%Y-%m-%d %H:%M:%S')"

  if [[ $ERRORS -gt 0 ]]; then
    exit 1
  fi

  exit 0
}
|
|
559
|
+
|
|
560
|
+
# Run main if executed directly
|
|
561
|
+
# Run main only when the script is executed directly (not when sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  main "$@"
fi
|