claude-flow-novice 2.15.2 → 2.15.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/hooks/cfn-BACKUP_USAGE.md +243 -243
- package/.claude/hooks/cfn-invoke-security-validation.sh +69 -69
- package/.claude/hooks/cfn-post-edit-cfn-retrospective.sh +78 -78
- package/.claude/hooks/cfn-post-edit.config.json +44 -44
- package/.claude/skills/agent-lifecycle/SKILL.md +60 -0
- package/.claude/skills/agent-lifecycle/execute-lifecycle-hook.sh +573 -0
- package/.claude/skills/agent-lifecycle/simple-audit.sh +31 -0
- package/.claude/skills/cfn-hybrid-routing/check-dependencies.sh +51 -51
- package/.claude/skills/cfn-loop-validation/orchestrate-cfn-loop.sh +252 -252
- package/.claude/skills/cfn-redis-coordination/agent-recovery.sh +74 -74
- package/.claude/skills/cfn-redis-coordination/get-context.sh +112 -112
- package/.claude/skills/cfn-transparency-middleware/middleware-config.sh +28 -28
- package/.claude/skills/cfn-transparency-middleware/performance-benchmark.sh +78 -78
- package/.claude/skills/cfn-transparency-middleware/test-integration.sh +161 -161
- package/.claude/skills/cfn-transparency-middleware/test-transparency-skill.sh +367 -367
- package/.claude/skills/cfn-transparency-middleware/tests/input-validation.sh +92 -92
- package/.claude/skills/cfn-transparency-middleware/wrap-agent.sh +131 -131
- package/claude-assets/hooks/cfn-BACKUP_USAGE.md +243 -243
- package/claude-assets/hooks/cfn-invoke-security-validation.sh +69 -69
- package/claude-assets/hooks/cfn-post-edit-cfn-retrospective.sh +78 -78
- package/claude-assets/hooks/cfn-post-edit.config.json +44 -44
- package/claude-assets/hooks/cfn-post-execution/memory-cleanup.sh +19 -19
- package/claude-assets/hooks/cfn-pre-execution/memory-check.sh +19 -19
- package/claude-assets/skills/agent-lifecycle/execute-lifecycle-hook.sh +572 -572
- package/claude-assets/skills/agent-lifecycle/simple-audit.sh +30 -30
- package/claude-assets/skills/cfn-automatic-memory-persistence/persist-agent-output.sh +48 -48
- package/claude-assets/skills/cfn-automatic-memory-persistence/query-agent-history.sh +34 -34
- package/claude-assets/skills/cfn-deliverable-validation/confidence-calculator.sh +261 -261
- package/claude-assets/skills/cfn-expert-update/update-expert.sh +345 -345
- package/claude-assets/skills/cfn-hybrid-routing/check-dependencies.sh +51 -51
- package/claude-assets/skills/cfn-intervention-detector/detect-intervention.sh +110 -110
- package/claude-assets/skills/cfn-intervention-orchestrator/execute-intervention.sh +58 -58
- package/claude-assets/skills/cfn-loop-validation/orchestrate-cfn-loop.sh +252 -252
- package/claude-assets/skills/cfn-loop2-output-processing/process-validator-output.sh +275 -275
- package/claude-assets/skills/cfn-memory-management/check-memory.sh +159 -159
- package/claude-assets/skills/cfn-memory-management/cleanup-memory.sh +196 -196
- package/claude-assets/skills/cfn-node-heap-sizer/task-mode-heap-limiter.sh +325 -325
- package/claude-assets/skills/cfn-playbook-auto-update/auto-update-playbook.sh +85 -85
- package/claude-assets/skills/cfn-redis-coordination/agent-recovery.sh +74 -74
- package/claude-assets/skills/cfn-redis-coordination/get-context.sh +112 -112
- package/claude-assets/skills/cfn-scope-simplifier/simplify-scope.sh +67 -67
- package/claude-assets/skills/cfn-specialist-injection/recommend-specialist.sh +56 -56
- package/claude-assets/skills/cfn-standardized-error-handling/capture-agent-error.sh +86 -86
- package/claude-assets/skills/cfn-standardized-error-handling/test-error-handling.sh +165 -165
- package/claude-assets/skills/cfn-task-config-init/initialize-config.sh +264 -264
- package/claude-assets/skills/cfn-task-decomposition/task-decomposer.sh +278 -278
- package/claude-assets/skills/cfn-transparency-middleware/middleware-config.sh +28 -28
- package/claude-assets/skills/cfn-transparency-middleware/performance-benchmark.sh +78 -78
- package/claude-assets/skills/cfn-transparency-middleware/test-integration.sh +161 -161
- package/claude-assets/skills/cfn-transparency-middleware/test-transparency-skill.sh +367 -367
- package/claude-assets/skills/cfn-transparency-middleware/tests/input-validation.sh +92 -92
- package/claude-assets/skills/cfn-transparency-middleware/wrap-agent.sh +131 -131
- package/claude-assets/skills/docker-build/SKILL.md +96 -203
- package/claude-assets/skills/docker-build/build.sh +73 -73
- package/claude-assets/skills/integration/agent-handoff.sh +494 -0
- package/claude-assets/skills/integration/file-operations.sh +414 -0
- package/claude-assets/skills/workflow-codification/APPROVAL_WORKFLOW.md +806 -0
- package/claude-assets/skills/workflow-codification/COST_TRACKING.md +637 -0
- package/claude-assets/skills/workflow-codification/EDGE_CASE_TRACKING.md +404 -0
- package/claude-assets/skills/workflow-codification/README_PHASE4.md +457 -0
- package/claude-assets/skills/workflow-codification/SKILL.md +110 -0
- package/claude-assets/skills/workflow-codification/analyze-patterns.sh +899 -0
- package/claude-assets/skills/workflow-codification/approval-workflow.sh +514 -0
- package/claude-assets/skills/workflow-codification/generate-skill-update.sh +525 -0
- package/claude-assets/skills/workflow-codification/review-skill.sh +643 -0
- package/claude-assets/skills/workflow-codification/templates/email-notification.txt +114 -0
- package/claude-assets/skills/workflow-codification/templates/slack-notification.md +85 -0
- package/claude-assets/skills/workflow-codification/test-integration.sh +281 -0
- package/claude-assets/skills/workflow-codification/track-cost-savings.sh +445 -0
- package/claude-assets/skills/workflow-codification/track-edge-case.sh +323 -0
- package/dist/cli/config-manager.js +91 -109
- package/dist/cli/config-manager.js.map +1 -1
- package/dist/integration/DatabaseHandoff.js +507 -0
- package/dist/integration/DatabaseHandoff.js.map +1 -0
- package/dist/integration/StandardAdapter.js +291 -0
- package/dist/integration/StandardAdapter.js.map +1 -0
- package/dist/lib/agent-output-parser.js +518 -0
- package/dist/lib/agent-output-parser.js.map +1 -0
- package/dist/lib/agent-output-validator.js +950 -0
- package/dist/lib/agent-output-validator.js.map +1 -0
- package/dist/lib/artifact-registry.js +443 -0
- package/dist/lib/artifact-registry.js.map +1 -0
- package/dist/lib/config-validator.js +687 -0
- package/dist/lib/config-validator.js.map +1 -0
- package/dist/types/agent-output.js +44 -0
- package/dist/types/agent-output.js.map +1 -0
- package/dist/types/config.js +28 -0
- package/dist/types/config.js.map +1 -0
- package/package.json +2 -1
- package/scripts/artifact-cleanup.sh +392 -0
- package/scripts/deploy-production.sh +355 -355
- package/scripts/docker-playwright-fix.sh +311 -311
- package/scripts/docker-rebuild-all-agents.sh +127 -127
- package/scripts/memory-leak-prevention.sh +305 -305
- package/scripts/migrate-artifacts.sh +563 -0
- package/scripts/migrate-yaml-to-json.sh +465 -0
- package/scripts/run-marketing-tests.sh +42 -42
- package/scripts/update_paths.sh +46 -46
|
@@ -0,0 +1,414 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# file-operations.sh - Standard file operation patterns
#
# Features:
# - Atomic writes (temp → final)
# - Content hashing (SHA256)
# - Backup/restore (with metadata)
# - Validation hooks (pre/post)
#
# Usage:
#   source file-operations.sh
#   file_write_atomic "/path/to/file" "content" "task-123" "agent-456"
#   file_backup "/path/to/file" "task-123" "agent-456"
#   file_restore "/path/to/file" "backup-id"
#   file_validate "/path/to/file" "expected-hash"

set -euo pipefail

# Configuration — each variable is environment-overridable; the ':='
# expansion assigns the default only when the variable is unset/empty.
: "${BACKUP_DIR:=./.backups}"
: "${TEMP_DIR:=/tmp/file-ops}"
: "${LOG_FILE:=/tmp/file-operations.log}"

# Working directories must exist before any operation runs.
mkdir -p "$BACKUP_DIR" "$TEMP_DIR"
|
|
26
|
+
|
|
27
|
+
# --- Logging Functions ---

# Append one structured JSON log line to $LOG_FILE.
# Args:    $1=level, $2=message, remaining args are joined and used as a
#          raw JSON "context" value (caller supplies valid JSON, e.g. {"k":"v"}).
# Output:  appends to $LOG_FILE; mirrors ERROR/WARN entries to stderr.
log_structured() {
  local level="$1"
  local message="$2"
  shift 2
  local context="$*"

  local timestamp
  timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

  # Escape backslashes and double quotes so a message containing either
  # cannot break the JSON framing of the log entry.
  local escaped=${message//\\/\\\\}
  escaped=${escaped//\"/\\\"}

  local log_entry
  log_entry="{\"level\":\"$level\",\"message\":\"$escaped\",\"timestamp\":\"$timestamp\"${context:+,\"context\":$context}}"

  echo "$log_entry" >> "$LOG_FILE"

  # Mirror warnings and errors to stderr for operator visibility.
  if [[ "$level" == "ERROR" || "$level" == "WARN" ]]; then
    echo "$log_entry" >&2
  fi
}
|
|
51
|
+
|
|
52
|
+
# INFO-level convenience wrapper: $1=message, $2=optional JSON context.
log_info() { log_structured "INFO" "$1" "${2:-}"; }
|
|
55
|
+
|
|
56
|
+
# WARN-level convenience wrapper: $1=message, $2=optional JSON context.
log_warn() { log_structured "WARN" "$1" "${2:-}"; }
|
|
59
|
+
|
|
60
|
+
# ERROR-level convenience wrapper: $1=message, $2=optional JSON context.
log_error() { log_structured "ERROR" "$1" "${2:-}"; }
|
|
63
|
+
|
|
64
|
+
# DEBUG-level convenience wrapper: $1=message, $2=optional JSON context.
log_debug() { log_structured "DEBUG" "$1" "${2:-}"; }
|
|
67
|
+
|
|
68
|
+
# --- Content Hashing ---

# Generate SHA256 hash of file content.
# Args:    $1=file_path
# Output:  hex hash string on stdout.
# Returns: 1 if the file does not exist.
file_hash() {
  local target="$1"

  if [[ ! -f "$target" ]]; then
    log_error "Cannot hash non-existent file" "{\"file\":\"$target\"}"
    return 1
  fi

  # sha256sum prints "<hash>  <name>"; keep only the first field.
  local digest
  digest=$(sha256sum "$target" | cut -d' ' -f1)

  log_debug "File hash generated" "{\"file\":\"$target\",\"hash\":\"$digest\"}"
  echo "$digest"
}
|
|
87
|
+
|
|
88
|
+
# Verify that a file's SHA256 digest equals an expected value.
# Args:    $1=file_path, $2=expected_hash
# Returns: 0 if match, 1 if mismatch (mismatch is logged as ERROR).
file_verify_hash() {
  local target="$1"
  local expected="$2"

  local actual
  actual=$(file_hash "$target")

  if [[ "$actual" == "$expected" ]]; then
    log_info "Hash verification passed" "{\"file\":\"$target\",\"hash\":\"$actual\"}"
    return 0
  fi

  log_error "Hash mismatch" "{\"file\":\"$target\",\"expected\":\"$expected\",\"actual\":\"$actual\"}"
  return 1
}
|
|
106
|
+
|
|
107
|
+
# --- Atomic File Operations ---

# Atomic file write: write to a temp file, then rename into place.
# The temp file is created in the destination directory so the final mv
# is a same-filesystem rename; a mv from $TEMP_DIR could silently
# degrade to copy+unlink across filesystems, losing atomicity.
# Args:    $1=file_path, $2=content, $3=task_id, $4=agent_id
# Output:  echoes the SHA256 hash of the written content.
# Hooks:   FILE_OP_PRE_WRITE_HOOK (failure aborts the write),
#          FILE_OP_POST_WRITE_HOOK (failure only warns).
file_write_atomic() {
  local file_path="$1"
  local content="$2"
  local task_id="${3:-unknown}"
  local agent_id="${4:-unknown}"

  local file_dir
  file_dir=$(dirname "$file_path")

  local file_name
  file_name=$(basename "$file_path")

  # Create directory if it doesn't exist
  mkdir -p "$file_dir"

  # Temp file carries the correlation IDs and lives next to the target.
  local temp_file="${file_dir}/.${task_id}-${agent_id}-${file_name}.tmp"

  # Pre-write hook (if configured and executable) — failure aborts.
  if [[ -n "${FILE_OP_PRE_WRITE_HOOK:-}" ]] && [[ -x "$FILE_OP_PRE_WRITE_HOOK" ]]; then
    log_debug "Executing pre-write hook" "{\"hook\":\"$FILE_OP_PRE_WRITE_HOOK\"}"
    "$FILE_OP_PRE_WRITE_HOOK" "$file_path" "$task_id" "$agent_id" || {
      log_error "Pre-write hook failed" "{\"hook\":\"$FILE_OP_PRE_WRITE_HOOK\"}"
      return 1
    }
  fi

  # printf, not echo: content equal to "-n"/"-e" (or containing
  # backslashes under some shells) would be mangled by echo.
  printf '%s\n' "$content" > "$temp_file"

  # Hash the temp file before publishing it; drop the temp on failure.
  local content_hash
  content_hash=$(file_hash "$temp_file") || {
    rm -f -- "$temp_file"
    return 1
  }

  # Atomic rename into place.
  mv -- "$temp_file" "$file_path"

  log_info "Atomic write completed" "{\"file\":\"$file_path\",\"hash\":\"$content_hash\",\"task_id\":\"$task_id\",\"agent_id\":\"$agent_id\"}"

  # Post-write hook: failure is logged but does not fail the operation.
  if [[ -n "${FILE_OP_POST_WRITE_HOOK:-}" ]] && [[ -x "$FILE_OP_POST_WRITE_HOOK" ]]; then
    log_debug "Executing post-write hook" "{\"hook\":\"$FILE_OP_POST_WRITE_HOOK\"}"
    "$FILE_OP_POST_WRITE_HOOK" "$file_path" "$task_id" "$agent_id" || {
      log_warn "Post-write hook failed" "{\"hook\":\"$FILE_OP_POST_WRITE_HOOK\"}"
    }
  fi

  echo "$content_hash"
}
|
|
163
|
+
|
|
164
|
+
# --- Backup and Restore ---

# Backup a file with JSON metadata (hash, size, correlation IDs).
# Args:    $1=file_path, $2=task_id, $3=agent_id
# Output:  echoes the backup_id.
# Returns: 1 if the file does not exist.
file_backup() {
  local file_path="$1"
  local task_id="${2:-unknown}"
  local agent_id="${3:-unknown}"

  if [[ ! -f "$file_path" ]]; then
    log_error "Cannot backup non-existent file" "{\"file\":\"$file_path\"}"
    return 1
  fi

  # Backup ID encodes the correlation IDs plus epoch seconds.
  local timestamp
  timestamp=$(date +%s)
  local backup_id="${task_id}-${agent_id}-${timestamp}"

  local file_hash_val
  file_hash_val=$(file_hash "$file_path")

  # Create backup directory structure: <BACKUP_DIR>/<task>/<backup_id>/
  local backup_path="${BACKUP_DIR}/${task_id}/${backup_id}"
  mkdir -p "$backup_path"

  # Copy the file alongside its metadata.
  local file_name
  file_name=$(basename "$file_path")
  cp -- "$file_path" "${backup_path}/${file_name}"

  # 'wc -c' is portable; GNU 'stat -c%s' fails on BSD/macOS (which use
  # 'stat -f%z'). Arithmetic expansion strips BSD wc's leading spaces.
  local file_size
  file_size=$(wc -c < "$file_path")
  file_size=$((file_size))

  # Create metadata file
  cat > "${backup_path}/metadata.json" <<EOF
{
  "backup_id": "$backup_id",
  "task_id": "$task_id",
  "agent_id": "$agent_id",
  "original_path": "$file_path",
  "file_name": "$file_name",
  "content_hash": "$file_hash_val",
  "backup_timestamp": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
  "file_size": $file_size
}
EOF

  log_info "File backed up" "{\"file\":\"$file_path\",\"backup_id\":\"$backup_id\",\"hash\":\"$file_hash_val\"}"

  echo "$backup_id"
}
|
|
214
|
+
|
|
215
|
+
# Restore a file from a backup, verifying integrity first.
# Args:    $1=file_path (destination), $2=backup_id
# Returns: 0 on success; 1 if the backup, its metadata, or its payload
#          is missing, or if the integrity check fails.
file_restore() {
  local file_path="$1"
  local backup_id="$2"

  # Find backup (search all task directories)
  local backup_path
  backup_path=$(find "$BACKUP_DIR" -type d -name "$backup_id" | head -1)

  if [[ -z "$backup_path" ]]; then
    log_error "Backup not found" "{\"backup_id\":\"$backup_id\"}"
    return 1
  fi

  local metadata_file="${backup_path}/metadata.json"
  if [[ ! -f "$metadata_file" ]]; then
    log_error "Backup metadata missing" "{\"backup_id\":\"$backup_id\"}"
    return 1
  fi

  # Extract metadata
  local file_name
  file_name=$(jq -r '.file_name' "$metadata_file")

  local expected_hash
  expected_hash=$(jq -r '.content_hash' "$metadata_file")

  local backup_file="${backup_path}/${file_name}"

  if [[ ! -f "$backup_file" ]]; then
    log_error "Backup file missing" "{\"backup_id\":\"$backup_id\",\"file\":\"$backup_file\"}"
    return 1
  fi

  # Verify backup integrity before touching the destination.
  if ! file_verify_hash "$backup_file" "$expected_hash"; then
    log_error "Backup integrity check failed" "{\"backup_id\":\"$backup_id\"}"
    return 1
  fi

  # Recreate the destination directory in case it was removed along
  # with the file being restored.
  local dest_dir
  dest_dir=$(dirname "$file_path")
  mkdir -p "$dest_dir"

  # Copy next to the destination, then rename: a same-filesystem mv
  # keeps the restore atomic ($TEMP_DIR may be on another filesystem).
  local temp_file="${dest_dir}/.restore-${backup_id}.tmp"
  cp -- "$backup_file" "$temp_file"
  mv -- "$temp_file" "$file_path"

  log_info "File restored from backup" "{\"file\":\"$file_path\",\"backup_id\":\"$backup_id\"}"

  return 0
}
|
|
266
|
+
|
|
267
|
+
# List backups recorded for a task.
# Args:    $1=task_id
# Output:  JSON array of metadata objects ("[]" when none exist).
file_list_backups() {
  local task_id="$1"

  local task_backup_dir="${BACKUP_DIR}/${task_id}"

  if [[ ! -d "$task_backup_dir" ]]; then
    echo "[]"
    return 0
  fi

  # Collect every metadata.json found one level below the task dir.
  local collected=()
  local entry
  for entry in "$task_backup_dir"/*/metadata.json; do
    [[ -f "$entry" ]] && collected+=("$(cat "$entry")")
  done

  # Merge the individual objects into one JSON array via jq slurp.
  if (( ${#collected[@]} == 0 )); then
    echo "[]"
  else
    printf '%s\n' "${collected[@]}" | jq -s '.'
  fi
}
|
|
295
|
+
|
|
296
|
+
# --- Validation ---

# Validate that a file exists, is readable, and (optionally) matches an
# expected SHA256 hash.
# Args:    $1=file_path, $2=expected_hash (optional)
# Returns: 0 if valid, 1 otherwise.
file_validate() {
  local target="$1"
  local expected="${2:-}"

  if [[ ! -f "$target" ]]; then
    log_error "File validation failed: file not found" "{\"file\":\"$target\"}"
    return 1
  fi

  if [[ ! -r "$target" ]]; then
    log_error "File validation failed: not readable" "{\"file\":\"$target\"}"
    return 1
  fi

  # Hash check only runs when the caller supplied an expected value.
  if [[ -n "$expected" ]] && ! file_verify_hash "$target" "$expected"; then
    return 1
  fi

  log_info "File validation passed" "{\"file\":\"$target\"}"
  return 0
}
|
|
326
|
+
|
|
327
|
+
# --- Cleanup ---

# Delete backups older than the retention period.
# Args:    $1=retention_days (default: 7)
# Output:  echoes the count of deleted backups.
# NOTE(review): uses GNU 'date -d' to parse the ISO timestamp; BSD/macOS
# date needs '-j -f' instead — confirm target platforms.
file_cleanup_backups() {
  local retention_days="${1:-7}"

  log_info "Starting backup cleanup" "{\"retention_days\":$retention_days}"

  local deleted_count=0

  # Hoist loop-invariant values out of the scan loop.
  local now retention_seconds
  now=$(date +%s)
  retention_seconds=$((retention_days * 86400))

  # Backup directories are named <task>-<agent>-<epoch>, hence "*-*-*";
  # the metadata check below filters out any accidental matches.
  while IFS= read -r -d '' backup_dir; do
    local metadata_file="${backup_dir}/metadata.json"
    [[ -f "$metadata_file" ]] || continue

    local backup_timestamp
    backup_timestamp=$(jq -r '.backup_timestamp' "$metadata_file")

    local backup_age_seconds=$(( now - $(date -d "$backup_timestamp" +%s) ))

    if (( backup_age_seconds > retention_seconds )); then
      local backup_id
      backup_id=$(jq -r '.backup_id' "$metadata_file")

      rm -rf -- "$backup_dir"
      log_info "Deleted old backup" "{\"backup_id\":\"$backup_id\",\"age_days\":$((backup_age_seconds / 86400))}"
      # Plain arithmetic assignment: the previous '((deleted_count++))'
      # returns exit status 1 when the old value is 0, which aborts the
      # whole script under 'set -e' on the very first deletion.
      deleted_count=$((deleted_count + 1))
    fi
  done < <(find "$BACKUP_DIR" -type d -name "*-*-*" -print0)

  log_info "Backup cleanup completed" "{\"deleted_count\":$deleted_count}"

  echo "$deleted_count"
}
|
|
367
|
+
|
|
368
|
+
# --- Main Execution (if run directly) ---

# Example usage if script is executed directly
# (when sourced, BASH_SOURCE[0] != $0 and this help text is skipped;
# the quoted 'EOF' delimiter keeps the text below unexpanded).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  cat <<'EOF'
file-operations.sh - Standard file operation patterns

USAGE EXAMPLES:

# Atomic write
file_write_atomic "/tmp/test.txt" "Hello, World!" "task-123" "agent-456"

# Backup
backup_id=$(file_backup "/tmp/test.txt" "task-123" "agent-456")

# Restore
file_restore "/tmp/test.txt" "$backup_id"

# Validate
file_validate "/tmp/test.txt"

# List backups
file_list_backups "task-123"

# Cleanup old backups (older than 7 days)
file_cleanup_backups 7

HOOKS:
Set environment variables to enable pre/post-write hooks:

export FILE_OP_PRE_WRITE_HOOK="/path/to/pre-write.sh"
export FILE_OP_POST_WRITE_HOOK="/path/to/post-write.sh"

CONFIGURATION:
BACKUP_DIR - Backup storage directory (default: ./.backups)
TEMP_DIR - Temporary file directory (default: /tmp/file-ops)
LOG_FILE - Structured log file (default: /tmp/file-operations.log)

BEFORE (Ad-hoc):
echo "content" > /tmp/file.txt # ❌ No atomicity, no backup, no verification

AFTER (Standardized):
file_write_atomic "/tmp/file.txt" "content" "task-123" "agent-456" # ✅ Atomic, logged, hash verified
backup_id=$(file_backup "/tmp/file.txt" "task-123" "agent-456") # ✅ Recoverable
EOF
fi
|