s9n-devops-agent 2.0.1 → 2.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cs-devops-agent +16 -1
- package/package.json +2 -1
- package/scripts/changed-areas.sh +93 -0
- package/scripts/coordination/check-file-availability.sh +27 -0
- package/scripts/coordination/declare-file-edits.sh +45 -0
- package/scripts/coordination/release-file-edits.sh +15 -0
- package/scripts/lib/log.sh +47 -0
- package/scripts/post-install.js +176 -0
- package/scripts/repair-house-rules.sh +151 -0
- package/scripts/run-tests +185 -0
- package/scripts/setup-file-coordination.sh +416 -0
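
A diff like this can normally be reproduced locally with npm's built-in diff command (a sketch, assuming npm 7+ and access to the public registry):

    npm diff --diff=s9n-devops-agent@2.0.1 --diff=s9n-devops-agent@2.0.3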
package/bin/cs-devops-agent
CHANGED

@@ -51,8 +51,23 @@ switch(command) {
   case 'start':
   case 'session':
     // Start interactive session manager
+    // Try bash script first (for Unix/Mac), fallback to Node implementation for Windows
     const sessionScript = join(rootDir, 'start-devops-session.sh');
-    if (fs.existsSync(sessionScript)) {
+
+    if (process.platform === 'win32') {
+      // On Windows, check if bash is available, otherwise use Node coordinator
+      import('child_process').then(({ execSync }) => {
+        try {
+          execSync('bash --version', { stdio: 'ignore' });
+          // Bash is available, use the shell script
+          runShellScript(sessionScript, args.slice(1));
+        } catch (e) {
+          // Bash not available, use Node.js session coordinator directly
+          console.log('Starting DevOps Agent session manager...\n');
+          runScript(join(rootDir, 'src', 'session-coordinator.js'), ['start', ...args.slice(1)]);
+        }
+      });
+    } else if (fs.existsSync(sessionScript)) {
       runShellScript(sessionScript, args.slice(1));
     } else {
       console.error('Session script not found. Please ensure the package is properly installed.');
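
The practical effect of this change is that the start/session command no longer requires a Unix shell: on Windows it probes for bash and, if none is found, falls back to the bundled Node session coordinator. A quick check, as a sketch (assuming the package's bin is on PATH as cs-devops-agent):

    # On Windows without Git Bash installed, this now starts the Node coordinator
    # instead of failing to find start-devops-session.sh
    cs-devops-agent start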
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "s9n-devops-agent",
-  "version": "2.0.1",
+  "version": "2.0.3",
   "description": "CS_DevOpsAgent - Intelligent Git Automation System with multi-agent support and session management",
   "type": "module",
   "main": "src/cs-devops-agent-worker.js",
@@ -10,6 +10,7 @@
   "files": [
     "bin/",
     "src/",
+    "scripts/",
     "docs/",
     "start-devops-session.sh",
     "cleanup-sessions.sh",
package/scripts/changed-areas.sh
ADDED

@@ -0,0 +1,93 @@
+#!/usr/bin/env bash
+set -euo pipefail
+ROOT="$(git rev-parse --show-toplevel)"
+
+# Redirect log output to stderr to keep stdout clean
+exec 3>&1
+exec 1>&2
+source "$ROOT/scripts/lib/log.sh"
+
+BASE="${BASE_REF:-origin/main}"
+HEAD="${HEAD_REF:-HEAD}"
+
+# Allow flags: --base <ref> --head <ref>
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    --base) BASE="$2"; shift 2 ;;
+    --head) HEAD="$2"; shift 2 ;;
+    *) log_warn "Unknown flag: $1"; shift ;;
+  esac
+done
+
+log_group_start "Computing changed areas (BASE=$BASE, HEAD=$HEAD)"
+changed_files=$(git diff --name-only "$BASE" "$HEAD" || true)
+if [ -z "${changed_files}" ]; then
+  log_info "No changed files detected."
+  log_group_end
+  exit 0
+fi
+
+# Heuristics:
+# - Ignore tests + scripts + docs when mapping areas
+# - Map first two path segments as <area>/<component> when possible
+# - For monorepos with packages/<pkg> or apps/<app>, area=<pkg|app>, component=inferred child
+areas=()
+
+while IFS= read -r f; do
+  # Normalize and skip
+  [[ -z "$f" ]] && continue
+  [[ "$f" =~ ^test_cases/ ]] && { log_debug "Skip test file $f"; continue; }
+  [[ "$f" =~ ^scripts/ ]] && { log_debug "Skip script $f"; continue; }
+  [[ "$f" =~ ^docs?/ ]] && { log_debug "Skip docs $f"; continue; }
+
+  # CS_DevOpsAgent-specific patterns
+  if [[ "$f" =~ ^worktree-manager\.js$ ]]; then
+    area="worktree"
+    component="manager"
+  elif [[ "$f" =~ ^src/cs-devops-agent-worker\.js$ ]]; then
+    area="cs-devops-agent"
+    component="worker"
+  elif [[ "$f" =~ ^cs-devops-agent-worker\.js$ ]]; then
+    area="cs-devops-agent"
+    component="worker"
+  elif [[ "$f" =~ ^run-with-agent\.js$ ]]; then
+    area="agent"
+    component="runner"
+  elif [[ "$f" =~ ^product_requirement_docs/(.+)\.md$ ]]; then
+    area="docs"
+    component="${BASH_REMATCH[1]}"
+  # Monorepo patterns
+  elif [[ "$f" =~ ^packages/([^/]+)/([^/]+)/ ]]; then
+    area="${BASH_REMATCH[1]}"
+    component="${BASH_REMATCH[2]}"
+  elif [[ "$f" =~ ^apps/([^/]+)/([^/]+)/ ]]; then
+    area="${BASH_REMATCH[1]}"
+    component="${BASH_REMATCH[2]}"
+  elif [[ "$f" =~ ^([^/]+)/([^/]+)/ ]]; then
+    area="${BASH_REMATCH[1]}"
+    component="${BASH_REMATCH[2]}"
+  elif [[ "$f" =~ ^([^/]+)/ ]]; then
+    area="${BASH_REMATCH[1]}"
+    component="default"
+  else
+    area="root"
+    component="default"
+  fi
+
+  path="test_cases/${area}/${component}"
+  log_debug "Map $f -> $path"
+  areas+=("$path")
+done <<< "$changed_files"
+
+# Unique + existing or prospective test dirs
+unique=($(printf "%s\n" "${areas[@]}" | sort -u))
+
+# Output test dirs to original stdout
+exec 1>&3
+for a in "${unique[@]}"; do
+  echo "$a"
+done
+
+# Restore stderr for final log
+exec 1>&2
+log_group_end
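
A minimal invocation sketch (assuming it is run from inside a clone with an origin/main ref): log lines go to stderr, and the inferred test_cases/<area>/<component> paths are the only thing printed to stdout, so other scripts can consume the output directly.

    scripts/changed-areas.sh --base origin/main --head HEAD
    # stdout might contain, for example: test_cases/cs-devops-agent/worker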
package/scripts/coordination/check-file-availability.sh
ADDED

@@ -0,0 +1,27 @@
+#!/bin/bash
+# Check if files are available for editing
+
+FILES_TO_CHECK="$@"
+COORD_DIR=".file-coordination/active-edits"
+BLOCKED_FILES=""
+
+for file in $FILES_TO_CHECK; do
+  # Check if file is being edited
+  if [ -n "$(ls -A "$COORD_DIR" 2>/dev/null)" ]; then
+    if grep -l "\"$file\"" "$COORD_DIR"/*.json 2>/dev/null | head -1; then
+      BLOCKED_BY=$(grep -l "\"$file\"" "$COORD_DIR"/*.json | xargs basename | cut -d. -f1)
+      echo "❌ BLOCKED: $file (being edited by $BLOCKED_BY)"
+      BLOCKED_FILES="$BLOCKED_FILES $file"
+    else
+      echo "✅ AVAILABLE: $file"
+    fi
+  else
+    echo "✅ AVAILABLE: $file"
+  fi
+done
+
+if [ -n "$BLOCKED_FILES" ]; then
+  exit 1
+else
+  exit 0
+fi
package/scripts/coordination/declare-file-edits.sh
ADDED

@@ -0,0 +1,45 @@
+#!/bin/bash
+# Declare files that will be edited
+
+AGENT="${1:-unknown}"
+SESSION="${2:-$(date +%s)}"
+shift 2
+FILES="$@"
+
+COORD_DIR=".file-coordination"
+DECLARATION_FILE="$COORD_DIR/active-edits/${AGENT}-${SESSION}.json"
+
+# Ensure directory exists
+mkdir -p "$COORD_DIR/active-edits"
+
+# Check if any files are already being edited
+for file in $FILES; do
+  # Check each existing JSON file
+  for json_file in "$COORD_DIR/active-edits"/*.json; do
+    # Skip if no files exist (glob didn't expand)
+    [ -f "$json_file" ] || continue
+    # Skip our own declaration file
+    [ "$json_file" = "$DECLARATION_FILE" ] && continue
+    # Check if this file is already declared
+    if grep -q "\"$file\"" "$json_file" 2>/dev/null; then
+      agent_name=$(basename "$json_file" | cut -d'-' -f1)
+      echo "❌ Cannot declare: $file is already being edited by $agent_name"
+      exit 1
+    fi
+  done
+done
+
+# Create declaration
+cat > "$DECLARATION_FILE" << EOF
+{
+  "agent": "$AGENT",
+  "session": "$SESSION",
+  "files": [$(echo $FILES | sed 's/ /", "/g' | sed 's/^/"/;s/$/"/')],
+  "operation": "edit",
+  "declaredAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
+  "estimatedDuration": 300
+}
+EOF
+
+echo "✅ Declared edits for: $FILES"
+echo "Declaration saved to: $DECLARATION_FILE"
package/scripts/coordination/release-file-edits.sh
ADDED

@@ -0,0 +1,15 @@
+#!/bin/bash
+# Release files after editing
+
+AGENT="${1:-unknown}"
+SESSION="${2:-*}"
+
+COORD_DIR=".file-coordination"
+DECLARATION_FILE="$COORD_DIR/active-edits/${AGENT}-${SESSION}.json"
+
+if ls $DECLARATION_FILE 1> /dev/null 2>&1; then
+  mv $DECLARATION_FILE "$COORD_DIR/completed-edits/" 2>/dev/null || true
+  echo "✅ Released files for $AGENT-$SESSION"
+else
+  echo "⚠️ No active declaration found for $AGENT-$SESSION"
+fi
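
Taken together, the three coordination scripts are meant to be called in a declare → edit → release sequence. A sketch of one session (the agent name, session id, and file path here are illustrative, not taken from the package):

    ./scripts/coordination/check-file-availability.sh src/example.js
    ./scripts/coordination/declare-file-edits.sh my-agent session-123 src/example.js
    # ... make the edits ...
    ./scripts/coordination/release-file-edits.sh my-agent session-123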
package/scripts/lib/log.sh
ADDED

@@ -0,0 +1,47 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+LOG_LEVEL="${LOG_LEVEL:-info}"
+TRACE="${TRACE:-0}"
+
+# Map level to numeric
+_level_num() {
+  local level_lower=$(echo "$1" | tr '[:upper:]' '[:lower:]')
+  case "$level_lower" in
+    debug) echo 10 ;;
+    info) echo 20 ;;
+    warn) echo 30 ;;
+    error) echo 40 ;;
+    *) echo 20 ;;
+  esac
+}
+
+should_log() {
+  local want="$(_level_num "${1:-info}")"
+  local cur="$(_level_num "$LOG_LEVEL")"
+  [ "$want" -ge "$cur" ]
+}
+
+ts() { date -u +"%Y-%m-%dT%H:%M:%SZ"; }
+
+_log() {
+  local lvl="$1"; shift
+  if should_log "$lvl"; then
+    local lvl_upper=$(echo "$lvl" | tr '[:lower:]' '[:upper:]')
+    printf "%s [%s] %s\n" "$(ts)" "$lvl_upper" "$*" 1>&2
+  fi
+}
+
+log_debug() { _log debug "$@"; }
+log_info() { _log info "$@"; }
+log_warn() { _log warn "$@"; }
+log_error() { _log error "$@"; }
+
+log_group_start() {
+  # GitHub/Buildkite-style grouping (harmless locally)
+  echo "::group::$*" || true
+  log_info "$@"
+}
+log_group_end() {
+  echo "::endgroup::" || true
+}
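
The other scripts consume this library by sourcing it; a usage sketch (LOG_LEVEL is read at call time, so raising it to debug surfaces the log_debug lines):

    export LOG_LEVEL=debug
    source scripts/lib/log.sh
    log_debug "visible because LOG_LEVEL=debug"
    log_info "timestamped [INFO] line written to stderr"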
package/scripts/post-install.js
ADDED

@@ -0,0 +1,176 @@
+#!/usr/bin/env node
+
+/**
+ * Post-install Script for DevOps Agent
+ * =====================================
+ *
+ * Runs after npm install/update to check and update house rules.
+ * Preserves user customizations while updating our managed sections.
+ */
+
+import { execSync } from 'child_process';
+import fs from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const colors = {
+  reset: '\x1b[0m',
+  bright: '\x1b[1m',
+  dim: '\x1b[2m',
+  green: '\x1b[32m',
+  yellow: '\x1b[33m',
+  blue: '\x1b[36m',
+  red: '\x1b[31m'
+};
+
+async function main() {
+  try {
+    // Check if this is a global install - skip if so
+    const isGlobal = process.env.npm_config_global === 'true' ||
+                     process.env.npm_config_global === true ||
+                     (process.env.npm_config_prefix &&
+                      (process.cwd().startsWith(process.env.npm_config_prefix) ||
+                       // Windows specific global paths
+                       process.cwd().includes('\\npm\\') ||
+                       process.cwd().includes('\\npm-cache\\') ||
+                       process.cwd().includes('/lib/node_modules/') ||
+                       process.cwd().includes('\\node_modules\\')));
+
+    if (isGlobal) {
+      // Skip post-install for global installations
+      return;
+    }
+
+    // Find project root (where npm install was run)
+    const projectRoot = process.env.INIT_CWD || process.cwd();
+
+    // Don't run if we're in the DevOps Agent package itself
+    const packageJson = path.join(__dirname, '..', 'package.json');
+    if (fs.existsSync(packageJson)) {
+      const pkg = JSON.parse(fs.readFileSync(packageJson, 'utf8'));
+      if (pkg.name === 's9n-devops-agent') {
+        // We're in the package directory itself, not a project using it
+        const currentPackageJson = path.join(projectRoot, 'package.json');
+        if (fs.existsSync(currentPackageJson)) {
+          const currentPkg = JSON.parse(fs.readFileSync(currentPackageJson, 'utf8'));
+          if (currentPkg.name === 's9n-devops-agent') {
+            // Skip post-install when installing the package itself
+            return;
+          }
+        }
+      }
+    }
+
+    console.log(`\n${colors.bright}DevOps Agent Post-Install${colors.reset}`);
+    console.log(`${colors.dim}Checking house rules...${colors.reset}\n`);
+
+    // Dynamically import HouseRulesManager only when needed
+    const { default: HouseRulesManager } = await import('../src/house-rules-manager.js');
+
+    const manager = new HouseRulesManager(projectRoot);
+    const status = manager.getStatus();
+
+    if (!status.exists) {
+      console.log(`${colors.yellow}No house rules found in your project.${colors.reset}`);
+
+      // Check if running in CI or if user wants auto-setup
+      const isCI = process.env.CI || process.env.CONTINUOUS_INTEGRATION ||
+                   process.env.GITHUB_ACTIONS || process.env.GITLAB_CI ||
+                   process.env.JENKINS_URL || process.env.TRAVIS;
+
+      const autoSetup = isCI || process.env.DEVOPS_AUTO_SETUP === 'true';
+
+      if (autoSetup) {
+        console.log(`${colors.blue}Creating house rules automatically...${colors.reset}`);
+        const result = await manager.updateHouseRules({ createIfMissing: true });
+        if (result.created) {
+          console.log(`${colors.green}✓ Created house rules at: ${path.relative(projectRoot, result.path)}${colors.reset}`);
+          console.log(`${colors.dim}AI agents will now follow project conventions and coordination protocols.${colors.reset}\n`);
+        }
+      } else {
+        console.log(`${colors.bright}House rules help AI agents understand your project.${colors.reset}`);
+        console.log(`\nTo set up house rules, you can:`);
+        console.log(`  1. Run ${colors.green}npm start${colors.reset} (recommended - interactive setup)`);
+        console.log(`  2. Set ${colors.blue}DEVOPS_AUTO_SETUP=true${colors.reset} before npm install`);
+        console.log(`  3. Run ${colors.green}npm run house-rules:update${colors.reset} manually\n`);
+      }
+      return;
+    }
+
+    if (status.needsUpdate) {
+      console.log(`${colors.yellow}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${colors.reset}`);
+      console.log(`${colors.bright}House Rules Update Available${colors.reset}`);
+      console.log(`${colors.yellow}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${colors.reset}\n`);
+
+      // Show what will be updated
+      const sectionsToUpdate = [];
+      const sectionsToAdd = [];
+
+      for (const [name, info] of Object.entries(status.managedSections)) {
+        if (info.needsUpdate) {
+          if (info.installed) {
+            sectionsToUpdate.push(`  • ${name} (${info.installedVersion} → ${info.currentVersion})`);
+          } else {
+            sectionsToAdd.push(`  • ${name} (new)`);
+          }
+        }
+      }
+
+      if (sectionsToUpdate.length > 0) {
+        console.log('Sections to update:');
+        sectionsToUpdate.forEach(s => console.log(s));
+      }
+
+      if (sectionsToAdd.length > 0) {
+        console.log('Sections to add:');
+        sectionsToAdd.forEach(s => console.log(s));
+      }
+
+      console.log(`\n${colors.dim}Your custom rules will be preserved.${colors.reset}`);
+
+      // Check if running in CI environment
+      const isCI = process.env.CI || process.env.CONTINUOUS_INTEGRATION ||
+                   process.env.GITHUB_ACTIONS || process.env.GITLAB_CI ||
+                   process.env.JENKINS_URL || process.env.TRAVIS;
+
+      if (isCI) {
+        // In CI, auto-update without prompting
+        console.log(`${colors.blue}CI environment detected - auto-updating...${colors.reset}`);
+        const result = await manager.updateHouseRules();
+        if (result.updated) {
+          console.log(`${colors.green}✓ House rules updated successfully!${colors.reset}`);
+          if (result.updatedSections?.length > 0) {
+            console.log(`  Updated: ${result.updatedSections.join(', ')}`);
+          }
+          if (result.addedSections?.length > 0) {
+            console.log(`  Added: ${result.addedSections.join(', ')}`);
+          }
+        }
+      } else {
+        // In interactive environment, provide instructions
+        console.log(`\nTo update, run: ${colors.green}npm start${colors.reset}`);
+        console.log(`Or manually: ${colors.green}node node_modules/s9n-devops-agent/src/house-rules-manager.js update${colors.reset}\n`);
+      }
+    } else {
+      console.log(`${colors.green}✓${colors.reset} House rules are up to date.\n`);
+    }
+  } catch (error) {
+    // Silently fail - don't break npm install
+    if (process.env.DEBUG) {
+      console.error(`${colors.red}Error checking house rules:${colors.reset}`, error.message);
+    }
+  }
+}
+
+// Run if called directly
+if (import.meta.url === `file://${process.argv[1]}`) {
+  main().catch(() => {
+    // Silent fail for post-install
+    process.exit(0);
+  });
+}
+
+export default main;
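
Given the checks above, a fresh project can opt into non-interactive setup at install time; CI environments get the same auto-setup behavior via the CI variable. A sketch:

    DEVOPS_AUTO_SETUP=true npm install --save-dev s9n-devops-agent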
package/scripts/repair-house-rules.sh
ADDED

@@ -0,0 +1,151 @@
+#!/bin/bash
+
+# House Rules Repair Script
+# ==========================
+# Checks and repairs house rules if they're missing or corrupted
+
+set -euo pipefail
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;36m'
+BOLD='\033[1m'
+DIM='\033[2m'
+NC='\033[0m' # No Color
+
+echo -e "${BOLD}House Rules Health Check${NC}"
+echo -e "${DIM}Checking house rules status...${NC}"
+echo
+
+# Get the directory where this script is located
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
+
+# Check if house-rules-manager exists
+if [[ ! -f "$ROOT_DIR/src/house-rules-manager.js" ]]; then
+  echo -e "${RED}Error: house-rules-manager.js not found${NC}"
+  echo "Please ensure you're running this from the DevOps Agent directory."
+  exit 1
+fi
+
+# Get status
+STATUS=$(node "$ROOT_DIR/src/house-rules-manager.js" status 2>&1 || echo '{"exists": false}')
+
+# Check if house rules exist
+EXISTS=$(echo "$STATUS" | grep -o '"exists"[[:space:]]*:[[:space:]]*true' || echo "")
+NEEDS_UPDATE=$(echo "$STATUS" | grep -o '"needsUpdate"[[:space:]]*:[[:space:]]*true' || echo "")
+
+if [[ -z "$EXISTS" ]]; then
+  echo -e "${YELLOW}⚠ House rules not found!${NC}"
+  echo
+  echo "This could happen if:"
+  echo "  • The file was accidentally deleted"
+  echo "  • This is a fresh installation"
+  echo "  • The file was moved to a different location"
+  echo
+  echo -n "Create house rules now? (Y/n): "
+  read CREATE_CHOICE
+
+  if [[ "${CREATE_CHOICE}" != "n" ]] && [[ "${CREATE_CHOICE}" != "N" ]]; then
+    echo -e "${BLUE}Creating house rules...${NC}"
+    RESULT=$(node "$ROOT_DIR/src/house-rules-manager.js" update 2>&1)
+
+    if echo "$RESULT" | grep -q '"created"[[:space:]]*:[[:space:]]*true'; then
+      echo -e "${GREEN}✓ House rules created successfully!${NC}"
+
+      # Extract path if possible
+      PATH_LINE=$(echo "$RESULT" | grep -o '"path"[[:space:]]*:[[:space:]]*"[^"]*"' || echo "")
+      if [[ -n "$PATH_LINE" ]]; then
+        FILE_PATH=$(echo "$PATH_LINE" | sed 's/.*"path"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/')
+        echo -e "  Location: ${BOLD}$FILE_PATH${NC}"
+      fi
+    else
+      echo -e "${GREEN}✓ House rules restored!${NC}"
+    fi
+  else
+    echo -e "${YELLOW}Skipped house rules creation${NC}"
+    exit 0
+  fi
+
+elif [[ -n "$NEEDS_UPDATE" ]]; then
+  echo -e "${YELLOW}House rules need updating${NC}"
+  echo
+  echo "Your house rules exist but some sections are outdated."
+  echo -e "${DIM}Your custom rules will be preserved during the update.${NC}"
+  echo
+
+  # Show what needs updating
+  echo "$STATUS" | node -e "
+    const input = require('fs').readFileSync(0, 'utf8');
+    try {
+      const data = JSON.parse(input);
+      if (data.managedSections) {
+        const updates = [];
+        const additions = [];
+        for (const [name, info] of Object.entries(data.managedSections)) {
+          if (info.needsUpdate) {
+            if (info.installed) {
+              updates.push(\`  • \${name} (\${info.installedVersion} → \${info.currentVersion})\`);
+            } else {
+              additions.push(\`  • \${name} (new)\`);
+            }
+          }
+        }
+        if (updates.length > 0) {
+          console.log('Sections to update:');
+          updates.forEach(u => console.log(u));
+        }
+        if (additions.length > 0) {
+          console.log('\\nSections to add:');
+          additions.forEach(a => console.log(a));
+        }
+      }
+    } catch (e) {
+      // Silent fail
+    }
+  " 2>/dev/null || true
+
+  echo
+  echo -n "Update house rules now? (Y/n): "
+  read UPDATE_CHOICE
+
+  if [[ "${UPDATE_CHOICE}" != "n" ]] && [[ "${UPDATE_CHOICE}" != "N" ]]; then
+    echo -e "${BLUE}Updating house rules...${NC}"
+    RESULT=$(node "$ROOT_DIR/src/house-rules-manager.js" update 2>&1)
+    echo -e "${GREEN}✓ House rules updated!${NC}"
+  else
+    echo -e "${YELLOW}Skipped house rules update${NC}"
+  fi
+
+else
+  echo -e "${GREEN}✓ House rules are healthy!${NC}"
+  echo
+
+  # Show current status
+  echo "$STATUS" | node -e "
+    const input = require('fs').readFileSync(0, 'utf8');
+    try {
+      const data = JSON.parse(input);
+      if (data.path) {
+        console.log(\`  Location: \${data.path}\`);
+      }
+      if (data.managedSections) {
+        console.log('  Managed sections:');
+        for (const [name, info] of Object.entries(data.managedSections)) {
+          if (info.installed) {
+            console.log(\`    • \${name} (v\${info.installedVersion})\`);
+          }
+        }
+      }
+    } catch (e) {
+      // Silent fail
+    }
+  " 2>/dev/null || true
+
+  echo
+  echo -e "${DIM}All sections are up to date.${NC}"
+fi
+
+echo
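
A usage sketch (assuming it is run from the package root so that src/house-rules-manager.js resolves): the script reports whether house rules exist, offers to create or update them, and leaves custom sections untouched.

    bash scripts/repair-house-rules.sh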
package/scripts/run-tests
ADDED

@@ -0,0 +1,185 @@
+#!/usr/bin/env bash
+set -euo pipefail
+ROOT="$(git rev-parse --show-toplevel)"
+cd "$ROOT"
+
+source "$ROOT/scripts/lib/log.sh"
+
+SCOPE="changed" # changed | all | path
+TARGET_PATH=""
+BASE_REF="${BASE_REF:-origin/main}"
+HEAD_REF="${HEAD_REF:-HEAD}"
+
+# Parse flags
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    --all) SCOPE="all"; shift ;;
+    --changed) SCOPE="changed"; shift ;;
+    --base) BASE_REF="$2"; shift 2 ;;
+    --head) HEAD_REF="$2"; shift 2 ;;
+    -v|--verbose) LOG_LEVEL="debug"; shift ;;
+    -q|--quiet) LOG_LEVEL="warn"; shift ;;
+    -*)
+      log_warn "Unknown flag: $1"; shift ;;
+    *)
+      # If a path is provided, treat as direct target
+      if [ -d "$1" ]; then
+        SCOPE="path"; TARGET_PATH="$1"; shift
+      else
+        log_warn "Ignoring unknown arg: $1"; shift
+      fi
+      ;;
+  esac
+done
+
+log_group_start "Test run setup"
+log_info "LOG_LEVEL=$LOG_LEVEL TRACE=$TRACE"
+log_info "SCOPE=$SCOPE BASE_REF=$BASE_REF HEAD_REF=$HEAD_REF TARGET_PATH=${TARGET_PATH:-<none>}"
+log_group_end
+
+langs=()
+
+has_any() { git ls-files "$1" >/dev/null 2>&1 && [ -n "$(git ls-files "$1")" ]; }
+
+# AutoCommit is JavaScript-based
+if has_any "*.js" || has_any "package.json"; then langs+=("node"); fi
+if has_any "*.py"; then langs+=("python"); fi
+if has_any "*.go"; then langs+=("go"); fi
+if has_any "pom.xml" || has_any "build.gradle" || has_any "build.gradle.kts"; then langs+=("java"); fi
+if has_any "Gemfile" || has_any "*.rb"; then langs+=("ruby"); fi
+
+if [ ${#langs[@]} -eq 0 ]; then
+  log_warn "No detectable languages. Exiting."
+  exit 0
+fi
+
+# Collect candidate test directories
+declare -a test_dirs=()
+
+if [ "$SCOPE" = "path" ]; then
+  test_dirs+=("$TARGET_PATH")
+elif [ "$SCOPE" = "changed" ]; then
+  if changed=$(scripts/changed-areas.sh --base "$BASE_REF" --head "$HEAD_REF"); then
+    while IFS= read -r line; do
+      [ -z "$line" ] && continue
+      test_dirs+=("$line")
+    done <<< "$changed"
+  fi
+fi
+
+# Fallbacks
+if [ "$SCOPE" = "all" ] || [ ${#test_dirs[@]} -eq 0 ]; then
+  if [ "$SCOPE" = "changed" ]; then
+    log_warn "No targeted test dirs inferred. Falling back to full suite."
+  fi
+  # collect all leaf test folders to avoid scanning entire repo
+  all_dirs=()
+  while IFS= read -r dir; do
+    [ -n "$dir" ] && all_dirs+=("$dir")
+  done < <(find test_cases -type d 2>/dev/null || true)
+  if [ ${#all_dirs[@]} -eq 0 ]; then
+    log_warn "No test_cases/ found. Exiting."
+    exit 0
+  fi
+  test_dirs=("${all_dirs[@]}")
+fi
+
+# De-dup and ensure dirs exist
+test_dirs_dedup=()
+while IFS= read -r dir; do
+  [ -n "$dir" ] && test_dirs_dedup+=("$dir")
+done < <(printf "%s\n" "${test_dirs[@]}" | sort -u)
+test_dirs=("${test_dirs_dedup[@]}")
+log_group_start "Planned test directories"
+printf "%s\n" "${test_dirs[@]}" | sed 's/^/- /'
+log_group_end
+
+run_python() {
+  if ! command -v pytest >/dev/null 2>&1; then
+    log_error "pytest not found. Install with: pip install -U pytest"
+    return 1
+  fi
+  if [ "${#test_dirs[@]}" -gt 0 ]; then
+    pytest -q "${test_dirs[@]}"
+  else
+    pytest -q
+  fi
+}
+
+run_node() {
+  if [ ! -f package.json ]; then
+    log_debug "No package.json; skipping node tests."
+    return 0
+  fi
+
+  # For AutoCommit, we'll use Jest for testing
+  if npx --yes -- jest --version >/dev/null 2>&1; then
+    if [ "${#test_dirs[@]}" -gt 0 ]; then
+      npx --yes jest --runInBand "${test_dirs[@]}"
+    else
+      npx --yes jest --runInBand
+    fi
+  elif npx --yes -- vitest --version >/dev/null 2>&1; then
+    if [ "${#test_dirs[@]}" -gt 0 ]; then
+      npx --yes vitest run "${test_dirs[@]}"
+    else
+      npx --yes vitest run
+    fi
+  else
+    log_error "Neither jest nor vitest found. Install: npm i -D jest"
+    return 1
+  fi
+}
+
+run_go() {
+  # go test cannot target our folder layout directly; let's run full module
+  go test ./...
+}
+
+run_java() {
+  if [ -f "gradlew" ]; then
+    ./gradlew test
+  elif [ -f "mvnw" ]; then
+    ./mvnw -q -DskipTests=false test
+  elif command -v mvn >/dev/null 2>&1; then
+    mvn -q -DskipTests=false test
+  elif command -v gradle >/dev/null 2>&1; then
+    gradle test
+  else
+    log_error "No Java build tool found (Gradle/Maven)."
+    return 1
+  fi
+}
+
+run_ruby() {
+  if command -v rspec >/dev/null 2>&1; then
+    if [ "${#test_dirs[@]}" -gt 0 ]; then
+      rspec "${test_dirs[@]}"
+    else
+      rspec
+    fi
+  else
+    log_error "rspec not found. Install: bundle add rspec && bundle exec rspec --init"
+    return 1
+  fi
+}
+
+rc=0
+for l in "${langs[@]}"; do
+  log_group_start "Running $l tests"
+  case "$l" in
+    python) run_python || rc=$? ;;
+    node) run_node || rc=$? ;;
+    go) run_go || rc=$? ;;
+    java) run_java || rc=$? ;;
+    ruby) run_ruby || rc=$? ;;
+  esac
+  log_group_end
+done
+
+if [ "$rc" -ne 0 ]; then
+  log_error "One or more test suites failed with code $rc"
+else
+  log_info "All planned test suites passed."
+fi
+exit $rc
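
Typical invocations, as a sketch (BASE_REF and HEAD_REF may also be supplied via the environment instead of flags):

    scripts/run-tests                      # only areas changed vs origin/main
    scripts/run-tests --all                # every directory under test_cases/
    scripts/run-tests --base origin/main --head HEAD -v
    scripts/run-tests test_cases/cs-devops-agent/worker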
package/scripts/setup-file-coordination.sh
ADDED

@@ -0,0 +1,416 @@
+#!/bin/bash
+
+# ============================================================================
+# FILE COORDINATION SYSTEM SETUP
+# ============================================================================
+# Creates a simple file-based coordination system where agents declare
+# which files they're about to edit, preventing conflicts
+# ============================================================================
+
+set -euo pipefail
+
+# Check if we're in the CS_DevOpsAgent directory and go up to the actual project
+CURRENT_DIR="$(pwd)"
+if [[ "$CURRENT_DIR" == *"/CS_DevOpsAgent"* ]] || [[ "$CURRENT_DIR" == *"/DevOpsAgent"* ]]; then
+  # Go up to the parent of Scripts_Dev (or wherever DevOpsAgent is)
+  ROOT="$(cd "$(dirname "$(dirname "$CURRENT_DIR")")" && pwd)"
+  # If that's MVPEmails, go up one more to DistilledConceptExtractor
+  if [[ "$(basename "$ROOT")" == "Scripts_Dev" ]]; then
+    ROOT="$(dirname "$ROOT")"
+  fi
+else
+  ROOT="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
+fi
+
+COORD_DIR="$ROOT/.file-coordination"
+ACTIVE_EDITS="$COORD_DIR/active-edits"
+COMPLETED_EDITS="$COORD_DIR/completed-edits"
+BLOCKED_FILES="$COORD_DIR/blocked-files"
+
+echo "[INFO] Setting up file coordination system at: $ROOT"
+
+# Check for existing house rules
+# First check if HOUSERULES_PATH was passed from the parent script
+if [ -n "$HOUSERULES_PATH" ] && [ -f "$HOUSERULES_PATH" ]; then
+  # Use the path that was already found by the parent script
+  HOUSERULES_FOUND=true
+  echo "[INFO] Using house rules from parent script: $HOUSERULES_PATH"
+elif [ -f "$ROOT/houserules.md" ]; then
+  HOUSERULES_PATH="$ROOT/houserules.md"
+  HOUSERULES_FOUND=true
+  echo "[INFO] Found existing house rules at: houserules.md"
+elif [ -f "$ROOT/HOUSERULES.md" ]; then
+  HOUSERULES_PATH="$ROOT/HOUSERULES.md"
+  HOUSERULES_FOUND=true
+  echo "[INFO] Found existing house rules at: HOUSERULES.md"
+elif [ -f "$ROOT/.github/HOUSERULES.md" ]; then
+  HOUSERULES_PATH="$ROOT/.github/HOUSERULES.md"
+  HOUSERULES_FOUND=true
+  echo "[INFO] Found existing house rules at: .github/HOUSERULES.md"
+elif [ -f "$ROOT/docs/houserules.md" ]; then
+  HOUSERULES_PATH="$ROOT/docs/houserules.md"
+  HOUSERULES_FOUND=true
+  echo "[INFO] Found existing house rules at: docs/houserules.md"
+else
+  echo ""
+  echo "[PROMPT] No house rules found for AI agents."
+  echo "         House rules help AI agents understand your project conventions."
+  echo ""
+  echo "         Options:"
+  echo "         1) Create new house rules at houserules.md (recommended)"
+  echo "         2) Specify path to existing house rules"
+  echo "         3) Skip (not recommended)"
+  echo ""
+  echo "         Enter choice (1/2/3): "
+  read -r CHOICE
+
+  case "$CHOICE" in
+    1|"")
+      HOUSERULES_PATH="$ROOT/houserules.md"
+      HOUSERULES_FOUND=false
+      echo "[INFO] Will create comprehensive house rules at: houserules.md"
+      ;;
+    2)
+      echo "[PROMPT] Please enter the path to your house rules file (relative to $ROOT):"
+      read -r CUSTOM_PATH
+      if [ -f "$ROOT/$CUSTOM_PATH" ]; then
+        HOUSERULES_PATH="$ROOT/$CUSTOM_PATH"
+        HOUSERULES_FOUND=true
+        echo "[INFO] Using house rules at: $CUSTOM_PATH"
+      else
+        echo "[WARNING] File not found. Will create house rules at: houserules.md"
+        HOUSERULES_PATH="$ROOT/houserules.md"
+        HOUSERULES_FOUND=false
+      fi
+      ;;
+    3)
+      echo "[WARNING] Skipping house rules setup. This is not recommended!"
+      echo "[WARNING] AI agents may not follow project conventions without house rules."
+      HOUSERULES_PATH="$ROOT/houserules.md"
+      HOUSERULES_FOUND=false
+      ;;
+    *)
+      HOUSERULES_PATH="$ROOT/houserules.md"
+      HOUSERULES_FOUND=false
+      echo "[INFO] Invalid choice. Will create house rules at: houserules.md"
+      ;;
+  esac
+fi
+
+# Create coordination directories
+mkdir -p "$ACTIVE_EDITS"
+mkdir -p "$COMPLETED_EDITS"
+mkdir -p "$BLOCKED_FILES"
+
+# Create the declaration template
+cat > "$COORD_DIR/DECLARE_TEMPLATE.json" << 'EOF'
+{
+  "agent": "agent-name",
+  "session": "session-id",
+  "files": [
+    "path/to/file1.js",
+    "path/to/file2.js"
+  ],
+  "operation": "edit|create|delete",
+  "reason": "Brief description of changes",
+  "declaredAt": "ISO-8601",
+  "estimatedDuration": 300
+}
+EOF
+
+# Create the check script for agents
+cat > "$ROOT/check-file-availability.sh" << 'SCRIPT'
+#!/bin/bash
+# Check if files are available for editing
+
+FILES_TO_CHECK="$@"
+COORD_DIR=".file-coordination/active-edits"
+BLOCKED_FILES=""
+
+for file in $FILES_TO_CHECK; do
+  # Check if file is being edited
+  if grep -l "\"$file\"" "$COORD_DIR"/*.json 2>/dev/null | head -1; then
+    BLOCKED_BY=$(grep -l "\"$file\"" "$COORD_DIR"/*.json | xargs basename | cut -d. -f1)
+    echo "❌ BLOCKED: $file (being edited by $BLOCKED_BY)"
+    BLOCKED_FILES="$BLOCKED_FILES $file"
+  else
+    echo "✅ AVAILABLE: $file"
+  fi
+done
+
+if [ -n "$BLOCKED_FILES" ]; then
+  exit 1
+else
+  exit 0
+fi
+SCRIPT
+chmod +x "$ROOT/check-file-availability.sh"
+
+# Create the declaration script
+cat > "$ROOT/declare-file-edits.sh" << 'SCRIPT'
+#!/bin/bash
+# Declare files that will be edited
+
+AGENT="${1:-unknown}"
+SESSION="${2:-$(date +%s)}"
+shift 2
+FILES="$@"
+
+COORD_DIR=".file-coordination"
+DECLARATION_FILE="$COORD_DIR/active-edits/${AGENT}-${SESSION}.json"
+
+# Check if any files are already being edited
+for file in $FILES; do
+  if grep -l "\"$file\"" "$COORD_DIR/active-edits"/*.json 2>/dev/null | grep -v "$DECLARATION_FILE" | head -1; then
+    echo "❌ Cannot declare: $file is already being edited"
+    exit 1
+  fi
+done
+
+# Create declaration
+cat > "$DECLARATION_FILE" << EOF
+{
+  "agent": "$AGENT",
+  "session": "$SESSION",
+  "files": [$(echo $FILES | sed 's/ /", "/g' | sed 's/^/"/;s/$/"/')]
+  "operation": "edit",
+  "declaredAt": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
+  "estimatedDuration": 300
+}
+EOF
+
+echo "✅ Declared edits for: $FILES"
+echo "Declaration saved to: $DECLARATION_FILE"
+SCRIPT
+chmod +x "$ROOT/declare-file-edits.sh"
+
+# Create the release script
+cat > "$ROOT/release-file-edits.sh" << 'SCRIPT'
+#!/bin/bash
+# Release files after editing
+
+AGENT="${1:-unknown}"
+SESSION="${2:-*}"
+
+COORD_DIR=".file-coordination"
+DECLARATION_FILE="$COORD_DIR/active-edits/${AGENT}-${SESSION}.json"
+
+if ls $DECLARATION_FILE 1> /dev/null 2>&1; then
+  mv $DECLARATION_FILE "$COORD_DIR/completed-edits/" 2>/dev/null || true
+  echo "✅ Released files for $AGENT-$SESSION"
+else
+  echo "⚠️ No active declaration found for $AGENT-$SESSION"
+fi
+SCRIPT
+chmod +x "$ROOT/release-file-edits.sh"
+
+# Add to .gitignore
+if ! grep -q "^.file-coordination" "$ROOT/.gitignore" 2>/dev/null; then
+  echo ".file-coordination" >> "$ROOT/.gitignore"
+  echo "[INFO] Added .file-coordination to .gitignore"
+fi
+
+# Add or update house rules with file coordination protocol
+update_houserules() {
+  local houserules_file="$1"
+  local is_new_file="$2"
+
+  # Check if coordination rules already exist
+  if grep -q "File Coordination Protocol" "$houserules_file" 2>/dev/null; then
+    echo "[INFO] File coordination rules already present in house rules"
+    return
+  fi
+
+  echo "[INFO] Setting up house rules with file coordination protocol"
+
+  # Create backup if file exists
+  if [ -f "$houserules_file" ] && [ -s "$houserules_file" ]; then
+    cp "$houserules_file" "${houserules_file}.backup.$(date +%Y%m%d_%H%M%S)"
+    echo "[INFO] Created backup of existing house rules"
+  fi
+
+  # If creating new file, add comprehensive template
+  if [ "$is_new_file" = "true" ] || [ ! -f "$houserules_file" ] || [ ! -s "$houserules_file" ]; then
+    cat > "$houserules_file" << 'FULL_TEMPLATE'
+# House Rules for AI Agents
+
+**IMPORTANT: All AI agents (Claude, Cline, Copilot, etc.) must read and follow these rules at the start of each session.**
+
+## Core Principles
+
+1. **Always preserve existing functionality** - Never break working code
+2. **Follow existing patterns** - Match the codebase style and conventions
+3. **Communicate clearly** - Document your changes and reasoning
+4. **Coordinate with others** - Follow the file coordination protocol below
+
+## Project Conventions
+
+### Code Style
+- Follow existing indentation and formatting patterns
+- Maintain consistent naming conventions used in the project
+- Keep functions small and focused
+- Write clear, descriptive variable and function names
+
+### Git Workflow
+- Write clear, descriptive commit messages
+- Follow conventional commit format when applicable (feat:, fix:, docs:, etc.)
+- Keep commits atomic and focused on a single change
+- Never commit sensitive information or credentials
+
+### Testing
+- Write tests for new functionality
+- Ensure existing tests pass before committing
+- Update tests when changing functionality
+
+### Documentation
+- Update README files when adding new features
+- Document complex logic with clear comments
+- Keep API documentation up to date
+- Update CHANGELOG for significant changes
+
+FULL_TEMPLATE
+  fi
+
+  # Create temporary file with coordination rules at the top (after any main header)
+  TEMP_FILE=$(mktemp)
+
+  # Check if file has a main header (starts with #)
+  if head -n1 "$houserules_file" | grep -q '^#'; then
+    # Keep the main header
+    head -n1 "$houserules_file" > "$TEMP_FILE"
+    echo "" >> "$TEMP_FILE"
+
+    # Add coordination rules
+    cat >> "$TEMP_FILE" << 'COORDINATION_RULES'
+## 🚨 CRITICAL: File Coordination Protocol (MUST FOLLOW)
+
+**IMPORTANT: Always check the house rules at the beginning of each session!**
+
+To prevent conflicts with other agents editing the same files, you MUST follow this protocol:
+
+### Before Editing ANY Files:
+
+1. **DECLARE YOUR INTENT FIRST**
+   Create a file at `.file-coordination/active-edits/<your-name>-<session>.json` with:
+   ```json
+   {
+     "agent": "<your-name>",
+     "session": "<session-id>",
+     "files": ["list", "of", "files", "you", "will", "edit"],
+     "operation": "edit",
+     "reason": "Brief description of what you're doing",
+     "declaredAt": "<current-ISO-8601-timestamp>",
+     "estimatedDuration": 300
+   }
+   ```
+
+2. **CHECK FOR CONFLICTS**
+   - Read ALL files in `.file-coordination/active-edits/`
+   - If ANY other agent has declared the same files, you must:
+     - WAIT for them to finish, OR
+     - Choose different files to edit
+
+3. **ONLY EDIT DECLARED FILES**
+   - Never edit files you haven't declared
+   - Stay within your declared scope
+
+4. **RELEASE WHEN DONE**
+   - Delete your declaration file after completing edits
+   - Or move it to `.file-coordination/completed-edits/`
+
+### If You Detect a Conflict:
+- DO NOT proceed with edits
+- Report the conflict to the user
+- Wait or choose alternative files
+
+### Helper Scripts Available:
+- `./scripts/coordination/check-file-availability.sh <files>` - Check if files are available
+- `./scripts/coordination/declare-file-edits.sh <agent> <session> <files>` - Declare your intent
+- `./scripts/coordination/release-file-edits.sh <agent> <session>` - Release files after editing
+
+**This coordination prevents wasted work and merge conflicts!**
+
+---
+
+COORDINATION_RULES
+
+    # Add the rest of the original content (skipping the first line)
+    tail -n +2 "$houserules_file" >> "$TEMP_FILE"
+  else
+    # No header, add coordination rules at the top
+    cat >> "$TEMP_FILE" << 'COORDINATION_RULES'
+## 🚨 CRITICAL: File Coordination Protocol (MUST FOLLOW)
+
+**IMPORTANT: Always check the house rules at the beginning of each session!**
+
+To prevent conflicts with other agents editing the same files, you MUST follow this protocol:
+
+### Before Editing ANY Files:
+
+1. **DECLARE YOUR INTENT FIRST**
+   Create a file at `.file-coordination/active-edits/<your-name>-<session>.json` with:
+   ```json
+   {
+     "agent": "<your-name>",
+     "session": "<session-id>",
+     "files": ["list", "of", "files", "you", "will", "edit"],
+     "operation": "edit",
+     "reason": "Brief description of what you're doing",
+     "declaredAt": "<current-ISO-8601-timestamp>",
+     "estimatedDuration": 300
+   }
+   ```
+
+2. **CHECK FOR CONFLICTS**
+   - Read ALL files in `.file-coordination/active-edits/`
+   - If ANY other agent has declared the same files, you must:
+     - WAIT for them to finish, OR
+     - Choose different files to edit
+
+3. **ONLY EDIT DECLARED FILES**
+   - Never edit files you haven't declared
+   - Stay within your declared scope
+
+4. **RELEASE WHEN DONE**
+   - Delete your declaration file after completing edits
+   - Or move it to `.file-coordination/completed-edits/`
+
+### If You Detect a Conflict:
+- DO NOT proceed with edits
+- Report the conflict to the user
+- Wait or choose alternative files
+
+### Helper Scripts Available:
+- `./scripts/coordination/check-file-availability.sh <files>` - Check if files are available
+- `./scripts/coordination/declare-file-edits.sh <agent> <session> <files>` - Declare your intent
+- `./scripts/coordination/release-file-edits.sh <agent> <session>` - Release files after editing
+
+**This coordination prevents wasted work and merge conflicts!**
+
+---
+
+COORDINATION_RULES
+
+    # Add original content
+    cat "$houserules_file" >> "$TEMP_FILE"
+  fi
+
+  # Replace original file
+  mv "$TEMP_FILE" "$houserules_file"
+  echo "[SUCCESS] Updated house rules with file coordination protocol"
+}
+
+# Update the house rules file
+# Pass whether this is a new file (HOUSERULES_FOUND is false for new files)
+if [ "$HOUSERULES_FOUND" = "false" ]; then
+  update_houserules "$HOUSERULES_PATH" "true"
+else
+  update_houserules "$HOUSERULES_PATH" "false"
+fi
+
+echo "[SUCCESS] File coordination system created!"
+echo ""
+echo "Usage:"
+echo "  1. Check availability: ./scripts/coordination/check-file-availability.sh file1 file2"
+echo "  2. Declare edits: ./scripts/coordination/declare-file-edits.sh agent-name session-id file1 file2"
+echo "  3. Release files: ./scripts/coordination/release-file-edits.sh agent-name session-id"