@aipper/aiws-spec 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -0
- package/docs/cli-interface.md +288 -0
- package/docs/spec-contract.md +183 -0
- package/package.json +18 -0
- package/templates/workspace/.agents/skills/aiws-change-archive/SKILL.md +23 -0
- package/templates/workspace/.agents/skills/aiws-change-list/SKILL.md +18 -0
- package/templates/workspace/.agents/skills/aiws-change-new/SKILL.md +26 -0
- package/templates/workspace/.agents/skills/aiws-change-next/SKILL.md +19 -0
- package/templates/workspace/.agents/skills/aiws-change-start/SKILL.md +27 -0
- package/templates/workspace/.agents/skills/aiws-change-status/SKILL.md +19 -0
- package/templates/workspace/.agents/skills/aiws-change-sync/SKILL.md +19 -0
- package/templates/workspace/.agents/skills/aiws-change-templates-init/SKILL.md +18 -0
- package/templates/workspace/.agents/skills/aiws-change-templates-which/SKILL.md +18 -0
- package/templates/workspace/.agents/skills/aiws-change-validate/SKILL.md +23 -0
- package/templates/workspace/.agents/skills/aiws-hooks-install/SKILL.md +30 -0
- package/templates/workspace/.agents/skills/aiws-hooks-status/SKILL.md +18 -0
- package/templates/workspace/.agents/skills/aiws-init/SKILL.md +27 -0
- package/templates/workspace/.agents/skills/aiws-rollback/SKILL.md +18 -0
- package/templates/workspace/.agents/skills/aiws-update/SKILL.md +26 -0
- package/templates/workspace/.agents/skills/aiws-validate/SKILL.md +22 -0
- package/templates/workspace/.agents/skills/ws-analyze/SKILL.md +26 -0
- package/templates/workspace/.agents/skills/ws-commit/SKILL.md +50 -0
- package/templates/workspace/.agents/skills/ws-dev/SKILL.md +34 -0
- package/templates/workspace/.agents/skills/ws-migrate/SKILL.md +54 -0
- package/templates/workspace/.agents/skills/ws-plan/SKILL.md +39 -0
- package/templates/workspace/.agents/skills/ws-preflight/SKILL.md +29 -0
- package/templates/workspace/.agents/skills/ws-req-change/SKILL.md +33 -0
- package/templates/workspace/.agents/skills/ws-req-contract-sync/SKILL.md +17 -0
- package/templates/workspace/.agents/skills/ws-req-contract-validate/SKILL.md +12 -0
- package/templates/workspace/.agents/skills/ws-req-flow-sync/SKILL.md +28 -0
- package/templates/workspace/.agents/skills/ws-req-review/SKILL.md +32 -0
- package/templates/workspace/.agents/skills/ws-review/SKILL.md +24 -0
- package/templates/workspace/.agents/skills/ws-rule/SKILL.md +23 -0
- package/templates/workspace/.aiws/manifest.json +36 -0
- package/templates/workspace/.claude/commands/aiws-init.md +19 -0
- package/templates/workspace/.claude/commands/aiws-rollback.md +12 -0
- package/templates/workspace/.claude/commands/aiws-update.md +18 -0
- package/templates/workspace/.claude/commands/aiws-validate.md +13 -0
- package/templates/workspace/.claude/commands/ws-analyze.md +27 -0
- package/templates/workspace/.claude/commands/ws-dev.md +24 -0
- package/templates/workspace/.claude/commands/ws-migrate.md +22 -0
- package/templates/workspace/.claude/commands/ws-preflight.md +27 -0
- package/templates/workspace/.claude/commands/ws-req-change.md +34 -0
- package/templates/workspace/.claude/commands/ws-req-contract-sync.md +18 -0
- package/templates/workspace/.claude/commands/ws-req-contract-validate.md +13 -0
- package/templates/workspace/.claude/commands/ws-req-flow-sync.md +20 -0
- package/templates/workspace/.claude/commands/ws-req-review.md +33 -0
- package/templates/workspace/.claude/commands/ws-review.md +25 -0
- package/templates/workspace/.claude/commands/ws-rule.md +24 -0
- package/templates/workspace/.codex/prompts/aiws-init.md +23 -0
- package/templates/workspace/.codex/prompts/aiws-rollback.md +16 -0
- package/templates/workspace/.codex/prompts/aiws-update.md +22 -0
- package/templates/workspace/.codex/prompts/aiws-validate.md +17 -0
- package/templates/workspace/.codex/prompts/ws-analyze.md +32 -0
- package/templates/workspace/.codex/prompts/ws-dev.md +29 -0
- package/templates/workspace/.codex/prompts/ws-migrate.md +27 -0
- package/templates/workspace/.codex/prompts/ws-preflight.md +32 -0
- package/templates/workspace/.codex/prompts/ws-req-change.md +39 -0
- package/templates/workspace/.codex/prompts/ws-req-contract-sync.md +23 -0
- package/templates/workspace/.codex/prompts/ws-req-contract-validate.md +18 -0
- package/templates/workspace/.codex/prompts/ws-req-flow-sync.md +25 -0
- package/templates/workspace/.codex/prompts/ws-req-review.md +38 -0
- package/templates/workspace/.codex/prompts/ws-review.md +30 -0
- package/templates/workspace/.codex/prompts/ws-rule.md +29 -0
- package/templates/workspace/.githooks/pre-commit +32 -0
- package/templates/workspace/.githooks/pre-push +32 -0
- package/templates/workspace/.iflow/agents/feature-reviewer.md +27 -0
- package/templates/workspace/.iflow/agents/requirements-analyst.md +24 -0
- package/templates/workspace/.iflow/agents/server-commit-manager.md +28 -0
- package/templates/workspace/.iflow/agents/server-fix-implementer.md +31 -0
- package/templates/workspace/.iflow/agents/server-test-planner.md +28 -0
- package/templates/workspace/.iflow/agents/server-test-triager.md +30 -0
- package/templates/workspace/.iflow/commands/aiws-init.toml +24 -0
- package/templates/workspace/.iflow/commands/aiws-rollback.toml +18 -0
- package/templates/workspace/.iflow/commands/aiws-update.toml +23 -0
- package/templates/workspace/.iflow/commands/aiws-validate.toml +18 -0
- package/templates/workspace/.iflow/commands/server-commit.toml +27 -0
- package/templates/workspace/.iflow/commands/server-drain.toml +99 -0
- package/templates/workspace/.iflow/commands/server-fix-and-commit.toml +27 -0
- package/templates/workspace/.iflow/commands/server-fix.toml +65 -0
- package/templates/workspace/.iflow/commands/server-test-plan.toml +62 -0
- package/templates/workspace/.iflow/commands/server-test.toml +58 -0
- package/templates/workspace/.iflow/commands/server-triage.toml +38 -0
- package/templates/workspace/.iflow/commands/server_test-plan.toml +12 -0
- package/templates/workspace/.iflow/commands/server_test.toml +12 -0
- package/templates/workspace/.iflow/commands/ws-analyze.toml +33 -0
- package/templates/workspace/.iflow/commands/ws-contract-check.toml +69 -0
- package/templates/workspace/.iflow/commands/ws-dev.toml +34 -0
- package/templates/workspace/.iflow/commands/ws-doctor.toml +141 -0
- package/templates/workspace/.iflow/commands/ws-env-doctor.toml +74 -0
- package/templates/workspace/.iflow/commands/ws-feature-deliver.toml +44 -0
- package/templates/workspace/.iflow/commands/ws-feature-plan.toml +47 -0
- package/templates/workspace/.iflow/commands/ws-init.toml +53 -0
- package/templates/workspace/.iflow/commands/ws-memory-bank-init.toml +100 -0
- package/templates/workspace/.iflow/commands/ws-migrate.toml +59 -0
- package/templates/workspace/.iflow/commands/ws-preflight.toml +30 -0
- package/templates/workspace/.iflow/commands/ws-req-change.toml +52 -0
- package/templates/workspace/.iflow/commands/ws-req-contract-sync.toml +25 -0
- package/templates/workspace/.iflow/commands/ws-req-contract-validate.toml +16 -0
- package/templates/workspace/.iflow/commands/ws-req-flow-sync.toml +36 -0
- package/templates/workspace/.iflow/commands/ws-req-review.toml +56 -0
- package/templates/workspace/.iflow/commands/ws-review.toml +32 -0
- package/templates/workspace/.iflow/commands/ws-rule.toml +43 -0
- package/templates/workspace/.opencode/command/aiws-init.md +19 -0
- package/templates/workspace/.opencode/command/aiws-rollback.md +12 -0
- package/templates/workspace/.opencode/command/aiws-update.md +18 -0
- package/templates/workspace/.opencode/command/aiws-validate.md +13 -0
- package/templates/workspace/.opencode/command/ws-analyze.md +27 -0
- package/templates/workspace/.opencode/command/ws-dev.md +24 -0
- package/templates/workspace/.opencode/command/ws-migrate.md +22 -0
- package/templates/workspace/.opencode/command/ws-preflight.md +27 -0
- package/templates/workspace/.opencode/command/ws-req-change.md +34 -0
- package/templates/workspace/.opencode/command/ws-req-contract-sync.md +18 -0
- package/templates/workspace/.opencode/command/ws-req-contract-validate.md +13 -0
- package/templates/workspace/.opencode/command/ws-req-flow-sync.md +20 -0
- package/templates/workspace/.opencode/command/ws-req-review.md +33 -0
- package/templates/workspace/.opencode/command/ws-review.md +25 -0
- package/templates/workspace/.opencode/command/ws-rule.md +24 -0
- package/templates/workspace/AGENTS.md +22 -0
- package/templates/workspace/AI_PROJECT.md +86 -0
- package/templates/workspace/AI_WORKSPACE.md +167 -0
- package/templates/workspace/REQUIREMENTS.md +94 -0
- package/templates/workspace/changes/README.md +55 -0
- package/templates/workspace/changes/templates/design.md +29 -0
- package/templates/workspace/changes/templates/proposal.md +59 -0
- package/templates/workspace/changes/templates/tasks.md +33 -0
- package/templates/workspace/issues/problem-issues.csv +2 -0
- package/templates/workspace/manifest.json +205 -0
- package/templates/workspace/memory-bank/README.md +14 -0
- package/templates/workspace/memory-bank/architecture.md +9 -0
- package/templates/workspace/memory-bank/implementation-plan.md +11 -0
- package/templates/workspace/memory-bank/progress.md +10 -0
- package/templates/workspace/memory-bank/tech-stack.md +11 -0
- package/templates/workspace/requirements/CHANGELOG.md +13 -0
- package/templates/workspace/requirements/requirements-issues.csv +2 -0
- package/templates/workspace/secrets/test-accounts.example.json +32 -0
- package/templates/workspace/tools/iflow_watchdog.sh +138 -0
- package/templates/workspace/tools/install_iflow_watchdog_systemd_user.sh +118 -0
- package/templates/workspace/tools/requirements_contract.py +285 -0
- package/templates/workspace/tools/requirements_contract_sync.py +290 -0
- package/templates/workspace/tools/requirements_flow_gen.py +250 -0
- package/templates/workspace/tools/server_test_runner.py +1902 -0
- package/templates/workspace/tools/systemd/iflow-watchdog@.service +16 -0
- package/templates/workspace/tools/systemd/iflow-watchdog@.timer +11 -0
- package/templates/workspace/tools/ws_change_check.py +323 -0
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# Install and (optionally) enable a per-instance systemd user timer that runs
# the iflow watchdog against a workspace directory.
set -euo pipefail

die() { echo "error: $*" >&2; exit 2; }
have() { command -v "$1" >/dev/null 2>&1; }

usage() {
  cat <<'EOF'
Usage:
  bash tools/install_iflow_watchdog_systemd_user.sh
  bash tools/install_iflow_watchdog_systemd_user.sh --name <instance>
  bash tools/install_iflow_watchdog_systemd_user.sh --workspace <abs_path>
  bash tools/install_iflow_watchdog_systemd_user.sh --name <instance> --workspace <abs_path>

What it does:
  - Installs systemd user instance units:
      ~/.config/systemd/user/iflow-watchdog@.service
      ~/.config/systemd/user/iflow-watchdog@.timer
  - Writes per-instance env file:
      ~/.config/iflow-watchdog/<instance>.env
  - Enables and starts the timer:
      systemctl --user enable --now iflow-watchdog@<instance>.timer

Notes:
  - You can run multiple instances on one server (different --name and --workspace).
  - Unattended mode uses --yolo; only use in test environment.
Defaults:
  - --workspace defaults to current directory (pwd)
  - --name defaults to basename(--workspace)
EOF
}

name=""
workspace=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    --name) name="${2:-}"; shift 2 ;;
    --workspace) workspace="${2:-}"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *) die "unknown arg: $1" ;;
  esac
done

# Defaults: workspace = current directory, name = basename of workspace.
if [[ -z "$workspace" ]]; then
  workspace="$(pwd)"
fi

if [[ -z "$name" ]]; then
  name="$(basename "$workspace")"
fi

[[ "$workspace" == /* ]] || die "--workspace must be an absolute path"
[[ -d "$workspace" ]] || die "workspace not found: $workspace"

have systemctl || die "missing: systemctl"

# Sanity-check that the target actually looks like an aiws workspace.
if [[ ! -f "$workspace/AI_WORKSPACE.md" ]] || [[ ! -f "$workspace/REQUIREMENTS.md" ]]; then
  die "workspace must contain AI_WORKSPACE.md and REQUIREMENTS.md: $workspace"
fi

# Best-effort check that systemd --user is available (common pitfall on servers).
# NOTE(review): is-system-running also exits non-zero for a "degraded" user
# manager, which this treats the same as "not running" — confirm intended.
if [[ -z "${XDG_RUNTIME_DIR:-}" ]] || ! systemctl --user is-system-running >/dev/null 2>&1; then
  die "systemd --user not running (try: loginctl enable-linger $USER, then re-login)"
fi

unit_dir="$HOME/.config/systemd/user"
env_dir="$HOME/.config/iflow-watchdog"
mkdir -p "$unit_dir" "$env_dir"

# Copy the template units shipped next to this script.
src_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
tpl_service="$src_dir/systemd/iflow-watchdog@.service"
tpl_timer="$src_dir/systemd/iflow-watchdog@.timer"
[[ -f "$tpl_service" ]] || die "missing template: $tpl_service"
[[ -f "$tpl_timer" ]] || die "missing template: $tpl_timer"

cp -a "$tpl_service" "$unit_dir/iflow-watchdog@.service"
cp -a "$tpl_timer" "$unit_dir/iflow-watchdog@.timer"

env_file="$env_dir/${name}.env"

# Only allow unattended --yolo if AI_WORKSPACE.md declares test environment.
yo="0"
if grep -Eq '^\s*-\s*environment\s*:\s*"?test"?\s*$' "$workspace/AI_WORKSPACE.md"; then
  yo="1"
fi

# WORKSPACE_DIR is double-quoted so paths containing spaces survive systemd's
# EnvironmentFile parsing (systemd strips the quotes when loading the file).
cat >"$env_file" <<EOF
# Generated by tools/install_iflow_watchdog_systemd_user.sh
WORKSPACE_DIR="${workspace}"
IFLOW_WATCHDOG_ROOT=.
# Unattended mode (uses iflow --yolo). Only safe when environment=test.
IFLOW_WATCHDOG_YOLO=${yo}
# Optional: override logs dir (default: <workspace>/.agentdocs/tmp/server-test)
# IFLOW_WATCHDOG_LOG_DIR=${workspace}/.agentdocs/tmp/server-test
IFLOW_DRAIN_MAX_ENDPOINTS=30
IFLOW_WATCHDOG_SLEEP_S=10
EOF

systemctl --user daemon-reload

# Only auto-enable the timer when unattended mode is safe (test environment).
if [[ "$yo" == "1" ]]; then
  systemctl --user enable --now "iflow-watchdog@${name}.timer"
  enabled_msg="enabled"
else
  enabled_msg="not enabled (edit IFLOW_WATCHDOG_YOLO=1 after confirming test env)"
fi

echo "OK: installed iflow-watchdog@${name}"
echo "env: $env_file"
echo "unit: $unit_dir/iflow-watchdog@.service"
echo "timer: iflow-watchdog@${name}.timer ($enabled_msg)"
echo "tune: edit $env_file, then run:"
echo "  systemctl --user daemon-reload"
echo "  systemctl --user restart iflow-watchdog@${name}.service"
if [[ "$yo" != "1" ]]; then
  echo "enable:"
  echo "  systemctl --user enable --now iflow-watchdog@${name}.timer"
fi
|
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import argparse
|
|
5
|
+
import csv
|
|
6
|
+
import sys
|
|
7
|
+
import time
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Column order for the contract CSV; doubles as the CSV header and as the
# whitelist of fields that `set` may update.
COLUMNS = [
    "Req_ID",
    "Title",
    "Change_Type",
    "Module",
    "CRUD",
    "Actor",
    "Scenario",
    "Preconditions",
    "Inputs",
    "Outputs",
    "Data_Model",
    "Business_Logic",
    "API_Impact",
    "NonFunctional",
    "Spec_Status",
    "Impl_Status",
    "Tests",
    "Evidence",
    "Owner",
    "Created_At",
    "Updated_At",
    "Notes",
]


# Closed vocabularies for the enum-like columns (values stored uppercase).
SPEC_STATUS = {"DRAFT", "READY"}
IMPL_STATUS = {"TODO", "DOING", "DONE", "BLOCKED", "SKIP"}
CHANGE_TYPE = {"ADD", "UPDATE", "DELETE", "CLARIFY"}
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def now_utc() -> str:
    """Return the current UTC time as ISO-8601, e.g. "2024-01-02T03:04:05Z"."""
    utc_now = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc_now)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def read_rows(path: Path) -> List[Dict[str, str]]:
    """Load the contract CSV, normalizing every row to exactly the COLUMNS keys.

    Missing file yields an empty list; missing/None cells become "".
    """
    if not path.exists():
        return []
    with path.open("r", encoding="utf-8", newline="") as f:
        return [
            {col: (raw.get(col) or "") for col in COLUMNS}
            for raw in csv.DictReader(f)
        ]
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def write_rows(path: Path, rows: List[Dict[str, str]]) -> None:
    """Rewrite the contract CSV: header line plus rows projected onto COLUMNS."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w", encoding="utf-8", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=COLUMNS)
        writer.writeheader()
        writer.writerows(
            {col: (row.get(col) or "") for col in COLUMNS} for row in rows
        )
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def ensure_header(path: Path) -> None:
    """Make sure *path* exists and starts with the expected CSV header.

    NOTE(review): a pre-existing file whose first three columns do not match
    is rewritten as an empty contract (data discarded) — confirm this
    normalization is intended before reusing elsewhere.
    """
    if path.exists():
        with path.open("r", encoding="utf-8", newline="") as f:
            header = f.readline()
        if header.strip().split(",")[:3] == ["Req_ID", "Title", "Change_Type"]:
            return
    write_rows(path, [])
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def next_req_id(rows: List[Dict[str, str]]) -> str:
    """Return the next sequential id "REQ-NNNN" after the highest one present.

    Ids that do not match the REQ-<digits> shape are ignored.
    """
    highest = 0
    for row in rows:
        rid = (row.get("Req_ID") or "").strip()
        if rid.startswith("REQ-") and rid[4:].isdigit():
            highest = max(highest, int(rid[4:]))
    return f"REQ-{highest + 1:04d}"
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def normalize_choice(value: str, allowed: set[str], field: str) -> str:
    """Uppercase *value*; "" passes through, anything not in *allowed* aborts."""
    upper = value.strip().upper()
    if upper and upper not in allowed:
        raise SystemExit(f"invalid {field}: {value!r}, allowed: {sorted(allowed)}")
    return upper
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def add_row(
    *,
    path: Path,
    title: str,
    change_type: str,
    module: str,
    crud: str,
    actor: str,
    owner: str,
    spec_status: str,
    impl_status: str,
) -> str:
    """Append one new requirement row to the CSV and return its new Req_ID.

    Enum-like inputs are validated/uppercased; blanks fall back to the
    defaults ADD / DRAFT / TODO. All other columns start out empty.
    """
    rows = read_rows(path)
    rid = next_req_id(rows)
    ts = now_utc()
    # Start from an all-empty record, then fill the caller-provided fields.
    record = {col: "" for col in COLUMNS}
    record.update(
        Req_ID=rid,
        Title=title.strip(),
        Change_Type=normalize_choice(change_type, CHANGE_TYPE, "Change_Type") or "ADD",
        Module=module.strip(),
        CRUD=crud.strip(),
        Actor=actor.strip(),
        Spec_Status=normalize_choice(spec_status, SPEC_STATUS, "Spec_Status") or "DRAFT",
        Impl_Status=normalize_choice(impl_status, IMPL_STATUS, "Impl_Status") or "TODO",
        Owner=owner.strip(),
        Created_At=ts,
        Updated_At=ts,
    )
    rows.append(record)
    write_rows(path, rows)
    return rid
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def set_fields(*, path: Path, req_id: str, updates: Dict[str, str]) -> None:
    """Apply *updates* to the row whose Req_ID equals *req_id*.

    Aborts (SystemExit) on an unknown column name or a missing Req_ID.
    The touched row's Updated_At is refreshed.
    """
    rows = read_rows(path)
    ts = now_utc()
    for row in rows:
        if (row.get("Req_ID") or "").strip() != req_id:
            continue
        for key, value in updates.items():
            if key not in COLUMNS:
                raise SystemExit(f"unknown column: {key}")
            row[key] = value
        row["Updated_At"] = ts
        break
    else:
        raise SystemExit(f"Req_ID not found: {req_id}")
    write_rows(path, rows)
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
@dataclass(frozen=True)
class ValidateProblem:
    """One validation finding: which requirement, which field, what is wrong."""

    req_id: str   # Req_ID of the offending row
    field: str    # column the problem refers to
    message: str  # human-readable description
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def validate_rows(rows: List[Dict[str, str]]) -> List[ValidateProblem]:
    """Check enum columns and READY/DONE completeness; return all findings.

    Blank rows and the REQ-000 placeholder are skipped. A READY spec must
    have the core descriptive fields filled; a DONE implementation must
    carry Evidence.
    """
    ready_required = (
        "Title",
        "Module",
        "CRUD",
        "Scenario",
        "Inputs",
        "Outputs",
        "Business_Logic",
        "Tests",
    )
    problems: List[ValidateProblem] = []
    for row in rows:
        rid = (row.get("Req_ID") or "").strip()
        if not rid or rid == "REQ-000":
            continue
        spec = (row.get("Spec_Status") or "").strip().upper()
        impl = (row.get("Impl_Status") or "").strip().upper()
        if spec and spec not in SPEC_STATUS:
            problems.append(ValidateProblem(rid, "Spec_Status", f"invalid value: {spec}"))
        if impl and impl not in IMPL_STATUS:
            problems.append(ValidateProblem(rid, "Impl_Status", f"invalid value: {impl}"))
        ctype = (row.get("Change_Type") or "").strip().upper()
        if ctype and ctype not in CHANGE_TYPE:
            problems.append(ValidateProblem(rid, "Change_Type", f"invalid value: {ctype}"))

        if spec == "READY":
            for field in ready_required:
                if not (row.get(field) or "").strip():
                    problems.append(ValidateProblem(rid, field, "required when Spec_Status=READY"))
        if impl == "DONE" and not (row.get("Evidence") or "").strip():
            problems.append(ValidateProblem(rid, "Evidence", "required when Impl_Status=DONE"))
    return problems
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def main(argv: List[str]) -> int:
    """CLI entry point: init / add / set / validate on the contract CSV."""
    parser = argparse.ArgumentParser(
        description="Manage requirements execution contract CSV (requirements/requirements-issues.csv)."
    )
    parser.add_argument("--workspace", default=".", help="workspace root")
    parser.add_argument(
        "--csv",
        default="requirements/requirements-issues.csv",
        help="contract CSV path (relative to workspace)",
    )
    sub = parser.add_subparsers(dest="cmd", required=True)

    sub.add_parser("init", help="create CSV with header if missing")

    add_cmd = sub.add_parser("add", help="append a new requirement row (DRAFT by default)")
    add_cmd.add_argument("--title", required=True)
    add_cmd.add_argument("--change-type", default="ADD")
    add_cmd.add_argument("--module", default="")
    add_cmd.add_argument("--crud", default="")
    add_cmd.add_argument("--actor", default="")
    add_cmd.add_argument("--owner", default="")
    add_cmd.add_argument("--spec-status", default="DRAFT")
    add_cmd.add_argument("--impl-status", default="TODO")

    set_cmd = sub.add_parser("set", help="update fields for a given Req_ID")
    set_cmd.add_argument("req_id")
    set_cmd.add_argument("--field", action="append", default=[], help="key=value (repeatable)")

    sub.add_parser("validate", help="validate READY/DONE requirements for completeness")

    args = parser.parse_args(argv)
    root = Path(args.workspace).resolve()
    csv_path = (root / args.csv).resolve()

    if args.cmd == "init":
        ensure_header(csv_path)
        print(f"OK: ensured {csv_path.relative_to(root)}")
        return 0

    # Every other subcommand also needs the file to exist with a header.
    ensure_header(csv_path)

    if args.cmd == "add":
        rid = add_row(
            path=csv_path,
            title=args.title,
            change_type=args.change_type,
            module=args.module,
            crud=args.crud,
            actor=args.actor,
            owner=args.owner,
            spec_status=args.spec_status,
            impl_status=args.impl_status,
        )
        print(f"OK: added {rid} -> {csv_path.relative_to(root)}")
        return 0

    if args.cmd == "set":
        updates: Dict[str, str] = {}
        for pair in args.field:
            if "=" not in pair:
                raise SystemExit(f"invalid --field: {pair!r}, expected key=value")
            key, value = pair.split("=", 1)
            updates[key] = value
        # Normalize/validate enum-like columns before writing.
        for col, allowed in (
            ("Spec_Status", SPEC_STATUS),
            ("Impl_Status", IMPL_STATUS),
            ("Change_Type", CHANGE_TYPE),
        ):
            if col in updates:
                updates[col] = normalize_choice(updates[col], allowed, col)
        set_fields(path=csv_path, req_id=args.req_id, updates=updates)
        print(f"OK: updated {args.req_id} -> {csv_path.relative_to(root)}")
        return 0

    if args.cmd == "validate":
        problems = validate_rows(read_rows(csv_path))
        if not problems:
            print(f"OK: contract validated ({csv_path.relative_to(root)})")
            return 0
        print(f"ERROR: contract validation failed ({csv_path.relative_to(root)})", file=sys.stderr)
        # Cap the report at 80 findings to keep output readable.
        for problem in problems[:80]:
            print(f"- {problem.req_id}: {problem.field}: {problem.message}", file=sys.stderr)
        if len(problems) > 80:
            print(f"... and {len(problems) - 80} more", file=sys.stderr)
        return 2

    raise SystemExit("unreachable")


if __name__ == "__main__":
    raise SystemExit(main(sys.argv[1:]))
|
|
285
|
+
|
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import argparse
|
|
5
|
+
import csv
|
|
6
|
+
import json
|
|
7
|
+
import time
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# HTML-comment markers delimiting the machine-readable flow spec block
# inside REQUIREMENTS.md.
FLOW_SPEC_BEGIN = "<!-- FLOW_SPEC_BEGIN -->"
FLOW_SPEC_END = "<!-- FLOW_SPEC_END -->"


# Column order for the contract CSV (kept consistent with
# tools/requirements_contract.py).
COLUMNS = [
    "Req_ID",
    "Title",
    "Change_Type",
    "Module",
    "CRUD",
    "Actor",
    "Scenario",
    "Preconditions",
    "Inputs",
    "Outputs",
    "Data_Model",
    "Business_Logic",
    "API_Impact",
    "NonFunctional",
    "Spec_Status",
    "Impl_Status",
    "Tests",
    "Evidence",
    "Owner",
    "Created_At",
    "Updated_At",
    "Notes",
]
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _now_utc() -> str:
|
|
44
|
+
return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _read_text(path: Path) -> str:
    """Read *path* as UTF-8, substituting replacement chars for bad bytes."""
    content = path.read_text(encoding="utf-8", errors="replace")
    return content
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def extract_flow_spec_json(requirements_text: str) -> Dict[str, Any]:
    """Pull the JSON object out of the FLOW_SPEC fenced block, if any.

    Returns {} when the markers are absent, there is no ```json fence,
    the fence is empty, or the payload is not a JSON object. Malformed
    JSON propagates as json.JSONDecodeError.
    """
    if FLOW_SPEC_BEGIN not in requirements_text or FLOW_SPEC_END not in requirements_text:
        return {}
    _, _, after_begin = requirements_text.partition(FLOW_SPEC_BEGIN)
    body, _, _ = after_begin.partition(FLOW_SPEC_END)
    fence_at = body.find("```")
    if fence_at == -1:
        return {}
    fence_lines = body[fence_at:].splitlines()
    if not fence_lines:
        return {}
    opener = fence_lines[0].strip().lower()
    # Only accept a fence explicitly tagged as json.
    if not opener.startswith("```") or "json" not in opener:
        return {}
    payload: List[str] = []
    for line in fence_lines[1:]:
        if line.strip().startswith("```"):
            break
        payload.append(line)
    raw = "\n".join(payload).strip()
    if not raw:
        return {}
    data = json.loads(raw)
    return data if isinstance(data, dict) else {}
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
@dataclass(frozen=True)
class FlowStep:
    """One HTTP call in a user flow (method + path, optional label)."""

    method: str     # HTTP verb (uppercased by the parser)
    path: str       # request path; required
    name: str = ""  # optional human-readable label
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
@dataclass(frozen=True)
class Flow:
    """A named sequence of FlowStep calls parsed from the flow spec."""

    flow_id: str           # spec "id"; required
    title: str             # spec "title"; defaults to the id upstream
    steps: List[FlowStep]  # ordered HTTP calls
    notes: str = ""        # free-form notes from the spec
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def parse_flows(data: Dict[str, Any]) -> List[Flow]:
    """Convert the raw flow-spec dict into Flow objects.

    Entries are silently dropped when they lack an id, a list of steps,
    or at least one step with a non-empty path. Step methods default to
    GET and are uppercased.
    """
    raw_flows = data.get("flows")
    if not isinstance(raw_flows, list):
        return []
    result: List[Flow] = []
    for entry in raw_flows:
        if not isinstance(entry, dict):
            continue
        fid = str(entry.get("id") or "").strip()
        if not fid:
            continue
        raw_steps = entry.get("steps") or []
        if not isinstance(raw_steps, list):
            continue
        steps = [
            FlowStep(
                method=str(s.get("method") or "GET").strip().upper(),
                path=str(s.get("path") or "").strip(),
                name=str(s.get("name") or "").strip(),
            )
            for s in raw_steps
            if isinstance(s, dict) and str(s.get("path") or "").strip()
        ]
        if steps:
            result.append(
                Flow(
                    flow_id=fid,
                    title=str(entry.get("title") or fid).strip(),
                    steps=steps,
                    notes=str(entry.get("notes") or "").strip(),
                )
            )
    return result
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def _read_csv_rows(path: Path) -> List[Dict[str, str]]:
    """Load the contract CSV, normalizing every row to exactly COLUMNS keys."""
    if not path.exists():
        return []
    with path.open("r", encoding="utf-8", newline="") as f:
        return [
            {col: (raw.get(col) or "") for col in COLUMNS}
            for raw in csv.DictReader(f)
        ]
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def _write_csv_rows(path: Path, rows: List[Dict[str, str]]) -> None:
    """Rewrite the contract CSV with a header plus rows projected onto COLUMNS."""
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open("w", encoding="utf-8", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=COLUMNS)
        writer.writeheader()
        writer.writerows(
            {col: (row.get(col) or "") for col in COLUMNS} for row in rows
        )
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def _ensure_header(path: Path) -> None:
    """Make sure *path* exists and starts with the expected CSV header.

    NOTE(review): a pre-existing file whose first three columns do not match
    is replaced with an empty contract — confirm this is intended.
    """
    if path.exists():
        with path.open("r", encoding="utf-8", newline="") as f:
            header = f.readline().strip()
        if header.split(",")[:3] == ["Req_ID", "Title", "Change_Type"]:
            return
    _write_csv_rows(path, [])
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
def _next_req_id(rows: List[Dict[str, str]]) -> str:
|
|
155
|
+
max_n = 0
|
|
156
|
+
for r in rows:
|
|
157
|
+
rid = (r.get("Req_ID") or "").strip()
|
|
158
|
+
if rid.startswith("REQ-") and rid[4:].isdigit():
|
|
159
|
+
max_n = max(max_n, int(rid[4:]))
|
|
160
|
+
return f"REQ-{max_n+1:04d}"
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def _find_row_by_source(rows: List[Dict[str, str]], source: str) -> Optional[Dict[str, str]]:
|
|
164
|
+
needle = f"source={source}"
|
|
165
|
+
for r in rows:
|
|
166
|
+
notes = (r.get("Notes") or "").strip()
|
|
167
|
+
if needle in notes:
|
|
168
|
+
return r
|
|
169
|
+
return None
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _append_source(notes: str, source: str) -> str:
|
|
173
|
+
token = f"source={source}"
|
|
174
|
+
s = (notes or "").strip()
|
|
175
|
+
if token in s:
|
|
176
|
+
return s
|
|
177
|
+
return (s + ("\n" if s else "") + token).strip()
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _new_row_from_flow(
    flow: Flow,
    *,
    rid: str,
    steps_str: str,
    source: str,
    now: str,
    default_owner: str,
    default_tests: str,
) -> Dict[str, str]:
    """Build a fresh DRAFT/TODO contract row for a flow not yet in the CSV."""
    return {
        "Req_ID": rid,
        "Title": flow.title,
        "Change_Type": "CLARIFY",
        "Module": "",
        "CRUD": "",
        "Actor": "",
        "Scenario": flow.title,
        "Preconditions": "",
        "Inputs": "",
        "Outputs": "",
        "Data_Model": "",
        "Business_Logic": flow.notes,
        "API_Impact": steps_str,
        "NonFunctional": "",
        "Spec_Status": "DRAFT",
        "Impl_Status": "TODO",
        "Tests": default_tests,
        "Evidence": "",
        "Owner": default_owner,
        "Created_At": now,
        "Updated_At": now,
        "Notes": _append_source("", source),
    }


def _merge_flow_into_row(
    row: Dict[str, str],
    flow: Flow,
    *,
    steps_str: str,
    source: str,
    now: str,
    default_owner: str,
    default_tests: str,
) -> None:
    """Fill empty cells of an existing row from *flow*, in place.

    Non-empty cells are kept (stripped), matching the original fill-only
    semantics; only Updated_At and the appended Notes marker always change.
    """

    def keep_or(key: str, fallback: str) -> str:
        # Keep the existing (stripped) value unless it is empty.
        return (row.get(key) or "").strip() or fallback

    row["Title"] = keep_or("Title", flow.title)
    row["Scenario"] = keep_or("Scenario", flow.title)
    row["Business_Logic"] = keep_or("Business_Logic", flow.notes)
    row["API_Impact"] = keep_or("API_Impact", steps_str)
    row["Tests"] = keep_or("Tests", default_tests)
    row["Owner"] = keep_or("Owner", default_owner)
    row["Notes"] = _append_source(row.get("Notes") or "", source)
    row["Updated_At"] = now


def upsert_from_flows(
    *,
    csv_path: Path,
    flows: List[Flow],
    default_owner: str,
    default_tests: str,
) -> Dict[str, int]:
    """Upsert one contract row per flow into the CSV at *csv_path*.

    Rows are matched by the "source=flow:<flow_id>" marker stored in
    Notes.  Unmatched flows create new DRAFT rows; matched rows only get
    their empty cells filled.  Returns counters:
    {"created": ..., "updated": ..., "total_flows": ...}.
    """
    _ensure_header(csv_path)
    rows = _read_csv_rows(csv_path)
    now = _now_utc()

    created = 0
    updated = 0

    for flow in flows:
        source = f"flow:{flow.flow_id}"
        steps_str = " -> ".join(f"{s.method} {s.path}".strip() for s in flow.steps)
        row = _find_row_by_source(rows, source)
        if row is None:
            rows.append(
                _new_row_from_flow(
                    flow,
                    rid=_next_req_id(rows),
                    steps_str=steps_str,
                    source=source,
                    now=now,
                    default_owner=default_owner,
                    default_tests=default_tests,
                )
            )
            created += 1
        else:
            _merge_flow_into_row(
                row,
                flow,
                steps_str=steps_str,
                source=source,
                now=now,
                default_owner=default_owner,
                default_tests=default_tests,
            )
            updated += 1

    _write_csv_rows(csv_path, rows)
    return {"created": created, "updated": updated, "total_flows": len(flows)}
|
|
240
|
+
|
|
241
|
+
|
|
242
|
+
def main(argv: List[str]) -> int:
    """CLI entry point: sync FlowSpec flows from REQUIREMENTS.md into the contract CSV.

    Returns 0 on success (including the "no flows found" warning path);
    raises SystemExit for a missing requirements file.
    """
    p = argparse.ArgumentParser(description="Backfill requirements contract CSV from REQUIREMENTS.md FlowSpec.")
    p.add_argument("--workspace", default=".", help="workspace root")
    p.add_argument("--requirements", default="REQUIREMENTS.md", help="requirements markdown path (relative to workspace)")
    p.add_argument("--out-csv", default="requirements/requirements-issues.csv", help="output contract CSV path")
    p.add_argument("--owner", default="", help="default Owner for new rows")
    p.add_argument(
        "--default-tests",
        default="",
        help="default Tests for new rows (leave empty to force manual fill later)",
    )
    args = p.parse_args(argv)

    root = Path(args.workspace).resolve()
    req_path = (root / args.requirements).resolve()
    csv_path = (root / args.out_csv).resolve()

    def rel(path: Path) -> str:
        # relative_to raises ValueError when the path is outside the
        # workspace (e.g. an absolute --out-csv); fall back to the
        # absolute form instead of crashing after the work is done.
        try:
            return str(path.relative_to(root))
        except ValueError:
            return str(path)

    if not req_path.exists():
        raise SystemExit(f"missing requirements file: {req_path}")

    spec = extract_flow_spec_json(_read_text(req_path))
    flows = parse_flows(spec) if spec else []
    if not flows:
        print("WARN: no FlowSpec found in REQUIREMENTS.md (or flows empty); nothing to sync.")
        print("hint: add/update FlowSpec between FLOW_SPEC markers, then re-run.")
        _ensure_header(csv_path)
        print(f"OK: ensured {rel(csv_path)}")
        return 0

    stats = upsert_from_flows(
        csv_path=csv_path,
        flows=flows,
        default_owner=args.owner.strip(),
        default_tests=args.default_tests.strip(),
    )

    print(f"OK: synced from FlowSpec -> {rel(csv_path)}")
    print(f"- flows: {stats['total_flows']}")
    print(f"- created: {stats['created']}")
    print(f"- updated: {stats['updated']}")
    print("next: fill missing fields, then run: python3 tools/requirements_contract.py validate")
    return 0
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
if __name__ == "__main__":
    # Plain local import instead of the obfuscated __import__('sys') hack.
    import sys

    raise SystemExit(main(sys.argv[1:]))
|
|
290
|
+
|