bmad-plus 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. package/CHANGELOG.md +75 -0
  2. package/README.md +482 -0
  3. package/osint-agent-package/README.md +88 -0
  4. package/osint-agent-package/SETUP_KEYS.md +108 -0
  5. package/osint-agent-package/agents/osint-investigator.md +80 -0
  6. package/osint-agent-package/install.ps1 +87 -0
  7. package/osint-agent-package/install.sh +76 -0
  8. package/osint-agent-package/skills/bmad-osint-investigate/SKILL.md +147 -0
  9. package/osint-agent-package/skills/bmad-osint-investigate/osint/SKILL.md +452 -0
  10. package/osint-agent-package/skills/bmad-osint-investigate/osint/assets/dossier-template.md +116 -0
  11. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/content-extraction.md +100 -0
  12. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/enrichment-databases-fr.md +148 -0
  13. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/platforms.md +130 -0
  14. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/psychoprofile.md +69 -0
  15. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/tools.md +281 -0
  16. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/_http.py +101 -0
  17. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/apify.py +260 -0
  18. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/brightdata.py +101 -0
  19. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/diagnose.py +141 -0
  20. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/exa.py +79 -0
  21. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/jina.py +71 -0
  22. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/mcp-client.py +136 -0
  23. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/parallel.py +85 -0
  24. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/perplexity.py +102 -0
  25. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/tavily.py +72 -0
  26. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/volley.py +208 -0
  27. package/osint-agent-package/skills/bmad-osint-investigator/SKILL.md +15 -0
  28. package/package.json +51 -0
  29. package/readme-international/README.de.md +392 -0
  30. package/readme-international/README.es.md +484 -0
  31. package/readme-international/README.fr.md +482 -0
  32. package/src/bmad-plus/agents/agent-architect-dev/SKILL.md +96 -0
  33. package/src/bmad-plus/agents/agent-architect-dev/bmad-skill-manifest.yaml +13 -0
  34. package/src/bmad-plus/agents/agent-maker/SKILL.md +201 -0
  35. package/src/bmad-plus/agents/agent-maker/bmad-skill-manifest.yaml +13 -0
  36. package/src/bmad-plus/agents/agent-orchestrator/SKILL.md +137 -0
  37. package/src/bmad-plus/agents/agent-orchestrator/bmad-skill-manifest.yaml +13 -0
  38. package/src/bmad-plus/agents/agent-quality/SKILL.md +83 -0
  39. package/src/bmad-plus/agents/agent-quality/bmad-skill-manifest.yaml +13 -0
  40. package/src/bmad-plus/agents/agent-shadow/SKILL.md +71 -0
  41. package/src/bmad-plus/agents/agent-shadow/bmad-skill-manifest.yaml +13 -0
  42. package/src/bmad-plus/agents/agent-strategist/SKILL.md +80 -0
  43. package/src/bmad-plus/agents/agent-strategist/bmad-skill-manifest.yaml +13 -0
  44. package/src/bmad-plus/data/role-triggers.yaml +209 -0
  45. package/src/bmad-plus/module-help.csv +10 -0
  46. package/src/bmad-plus/module.yaml +174 -0
  47. package/src/bmad-plus/skills/bmad-plus-autopilot/SKILL.md +99 -0
  48. package/src/bmad-plus/skills/bmad-plus-parallel/SKILL.md +93 -0
  49. package/src/bmad-plus/skills/bmad-plus-sync/SKILL.md +69 -0
  50. package/tools/bmad-plus-npx.js +33 -0
  51. package/tools/cli/bmad-plus-cli.js +50 -0
  52. package/tools/cli/commands/install.js +437 -0
  53. package/tools/cli/commands/uninstall.js +70 -0
@@ -0,0 +1,71 @@
1
+ #!/usr/bin/env python3
2
+ """Jina AI — read URLs, search, deep search. Stdlib only."""
3
+
4
+ import json
5
+ import os
6
+ import sys
7
+
8
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
9
+ from _http import https_request, get_key, get_workspace
10
+
11
+
12
def init():
    """Resolve the Jina API key (env var, or on-disk key file fallback)."""
    ws = get_workspace()[0]
    key_file = os.path.join(ws, "scripts", "jina-api-key.txt")
    return get_key("JINA_API_KEY", file_fallback=key_file,
                   help_url="https://jina.ai/api-key")
17
+
18
+
19
def read_url(token, url):
    """Fetch *url* through r.jina.ai and print up to 5000 chars of clean markdown."""
    auth_headers = {"Authorization": f"Bearer {token}",
                    "Accept": "application/json"}
    _status, _hdrs, body = https_request("GET", f"https://r.jina.ai/{url}",
                                         headers=auth_headers)
    print(body[:5000])
25
+
26
+
27
def search(token, query):
    """Web search via s.jina.ai; print up to 5000 chars of markdown results."""
    from urllib.parse import quote
    endpoint = f"https://s.jina.ai/{quote(query)}"
    _status, _hdrs, body = https_request(
        "GET", endpoint,
        headers={"Authorization": f"Bearer {token}",
                 "Accept": "application/json"})
    print(body[:5000])
34
+
35
+
36
def deepsearch(token, query):
    """Deep research with AI reasoning via deepsearch.jina.ai.

    Uses a long 300 s timeout because deep research is slow.
    Prints up to 5000 chars of the raw response body.
    """
    # Use the module-level json import; the original re-imported it
    # locally as `j`, which was redundant.
    _status, _hdrs, body = https_request(
        "POST", "https://deepsearch.jina.ai",
        headers={"Authorization": f"Bearer {token}",
                 "Content-Type": "application/json",
                 "Accept": "application/json"},
        body=json.dumps({"query": query}),
        timeout=300)
    print(body[:5000])
46
+
47
+
48
def main():
    """CLI entry point: dispatch to read | search | deepsearch."""
    if len(sys.argv) < 3:
        print("Usage: jina.py read|search|deepsearch <arg>")
        print()
        print(" read <url> - any URL → clean markdown")
        print(" search <query> - web search → markdown (10 results)")
        print(" deepsearch <q> - deep research with AI reasoning")
        sys.exit(1)
    token = init()
    cmd = sys.argv[1]
    arg = " ".join(sys.argv[2:])
    handlers = {"read": read_url, "search": search, "deepsearch": deepsearch}
    handler = handlers.get(cmd)
    if handler is None:
        print(f"Unknown: {cmd} (use read|search|deepsearch)", file=sys.stderr)
        sys.exit(1)
    handler(token, arg)


if __name__ == "__main__":
    main()
@@ -0,0 +1,136 @@
1
+ #!/usr/bin/env python3
2
+ """Lightweight MCP client for Streamable HTTP/SSE transport.
3
+
4
+ Usage:
5
+ python3 mcp-client.py <mcp_url> --list-tools
6
+ python3 mcp-client.py <mcp_url> <tool_name> '<json_args>'
7
+ """
8
+
9
+ import json
10
+ import sys
11
+ import http.client
12
+ from urllib.parse import urlparse, urlencode
13
+
14
+
15
def mcp_request(url: str, method: str, params: dict | None = None,
                req_id: int = 1, session_id: str | None = None) -> tuple[dict, str | None]:
    """Send an MCP JSON-RPC request and parse the (possibly SSE) response.

    Args:
        url: Full MCP endpoint URL (https:// or http://).
        method: JSON-RPC method name (e.g. "initialize", "tools/call").
        params: Optional JSON-RPC params object.
        req_id: JSON-RPC request id.
        session_id: Existing Mcp-Session-Id header value, if any.

    Returns:
        (parsed_result, session_id) — the session id is taken from the
        response's Mcp-Session-Id header when present, else echoed back.
    """
    parsed = urlparse(url)

    payload = {
        "jsonrpc": "2.0",
        "id": req_id,
        "method": method,
    }
    if params:
        payload["params"] = params

    data = json.dumps(payload).encode()

    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json, text/event-stream",
    }
    if session_id:
        headers["Mcp-Session-Id"] = session_id

    # Honor an explicit port and a plain-http scheme; the original always
    # used HTTPSConnection(hostname) and silently dropped parsed.port.
    conn_cls = (http.client.HTTPConnection if parsed.scheme == "http"
                else http.client.HTTPSConnection)
    conn = conn_cls(parsed.hostname, parsed.port, timeout=120)
    path = parsed.path or "/"
    if parsed.query:
        path += "?" + parsed.query

    try:
        conn.request("POST", path, body=data, headers=headers)
        resp = conn.getresponse()
        # The server may (re)issue a session id in the response headers.
        new_session_id = resp.getheader("Mcp-Session-Id") or session_id
        body = resp.read().decode()
    finally:
        # Always release the socket, even on request/read failure
        # (the original leaked the connection on exception).
        conn.close()

    if resp.status >= 400:
        return {"error": f"HTTP {resp.status}: {body[:200]}"}, new_session_id

    # Streamable HTTP servers answer as SSE: take the first parseable "data:" line.
    for line in body.split("\n"):
        if line.startswith("data: "):
            try:
                return json.loads(line[6:]), new_session_id
            except json.JSONDecodeError:
                continue

    # Plain-JSON fallback.
    try:
        return json.loads(body), new_session_id
    except json.JSONDecodeError:
        return {"raw": body[:500]}, new_session_id
67
+
68
+
69
def init_and_call(url: str, method: str, params: dict | None = None) -> dict:
    """Run the MCP initialize handshake, then issue *method* on the session."""
    handshake, sid = mcp_request(url, "initialize", {
        "protocolVersion": "2024-11-05",
        "capabilities": {},
        "clientInfo": {"name": "osint-skill", "version": "3.1"},
    }, req_id=1)

    if "error" in handshake:
        return handshake

    # The "initialized" notification is skipped — servers tolerate this.
    response, _ = mcp_request(url, method, params, req_id=2, session_id=sid)
    return response
85
+
86
+
87
def list_tools(url: str) -> list:
    """Return the tool descriptors advertised by the MCP server (may be empty)."""
    response = init_and_call(url, "tools/list")
    return response.get("result", {}).get("tools", [])
92
+
93
+
94
def call_tool(url: str, tool_name: str, arguments: dict) -> dict:
    """Invoke *tool_name* with *arguments* on the MCP server."""
    params = {"name": tool_name, "arguments": arguments}
    return init_and_call(url, "tools/call", params)
101
+
102
+
103
def main():
    """CLI: list an MCP server's tools, or call one and print its text output."""
    if len(sys.argv) < 3:
        print(__doc__)
        sys.exit(1)

    url = sys.argv[1]

    if sys.argv[2] == "--list-tools":
        tools = list_tools(url)
        if not tools:
            print("No tools found or error occurred")
            return
        for tool in tools:
            summary = tool.get("description", "")[:100]
            print(f" {tool['name']}: {summary}")
        return

    tool_name = sys.argv[2]
    args = json.loads(sys.argv[3]) if len(sys.argv) > 3 else {}

    result = call_tool(url, tool_name, args)

    # MCP tool results carry a content list; print text items directly,
    # otherwise dump the whole response as JSON.
    content = result.get("result", {}).get("content", [])
    if content:
        for item in content:
            if item.get("type") == "text":
                print(item["text"])
    else:
        print(json.dumps(result, indent=2, ensure_ascii=False))


if __name__ == "__main__":
    main()
@@ -0,0 +1,85 @@
1
+ #!/usr/bin/env python3
2
+ """Parallel AI — search, extract, task. Stdlib only."""
3
+
4
+ import json
5
+ import os
6
+ import sys
7
+
8
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
9
+ from _http import api_post, https_request, get_key, get_workspace
10
+
11
+ BASE = "https://api.parallel.ai/v1beta"
12
+ BETA_HEADER = "parallel-beta"
13
+ BETA_VALUE = "search-extract-2025-10-10"
14
+
15
+
16
def init():
    """Resolve the Parallel API key (env var, or on-disk key file fallback)."""
    ws = get_workspace()[0]
    key_file = os.path.join(ws, "scripts", "parallel-api-key.txt")
    return get_key("PARALLEL_API_KEY", file_fallback=key_file,
                   help_url="https://platform.parallel.ai")
21
+
22
+
23
def search(token, query):
    """AI-powered web search returning LLM-optimized excerpts; prints JSON."""
    payload = {
        "objective": query,
        "search_queries": [query],
        "max_results": 10,
        "excerpts": {"max_chars_per_result": 5000},
    }
    data = api_post(f"{BASE}/search", payload,
                    headers={"x-api-key": token, BETA_HEADER: BETA_VALUE})
    if data:
        print(json.dumps(data, indent=2, ensure_ascii=False)[:5000])
36
+
37
+
38
def extract(token, url):
    """Extract a URL (JS-heavy pages, PDFs) into clean markdown; prints JSON."""
    payload = {"url": url, "full_content": True}
    data = api_post(f"{BASE}/extract", payload,
                    headers={"x-api-key": token, BETA_HEADER: BETA_VALUE})
    if data:
        print(json.dumps(data, indent=2, ensure_ascii=False)[:5000])
49
+
50
+
51
def task(token, task_text):
    """Run a complex research task with structured output; prints JSON."""
    data = api_post(f"{BASE}/task", {"task": task_text},
                    headers={"x-api-key": token})
    if data:
        print(json.dumps(data, indent=2, ensure_ascii=False)[:5000])
60
+
61
+
62
def main():
    """CLI entry point: dispatch to search | extract | task."""
    if len(sys.argv) < 3:
        print("Usage: parallel.py search|extract|task <arg>")
        print()
        print(" search <query> - AI-powered web search")
        print(" extract <url> - URL → clean markdown")
        print(" task <task> - Complex research task")
        sys.exit(1)
    token = init()
    cmd = sys.argv[1]
    arg = " ".join(sys.argv[2:])
    actions = {"search": search, "extract": extract, "task": task}
    action = actions.get(cmd)
    if action is None:
        print(f"Unknown: {cmd} (use search|extract|task)", file=sys.stderr)
        sys.exit(1)
    action(token, arg)


if __name__ == "__main__":
    main()
@@ -0,0 +1,102 @@
1
+ #!/usr/bin/env python3
2
+ """Perplexity API — search + sonar + deep research. Stdlib only."""
3
+
4
+ import json
5
+ import os
6
+ import sys
7
+
8
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
9
+ from _http import api_post, get_key, truncate
10
+
11
+ API_KEY = None
12
+
13
+
14
def init():
    """Load the Perplexity API key into the module-level API_KEY global."""
    global API_KEY
    API_KEY = get_key("PERPLEXITY_API_KEY",
                      help_url="https://perplexity.ai/settings/api")
17
+
18
+
19
def search(query):
    """Search API — print up to 10 ranked web results (title, url, snippet)."""
    data = api_post("https://api.perplexity.ai/search",
                    {"query": [query]},
                    headers={"Authorization": f"Bearer {API_KEY}"})
    if not data:
        return
    if "results" in data:
        for hit in data["results"][:10]:
            print(f'🔗 {hit.get("title", "")}')
            print(f' {hit.get("url", "")}')
            print(f' {truncate(hit.get("snippet", ""), 200)}')
            print()
    elif "error" in data:
        print(f'ERROR: {json.dumps(data["error"])}', file=sys.stderr)
    else:
        print(json.dumps(data, indent=2)[:2000])
36
+
37
+
38
def sonar(query):
    """Sonar API — print an AI-generated answer followed by its citations."""
    data = api_post("https://api.perplexity.ai/chat/completions",
                    {"model": "sonar",
                     "messages": [{"role": "user", "content": query}]},
                    headers={"Authorization": f"Bearer {API_KEY}"})
    if not data:
        return
    if "choices" in data:
        message = data["choices"][0]["message"]
        print(message.get("content", ""))
        # Citations may live at the top level or inside the message.
        citations = data.get("citations", message.get("citations", []))
        if citations:
            print("\n--- Sources ---")
            for idx, cite in enumerate(citations[:10], 1):
                label = cite if isinstance(cite, str) else cite.get("url", cite)
                print(f"{idx}. {label}")
    elif "error" in data:
        print(f'ERROR: {json.dumps(data["error"])}', file=sys.stderr)
    else:
        print(json.dumps(data, indent=2)[:2000])
58
+
59
+
60
def deep(query):
    """Deep Research via sonar-deep-research (long 300 s timeout)."""
    data = api_post("https://api.perplexity.ai/chat/completions",
                    {"model": "sonar-deep-research",
                     "messages": [{"role": "user", "content": query}]},
                    headers={"Authorization": f"Bearer {API_KEY}"},
                    timeout=300)
    if not data:
        return
    if "choices" in data:
        message = data["choices"][0]["message"]
        print(message.get("content", ""))
        # Citations may live at the top level or inside the message.
        citations = data.get("citations", message.get("citations", []))
        if citations:
            print("\n--- Sources ---")
            for idx, cite in enumerate(citations[:15], 1):
                label = cite if isinstance(cite, str) else cite.get("url", cite)
                print(f"{idx}. {label}")
    elif "error" in data:
        print(f'ERROR: {json.dumps(data["error"])}', file=sys.stderr)
    else:
        print(json.dumps(data, indent=2)[:2000])
81
+
82
+
83
def main():
    """CLI entry point: dispatch to search | sonar | deep."""
    if len(sys.argv) < 3:
        print("Usage: perplexity.py search|sonar|deep <query>")
        sys.exit(1)
    init()
    dispatch = {"search": search, "sonar": sonar, "deep": deep}
    cmd = sys.argv[1]
    query = " ".join(sys.argv[2:])
    if cmd not in dispatch:
        print(f"Unknown command: {cmd} (use search|sonar|deep)", file=sys.stderr)
        sys.exit(1)
    dispatch[cmd](query)


if __name__ == "__main__":
    main()
@@ -0,0 +1,72 @@
1
+ #!/usr/bin/env python3
2
+ """Tavily API — AI-optimized search for agents. Stdlib only."""
3
+
4
+ import json
5
+ import os
6
+ import sys
7
+
8
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
9
+ from _http import api_post, get_key, truncate
10
+
11
+
12
def init():
    """Resolve the Tavily API key from the environment."""
    return get_key("TAVILY_API_KEY", help_url="https://app.tavily.com/home")
14
+
15
+
16
def search(api_key, query, depth="basic"):
    """Tavily search — basic ($0.005) or advanced (deep); prints answer + hits."""
    payload = {
        "api_key": api_key,
        "query": query,
        "search_depth": depth,
        "max_results": 10,
        "include_answer": True,
        "include_raw_content": False,
    }
    data = api_post("https://api.tavily.com/search", payload)
    if not data:
        return
    if data.get("answer"):
        print(f'💡 {data["answer"]}\n')
    # Advanced searches get a longer snippet.
    snippet_len = 300 if depth == "advanced" else 200
    for hit in data.get("results", [])[:10]:
        print(f'🔗 {hit.get("title", "")}')
        print(f' {hit.get("url", "")}')
        print(f' {truncate(hit.get("content", ""), snippet_len)}')
        relevance = hit.get("score", "")
        if relevance:
            print(f" relevance: {relevance:.2f}")
        print()
38
+
39
+
40
def extract(api_key, url):
    """Extract page content from *url* via Tavily; prints up to 3000 chars."""
    data = api_post("https://api.tavily.com/extract",
                    {"api_key": api_key, "urls": [url]})
    if not data:
        return
    for item in data.get("results", []):
        print(f'📄 {item.get("url", "")}')
        body = item.get("raw_content", item.get("content", ""))
        print(body[:3000])
51
+
52
+
53
def main():
    """CLI entry point: search (basic), deep (advanced search), or extract."""
    if len(sys.argv) < 3:
        print("Usage: tavily.py search|extract|deep <query>")
        sys.exit(1)
    api_key = init()
    cmd = sys.argv[1]
    query = " ".join(sys.argv[2:])
    if cmd in ("search", "deep"):
        search(api_key, query, "advanced" if cmd == "deep" else "basic")
    elif cmd == "extract":
        extract(api_key, query)
    else:
        print(f"Unknown: {cmd} (use search|extract|deep)", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
@@ -0,0 +1,208 @@
1
+ #!/usr/bin/env python3
2
+ """OSINT First Volley + Merge — parallel search across all engines, then deduplicate.
3
+
4
+ Replaces: first-volley.sh + merge-volley.sh
5
+ Uses concurrent.futures for parallel execution (stdlib).
6
+
7
+ Usage:
8
+ python volley.py search "Full Name" ["context keywords"]
9
+ python volley.py merge /tmp/osint-<timestamp>
10
+ """
11
+
12
+ import concurrent.futures
13
+ import json
14
+ import os
15
+ import re
16
+ import subprocess
17
+ import sys
18
+ import time
19
+
20
+ sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
21
+ from _http import get_workspace
22
+
23
+
24
def run_script(scripts_dir, script_name, cmd, query, output_file):
    """Execute one OSINT helper script, capturing its stdout into *output_file*.

    Missing scripts are skipped silently; timeouts and other failures are
    reported on stderr but never raised (best-effort semantics for the volley).
    """
    script_path = os.path.join(scripts_dir, script_name)
    if not os.path.isfile(script_path):
        return
    try:
        proc = subprocess.run(
            [sys.executable, script_path, cmd, query],
            capture_output=True, text=True, timeout=60,
            env=os.environ.copy(),
        )
        with open(output_file, "w", encoding="utf-8") as fh:
            fh.write(proc.stdout or "")
        if proc.stderr:
            print(f" ⚠️ {script_name}: {proc.stderr[:200]}", file=sys.stderr)
    except subprocess.TimeoutExpired:
        print(f" ⏱️ {script_name}: timeout", file=sys.stderr)
    except Exception as exc:
        print(f" ❌ {script_name}: {exc}", file=sys.stderr)
43
+
44
+
45
def first_volley(name, context=""):
    """Launch parallel searches across all available engines.

    An engine is included only when its API key is visible: an env var for
    all engines, or (Jina/Parallel only) an on-disk key file under the
    workspace. Each search's stdout lands as one file in a fresh
    osint-<timestamp> directory under TEMP (Windows) or /tmp.

    Args:
        name: Full name or handle of the investigation target.
        context: Optional extra keywords appended to the general query.

    Returns:
        Path to the output directory holding the raw result files.
    """
    workspace, skill_dir, scripts_dir = get_workspace()
    query = f"{name} {context}".strip()
    timestamp = int(time.time())
    # TEMP is set on Windows; fall back to /tmp elsewhere.
    outdir = os.path.join(os.environ.get("TEMP", "/tmp"), f"osint-{timestamp}")
    os.makedirs(outdir, exist_ok=True)

    print(f"🔍 First Volley: {query}")
    print(f" Output: {outdir}")
    print()

    # Each entry: (script filename, subcommand, query string, output file).
    searches = []

    # Jina — general + social (env var or key file enables it)
    if os.environ.get("JINA_API_KEY") or os.path.isfile(
            os.path.join(workspace, "scripts", "jina-api-key.txt")):
        searches.append(("jina.py", "search", query,
                         os.path.join(outdir, "jina-general.json")))
        searches.append(("jina.py", "search",
                         f"{name} instagram linkedin facebook telegram",
                         os.path.join(outdir, "jina-social.json")))

    # Parallel — general + social (env var or key file enables it)
    if os.environ.get("PARALLEL_API_KEY") or os.path.isfile(
            os.path.join(workspace, "scripts", "parallel-api-key.txt")):
        searches.append(("parallel.py", "search", query,
                         os.path.join(outdir, "parallel-general.json")))
        searches.append(("parallel.py", "search",
                         f"{name} instagram linkedin telegram facebook profile",
                         os.path.join(outdir, "parallel-social.json")))

    # Tavily — env var only
    if os.environ.get("TAVILY_API_KEY"):
        searches.append(("tavily.py", "search", query,
                         os.path.join(outdir, "tavily-general.json")))

    # Exa — people search on the bare name (no context keywords)
    if os.environ.get("EXA_API_KEY"):
        searches.append(("exa.py", "people", name,
                         os.path.join(outdir, "exa-people.json")))

    # Perplexity — sonar answer for the full query
    if os.environ.get("PERPLEXITY_API_KEY"):
        searches.append(("perplexity.py", "sonar", query,
                         os.path.join(outdir, "perplexity-sonar.json")))

    if not searches:
        print("❌ No API keys found. Set at least one search API key.")
        print(" Run: python diagnose.py")
        sys.exit(1)

    print(f"Launching {len(searches)} parallel searches...")

    # Fan out over a small thread pool. Each script has its own 60 s timeout
    # inside run_script; the whole batch is additionally capped at 90 s.
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        futures = []
        for script, cmd, q, outfile in searches:
            print(f" → {script} {cmd}")
            futures.append(executor.submit(run_script, scripts_dir, script, cmd, q, outfile))
        concurrent.futures.wait(futures, timeout=90)

    print()
    print(f"✅ All searches complete. Results in {outdir}")

    # List the result files with their sizes for a quick sanity check.
    for f in os.listdir(outdir):
        fpath = os.path.join(outdir, f)
        size = os.path.getsize(fpath)
        print(f" {f} ({size:,} bytes)")
    print()
    print(f"Run: python volley.py merge {outdir}")
    return outdir
119
+
120
+
121
def merge_volley(outdir):
    """Merge first-volley results: dedupe URLs, group by platform, save to disk."""
    if not os.path.isdir(outdir):
        print(f"Error: {outdir} not found")
        sys.exit(1)

    print(f"=== MERGE: {outdir} ===")
    print()

    # Harvest every URL from every readable result file.
    url_pattern = re.compile(r'https?://[^\s"\'<>]+')
    unique_urls = set()
    for entry in os.listdir(outdir):
        path = os.path.join(outdir, entry)
        if not os.path.isfile(path):
            continue
        try:
            with open(path, "r", encoding="utf-8") as fh:
                unique_urls.update(url_pattern.findall(fh.read()))
        except Exception:
            continue

    if not unique_urls:
        print("⚠️ No URLs found in results.")
        sys.exit(0)

    print(f"📊 Total unique URLs: {len(unique_urls)}")
    print()

    # (display label, case-insensitive substrings that identify the platform)
    platforms = [
        ("🔗 LinkedIn", ("linkedin.com",)),
        ("📸 Instagram", ("instagram.com",)),
        ("📘 Facebook", ("facebook.com",)),
        ("✈️ Telegram", ("t.me",)),
        ("🐦 Twitter/X", ("twitter.com", "x.com")),
        ("📺 VK", ("vk.com",)),
    ]

    categorized = set()
    for label, needles in platforms:
        hits = [u for u in unique_urls
                if any(n in u.lower() for n in needles)]
        print(f"{label}:")
        if hits:
            for u in hits[:10]:
                print(f" {u}")
            categorized.update(hits)
        else:
            print(" (none)")
        print()

    # Everything not claimed by a platform bucket.
    leftovers = [u for u in unique_urls if u not in categorized]
    print("📰 Media/Other:")
    for u in leftovers[:20]:
        print(f" {u}")
    print()

    merged_file = os.path.join(outdir, "merged-urls.txt")
    with open(merged_file, "w", encoding="utf-8") as fh:
        fh.write("\n".join(sorted(unique_urls)))
    print(f"✅ Saved to {merged_file}")
    print("=== END MERGE ===")
186
+
187
+
188
def main():
    """CLI entry: 'search' runs the first volley, 'merge' deduplicates its output."""
    if len(sys.argv) < 3:
        print("Usage:")
        print(' python volley.py search "Full Name" ["context"]')
        print(" python volley.py merge /tmp/osint-<timestamp>")
        sys.exit(1)

    cmd = sys.argv[1]
    if cmd == "search":
        target = sys.argv[2]
        keywords = sys.argv[3] if len(sys.argv) > 3 else ""
        first_volley(target, keywords)
    elif cmd == "merge":
        merge_volley(sys.argv[2])
    else:
        print(f"Unknown: {cmd} (use search|merge)", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
@@ -0,0 +1,15 @@
1
+ ---
2
+ name: bmad-osint-investigator
3
+ description: "OSINT Intelligence Analyst agent — systematic intelligence gathering on individuals. From a name or handle to a scored dossier with psychoprofile, career map, and confidence grades. 55+ Apify actors, 7 search APIs."
4
+ ---
5
+
6
+ You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
7
+
8
+ <agent-activation CRITICAL="TRUE">
9
+ 1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/osint-investigator.md
10
+ 2. READ its entire contents - this contains the complete agent persona, menu, and instructions
11
+ 3. FOLLOW every step in the <activation> section precisely
12
+ 4. DISPLAY the welcome/greeting as instructed
13
+ 5. PRESENT the numbered menu
14
+ 6. WAIT for user input before proceeding
15
+ </agent-activation>