nightpay 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,174 @@
1
+ #!/usr/bin/env bash
2
+ # nightpay MIP-003 service — HTTP endpoints for Masumi registry
3
+ #
4
+ # Jobs persisted in SQLite — survives restarts, handles 1M+ jobs.
5
+ # Threaded server — handles concurrent requests.
6
+ #
7
+ # Usage: ./mip003-server.sh [port]
8
+ # Default port: 8090
9
+ #
10
+ # Register with Masumi after starting:
11
+ # curl -X POST http://localhost:3001/api/v1/registry \
12
+ # -H "token: $MASUMI_API_KEY" \
13
+ # -H "Content-Type: application/json" \
14
+ # -d '{
15
+ # "name": "nightpay",
16
+ # "description": "Anonymous community bounty board — pool shielded NIGHT, hire AI agents, get ZK receipts",
17
+ # "apiBaseUrl": "http://your-server:8090",
18
+ # "capabilityName": "nightpay-bounties",
19
+ # "capabilityVersion": "0.1.0",
20
+ # "pricingUnit": "lovelace",
21
+ # "pricingQuantity": "0",
22
+ # "network": "Preprod",
23
+ # "authorName": "nightpay",
24
+ # "authorContact": "nightpay@users.noreply.github.com",
25
+ # "authorOrganization": "nightpay"
26
+ # }'
27
+
28
set -euo pipefail

# Runtime configuration: listen port (argv[1]) and a private data directory.
PORT="${1:-8090}"
DATA_DIR="${DATA_DIR:-${HOME}/.nightpay}"
DB_PATH="${DATA_DIR}/jobs.db"

# Keep the job database readable by the operator only.
mkdir -p "$DATA_DIR"
chmod 700 "$DATA_DIR"

# The whole HTTP service below is implemented in embedded Python.
if ! command -v python3 >/dev/null 2>&1; then
    echo "python3 required"
    exit 1
fi

echo "nightpay MIP-003 service starting on port $PORT..."
40
+
41
+ python3 -c "
42
import http.server, json, uuid, sys, sqlite3, threading
from datetime import datetime, timezone

# Invocation (from the wrapping shell script): python3 -c <program> <port> <db-path>
PORT = int(sys.argv[1])
DB_PATH = sys.argv[2]

# One SQLite connection per handler thread — sqlite3 connection objects
# are not safe to share across threads by default.
local = threading.local()

def get_db():
    '''Return this thread's lazily-created SQLite connection.'''
    conn = getattr(local, 'conn', None)
    if conn is None:
        conn = sqlite3.connect(DB_PATH, check_same_thread=False)
        # WAL lets concurrent readers proceed while one thread writes.
        conn.execute('PRAGMA journal_mode=WAL')
        conn.execute('PRAGMA synchronous=NORMAL')
        conn.row_factory = sqlite3.Row
        local.conn = conn
    return conn

# Create the schema once, on the main thread, before the server starts.
boot = sqlite3.connect(DB_PATH)
boot.execute('PRAGMA journal_mode=WAL')
boot.executescript('''
CREATE TABLE IF NOT EXISTS jobs (
    job_id TEXT PRIMARY KEY,
    status TEXT NOT NULL DEFAULT \"running\",
    input_data TEXT,
    extra_input TEXT,
    result TEXT,
    started_at TEXT NOT NULL,
    updated_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status);
''')
boot.close()
75
+
76
class MIP003Handler(http.server.BaseHTTPRequestHandler):
    '''MIP-003 HTTP endpoints backed by the SQLite jobs table.

    GET  /availability        — liveness plus total/active job counts
    GET  /input_schema        — JSON schema for start_job input_data
    GET  /status/<job_id>     — stored job record (JSON columns decoded)
    POST /start_job           — create a job, returns its job_id
    POST /provide_input/<id>  — attach extra input to an existing job
    '''

    def log_message(self, fmt, *args):
        # Honor the supplied format string: the base class calls this with a
        # variable number of args, so indexing args[0] blindly can raise
        # IndexError and also drops the status code from the log line.
        print('[nightpay] ' + (fmt % args))

    def respond(self, code, data):
        # Serialize and send a JSON response with an explicit Content-Length.
        body = json.dumps(data).encode()
        self.send_response(code)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

    def do_GET(self):
        if self.path == '/availability':
            db = get_db()
            total = db.execute('SELECT COUNT(*) FROM jobs').fetchone()[0]
            active = db.execute('SELECT COUNT(*) FROM jobs WHERE status = ?', ('running',)).fetchone()[0]
            self.respond(200, {
                'status': 'available',
                'total_jobs': total,
                'active_jobs': active
            })

        elif self.path == '/input_schema':
            self.respond(200, {
                'type': 'object',
                'properties': {
                    'description': {
                        'type': 'string',
                        'description': 'Bounty job description'
                    },
                    'amount_specks': {
                        'type': 'integer',
                        'description': 'Bounty amount in NIGHT specks'
                    }
                },
                'required': ['description', 'amount_specks']
            })

        elif self.path.startswith('/status/'):
            job_id = self.path.split('/')[-1]
            db = get_db()
            row = db.execute('SELECT * FROM jobs WHERE job_id = ?', (job_id,)).fetchone()
            if not row:
                self.respond(404, {'error': 'job not found'})
            else:
                job = dict(row)
                # JSON columns are stored as text; decode before returning.
                for field in ('input_data', 'extra_input', 'result'):
                    if job.get(field):
                        job[field] = json.loads(job[field])
                self.respond(200, job)

        else:
            self.respond(404, {'error': 'not found'})

    def do_POST(self):
        length = int(self.headers.get('Content-Length', 0))
        raw = self.rfile.read(length) if length else b''
        # Reject malformed JSON with a 400 instead of letting json.loads
        # raise and tear down the connection with no response at all.
        try:
            body = json.loads(raw) if raw else {}
        except (json.JSONDecodeError, UnicodeDecodeError):
            self.respond(400, {'error': 'invalid JSON body'})
            return

        if self.path == '/start_job':
            job_id = str(uuid.uuid4())
            now = datetime.now(timezone.utc).isoformat()
            db = get_db()
            db.execute(
                'INSERT INTO jobs(job_id, status, input_data, started_at, updated_at) VALUES (?, ?, ?, ?, ?)',
                (job_id, 'running', json.dumps(body.get('input_data', {})), now, now)
            )
            db.commit()
            self.respond(200, {'job_id': job_id, 'status': 'running'})

        elif self.path.startswith('/provide_input/'):
            job_id = self.path.split('/')[-1]
            db = get_db()
            now = datetime.now(timezone.utc).isoformat()
            cur = db.execute(
                'UPDATE jobs SET extra_input = ?, updated_at = ? WHERE job_id = ?',
                (json.dumps(body), now, job_id)
            )
            if cur.rowcount == 0:
                # Release the implicit write transaction opened by the UPDATE
                # so this thread's connection does not keep holding the lock.
                db.rollback()
                self.respond(404, {'error': 'job not found'})
            else:
                db.commit()
                self.respond(200, {'status': 'input_received'})

        else:
            self.respond(404, {'error': 'not found'})
165
+
166
# Thread-per-request server; daemon threads so shutdown is never blocked
# by in-flight handlers.
class ThreadedHTTPServer(http.server.ThreadingHTTPServer):
    daemon_threads = True

server = ThreadedHTTPServer(('0.0.0.0', PORT), MIP003Handler)
print(f'[nightpay] MIP-003 threaded service ready on port {PORT}')
print(f'[nightpay] DB: {DB_PATH}')
print(f'[nightpay] Endpoints: /availability, /input_schema, /start_job, /status/<id>, /provide_input/<id>')
server.serve_forever()
174
+ " "$PORT" "$DB_PATH"
@@ -0,0 +1,194 @@
1
+ #!/usr/bin/env bash
2
+ # nightpay blocklist updater — pulls from open-source threat intel feeds
3
+ # and merges into a local rules file consumed by gateway.sh safety_check.
4
+ #
5
+ # Run via cron or systemd timer:
6
+ # 0 */6 * * * /path/to/update-blocklist.sh
7
+ #
8
+ # PRIVACY: fetches category patterns only — never sends bounty data upstream.
9
+ #
10
+ # Sources:
11
+ # - OISF/suricata-update — IDS rule categories (violence, drugs, fraud)
12
+ # - stamparm/maltrail — malicious keyword/phrase lists
13
+ # - operator custom rules — local overrides in custom-rules.json
14
+ #
15
+ # Output: ~/.nightpay/safety-rules.json (consumed by gateway.sh)
16
+ #
17
+ # Usage: ./update-blocklist.sh [--dry-run]
18
+
19
set -euo pipefail

SAFETY_DIR="${SAFETY_DIR:-${HOME}/.nightpay/safety}"
RULES_FILE="${SAFETY_DIR}/safety-rules.json"
CUSTOM_RULES="${SAFETY_DIR}/custom-rules.json"
COMMUNITY_REPORTS="${SAFETY_DIR}/community-reports.json"
FEED_CACHE="${SAFETY_DIR}/feed-cache"
LOCKFILE="${SAFETY_DIR}/update.lock"

DRY_RUN="${1:-}"

mkdir -p "$SAFETY_DIR" "$FEED_CACHE"
chmod 700 "$SAFETY_DIR"

# ─── Locking ──────────────────────────────────────────────────────────────────
# Prevent concurrent updates from corrupting the rules file.
# Acquisition is atomic: with noclobber set, the redirection is a single
# O_CREAT|O_EXCL open, which closes the check-then-write race the previous
# version had between testing for the lockfile and creating it.
if ! ( set -o noclobber; date +%s > "$LOCKFILE" ) 2>/dev/null; then
    LOCK_AGE=$(( $(date +%s) - $(cat "$LOCKFILE" 2>/dev/null || echo 0) ))
    if (( LOCK_AGE < 300 )); then
        echo "ERROR: Another update is running (lock age: ${LOCK_AGE}s)" >&2
        exit 1
    fi
    # Stale-lock takeover is best-effort (not atomic); acceptable because a
    # lock this old means the previous run died without cleaning up.
    echo "WARNING: Stale lock found (${LOCK_AGE}s) — overriding" >&2
    date +%s > "$LOCKFILE"
fi
trap 'rm -f "$LOCKFILE"' EXIT
45
+
46
+ # ─── Feed fetchers ────────────────────────────────────────────────────────────
47
+ # Each fetcher outputs JSON lines: {"category": "...", "pattern": "...", "source": "..."}
48
+
49
# Emit feed lines (pattern|category|source) from the stamparm/maltrail feed.
# PRIVACY: the fetch only confirms feed reachability and caches the list;
# emitted patterns come from the curated service-name list below — bounty
# data is never sent upstream.
fetch_stamparm_keywords() {
    local url="https://raw.githubusercontent.com/stamparm/maltrail/master/trails/static/suspicious/domain.txt"
    local cache="${FEED_CACHE}/stamparm-domains.txt"

    if ! curl -sf --max-time 30 -o "$cache.tmp" "$url" 2>/dev/null; then
        # Remove any partial download so a truncated temp file never lingers
        # (previously the .tmp file was left behind on failure).
        rm -f "$cache.tmp"
        echo "WARNING: Failed to fetch stamparm feed" >&2
        return 0
    fi
    mv "$cache.tmp" "$cache"

    # We don't use the domains directly — we emit known malicious service
    # names as pattern words for the merge step.
    python3 -c "
known_bad_services = [
    'ransomware', 'phishing', 'malware', 'botnet', 'c2', 'exploit-kit',
    'cryptojacking', 'credential-theft', 'keylogger', 'rat-trojan'
]
for svc in known_bad_services:
    print(f'{svc}|cyberattack|{svc}')
" 2>/dev/null || true
}
75
+
76
+ fetch_community_reports() {
77
+ # Load patterns derived from community complaints
78
+ # community-reports.json is built by the 'complaint' command in bounty-board.sh
79
+ if [ -f "$COMMUNITY_REPORTS" ]; then
80
+ python3 -c "
81
+ import json, sys, re
82
+
83
+ with open(sys.argv[1]) as f:
84
+ reports = json.load(f)
85
+
86
+ # A commitment that gets >= THRESHOLD complaints gets its category patterns promoted
87
+ THRESHOLD = 3
88
+
89
+ category_counts = {}
90
+ for report in reports.get('reports', []):
91
+ cat = report.get('category', 'unknown')
92
+ category_counts[cat] = category_counts.get(cat, 0) + 1
93
+
94
+ # Report which categories are trending in complaints
95
+ for cat, count in category_counts.items():
96
+ if count >= THRESHOLD and cat != 'other':
97
+ print(f'{cat}|community_report|community({count} reports)')
98
+ " "$COMMUNITY_REPORTS" 2>/dev/null || true
99
+ fi
100
+ }
101
+
102
# ─── Merge rules ──────────────────────────────────────────────────────────────
# Merge base rules + operator custom rules + feed lines (read on stdin from
# the fetchers via process substitution) into the atomic rules file.

python3 -c "
import json, sys, os, re
from datetime import datetime, timezone

# argv: <safety_dir> <rules_file> <custom_rules_file> <--dry-run or empty>
safety_dir = sys.argv[1]
rules_file = sys.argv[2]
custom_file = sys.argv[3]
dry_run = sys.argv[4] == '--dry-run'

# Base rules — the hardcoded set from gateway.sh, kept as authoritative source
BASE_RULES = [
    {'category': 'csam', 'pattern': r'\b(child|minor|underage|kid|teen)\b.*\b(sex|porn|nude|naked|exploit)\b', 'source': 'base'},
    {'category': 'csam', 'pattern': r'\b(sex|porn|nude|naked|exploit)\b.*\b(child|minor|underage|kid|teen)\b', 'source': 'base'},
    {'category': 'violence', 'pattern': r'\b(kill|assassinate|murder|execute)\b.*\b(person|people|someone|him|her|them|target)\b', 'source': 'base'},
    {'category': 'violence', 'pattern': r'\b(hire|find|pay).*\b(hitman|killer|assassin)\b', 'source': 'base'},
    {'category': 'violence', 'pattern': r'\bhit\s*man\b', 'source': 'base'},
    {'category': 'weapons_of_mass_destruction', 'pattern': r'\b(synthe|build|make|create|assemble)\b.*\b(bomb|bioweapon|chemical weapon|nerve agent|sarin|anthrax|ricin|nuclear|dirty bomb|explosive device)\b', 'source': 'base'},
    {'category': 'human_trafficking', 'pattern': r'\b(traffic|smuggle|exploit|enslave)\b.*\b(person|people|human|worker|organ|women|children)\b', 'source': 'base'},
    {'category': 'terrorism', 'pattern': r'\b(fund|finance|recruit|plan|support)\b.*\b(terror|jihad|extremis|insurrection|attack on)\b', 'source': 'base'},
    {'category': 'ncii', 'pattern': r'\b(deepfake|revenge porn|sextortion|non.?consensual)\b.*\b(nude|naked|intimate|image|video|photo)\b', 'source': 'base'},
    {'category': 'financial_fraud', 'pattern': r'\b(launder|counterfeit|forge)\b.*\b(money|currency|documents|passport|identity)\b', 'source': 'base'},
    {'category': 'financial_fraud', 'pattern': r'\b(evade|bypass|circumvent)\b.*\b(sanction|embargo|aml|kyc)\b', 'source': 'base'},
    {'category': 'infrastructure_attack', 'pattern': r'\b(attack|hack|disrupt|destroy|sabotage)\b.*\b(power grid|water supply|hospital|election|pipeline|dam)\b', 'source': 'base'},
    {'category': 'doxxing', 'pattern': r'\b(doxx|stalk|track|surveil|locate)\b.*\b(person|address|home|family|where .* live)\b', 'source': 'base'},
    {'category': 'drug_manufacturing', 'pattern': r'\b(synthe|cook|manufacture|produce)\b.*\b(meth|fentanyl|heroin|cocaine|mdma|lsd)\b', 'source': 'base'},
]

all_rules = list(BASE_RULES)

# Load operator custom rules; a bad override must never disable base rules.
if os.path.exists(custom_file):
    try:
        with open(custom_file) as f:
            custom = json.load(f)
        for rule in custom.get('rules', []):
            if 'category' in rule and 'pattern' in rule:
                # Validate the regex compiles before accepting it
                try:
                    re.compile(rule['pattern'])
                    rule['source'] = 'custom'
                    all_rules.append(rule)
                except re.error as e:
                    print(f'WARNING: Skipping invalid custom regex: {e}', file=sys.stderr)
    except (OSError, json.JSONDecodeError, KeyError) as e:
        # OSError added: os.path.exists does not guarantee the file is
        # readable (permissions, TOCTOU), and an uncaught error here would
        # abort the whole update under the shell's set -e.
        print(f'WARNING: Failed to load custom rules: {e}', file=sys.stderr)

# Read feed data from stdin (piped from fetchers), format: field0|field1|source
feed_lines = []
for line in sys.stdin:
    line = line.strip()
    if not line or line.startswith('#'):
        continue
    parts = line.split('|', 2)
    if len(parts) == 3:
        # NOTE(review): parts[1] (the fetchers' middle field, e.g.
        # 'cyberattack' / 'community_report') is ignored and parts[0] doubles
        # as both category and pattern word — confirm the intended mapping
        # against the fetcher output format before changing it.
        feed_lines.append({
            'category': parts[0],
            'pattern': r'\b' + re.escape(parts[0]) + r'\b',
            'source': f'feed:{parts[2]}'
        })

all_rules.extend(feed_lines)

# Deduplicate by pattern; first occurrence wins, so base rules take priority.
seen = set()
deduped = []
for rule in all_rules:
    if rule['pattern'] not in seen:
        seen.add(rule['pattern'])
        deduped.append(rule)

output = {
    'version': datetime.now(timezone.utc).isoformat(),
    'rule_count': len(deduped),
    # sorted() makes the output deterministic across runs (a bare set has
    # arbitrary ordering, which produced spurious diffs in the rules file).
    'sources': sorted(set(r.get('source', 'unknown') for r in deduped)),
    'rules': deduped
}

if dry_run:
    print(json.dumps(output, indent=2))
    print(f'\n--- DRY RUN: {len(deduped)} rules would be written ---', file=sys.stderr)
else:
    # Atomic write — write to temp then rename; os.replace always overwrites.
    tmp = rules_file + '.tmp'
    with open(tmp, 'w') as f:
        json.dump(output, f, indent=2)
    os.replace(tmp, rules_file)
    print(f'Updated {rules_file}: {len(deduped)} rules from {len(output[\"sources\"])} sources')
" "$SAFETY_DIR" "$RULES_FILE" "$CUSTOM_RULES" "$DRY_RUN" < <(
    fetch_stamparm_keywords
    fetch_community_reports
)