@wiimdy/openfunderse-agents 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +51 -0
- package/README.md +252 -0
- package/dist/clawbot-cli.d.ts +1 -0
- package/dist/clawbot-cli.js +114 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.js +34 -0
- package/dist/lib/aa-client.d.ts +67 -0
- package/dist/lib/aa-client.js +353 -0
- package/dist/lib/relayer-client.d.ts +171 -0
- package/dist/lib/relayer-client.js +486 -0
- package/dist/lib/signer.d.ts +38 -0
- package/dist/lib/signer.js +103 -0
- package/dist/participant-cli.d.ts +1 -0
- package/dist/participant-cli.js +399 -0
- package/dist/reddit-mvp.d.ts +1 -0
- package/dist/reddit-mvp.js +546 -0
- package/dist/skills/participant/index.d.ts +116 -0
- package/dist/skills/participant/index.js +462 -0
- package/dist/skills/strategy/index.d.ts +117 -0
- package/dist/skills/strategy/index.js +879 -0
- package/dist/strategy-cli.d.ts +1 -0
- package/dist/strategy-cli.js +867 -0
- package/package.json +42 -0
|
@@ -0,0 +1,546 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { mkdir, readFile, writeFile } from "node:fs/promises";
|
|
3
|
+
import { basename, dirname, join, resolve } from "node:path";
|
|
4
|
+
import { encodeAbiParameters, getAddress, keccak256, parseAbiParameters } from "viem";
|
|
5
|
+
// Largest value representable as a Solidity uint64; used to range-check claim timestamps.
const UINT64_MAX = (1n << 64n) - 1n;
// Default number of posts sampled from the subreddit "new" listing (Reddit caps listings at 100).
const DEFAULT_LIMIT = 25;
// Subreddit crawled when --subreddit is not given.
const DEFAULT_SUBREDDIT = "CryptoCurrency";
// Keywords matched (case-insensitively) against post titles when --keywords is not given.
const DEFAULT_KEYWORDS = ["monad"];
// Placeholder crawler address used when neither --crawler nor CRAWLER_ADDRESS is set.
const DEFAULT_CRAWLER = "0x1111111111111111111111111111111111111111";
// Identifiers embedded in every claim payload; these feed claimHash, so changing
// them invalidates previously produced claim hashes.
const CLAIM_SCHEMA_ID = "REDDIT_KEYWORD_MINING_V1";
const CLAIM_SOURCE_TYPE = "REDDIT";
const CLAIM_EVIDENCE_TYPE = "RECrawlConsensus";
// User-Agent sent to Reddit (Reddit rejects default library agents; see their API rules).
const DEFAULT_USER_AGENT = "openclaw-mvp-crawler/0.1 (+https://github.com/wiimdy/agent)";
|
|
14
|
+
function parseCli(argv) {
|
|
15
|
+
const [command, ...rest] = argv;
|
|
16
|
+
const options = new Map();
|
|
17
|
+
const flags = new Set();
|
|
18
|
+
const positionals = [];
|
|
19
|
+
for (let i = 0; i < rest.length; i += 1) {
|
|
20
|
+
const token = rest[i];
|
|
21
|
+
if (!token.startsWith("--")) {
|
|
22
|
+
positionals.push(token);
|
|
23
|
+
continue;
|
|
24
|
+
}
|
|
25
|
+
const option = token.slice(2);
|
|
26
|
+
if (option.includes("=")) {
|
|
27
|
+
const [key, ...valueParts] = option.split("=");
|
|
28
|
+
options.set(key, valueParts.join("="));
|
|
29
|
+
continue;
|
|
30
|
+
}
|
|
31
|
+
const next = rest[i + 1];
|
|
32
|
+
if (next && !next.startsWith("--")) {
|
|
33
|
+
options.set(option, next);
|
|
34
|
+
i += 1;
|
|
35
|
+
continue;
|
|
36
|
+
}
|
|
37
|
+
flags.add(option);
|
|
38
|
+
}
|
|
39
|
+
return {
|
|
40
|
+
command,
|
|
41
|
+
options,
|
|
42
|
+
flags,
|
|
43
|
+
positionals
|
|
44
|
+
};
|
|
45
|
+
}
|
|
46
|
+
/** Read option `key` from a parseCli result, returning `fallback` when absent. */
function optionOrDefault(parsed, key, fallback) {
    const value = parsed.options.get(key);
    return value === undefined || value === null ? fallback : value;
}
|
|
49
|
+
/**
 * Read option `key` from a parseCli result; throws when the option is
 * missing or empty (any falsy value is treated as missing).
 */
function requiredOption(parsed, key) {
    const value = parsed.options.get(key);
    if (value) {
        return value;
    }
    throw new Error(`missing required option --${key}`);
}
|
|
56
|
+
/** Canonicalize a string for hashing: Unicode NFC composition plus whitespace trim. */
function normalizeText(value) {
    const composed = value.normalize("NFC");
    return composed.trim();
}
|
|
59
|
+
// Canonicalize an EVM address via viem's getAddress (EIP-55 checksumming);
// viem throws for strings that are not valid addresses.
function normalizeAddress(value) {
    return getAddress(value);
}
|
|
62
|
+
/** Throw unless `value` (a bigint) fits in an unsigned 64-bit integer. */
function assertUint64(value, label) {
    const max = (1n << 64n) - 1n; // inline of UINT64_MAX
    const inRange = value >= 0n && value <= max;
    if (!inRange) {
        throw new Error(`${label} must be uint64`);
    }
}
|
|
67
|
+
/**
 * Return a copy of a claim payload with every text field NFC-normalized and
 * trimmed, and the crawler address checksummed, so that claimHash is stable
 * across equivalent inputs. The input object is not mutated.
 */
function canonicalClaim(input) {
    const textFields = [
        "schemaId",
        "sourceType",
        "sourceRef",
        "selector",
        "extracted",
        "extractedType",
        "evidenceType",
        "evidenceURI"
    ];
    const output = { ...input };
    for (const field of textFields) {
        output[field] = normalizeText(input[field]);
    }
    output.crawler = normalizeAddress(input.crawler);
    // notes is optional; keep undefined as-is so downstream `?? ""` applies.
    output.notes = input.notes === undefined ? undefined : normalizeText(input.notes);
    return output;
}
|
|
82
|
+
// Compute the canonical on-chain identifier of a claim: keccak256 over the
// ABI-encoded, normalized fields. The parameter string and argument order
// below define the hash format and must not be reordered — any change
// invalidates previously published claim hashes.
function claimHash(payload) {
    const canonical = canonicalClaim(payload);
    // timestamp is encoded as uint64; reject out-of-range values up front.
    assertUint64(canonical.timestamp, "timestamp");
    return keccak256(encodeAbiParameters(parseAbiParameters("string schemaId,string sourceType,string sourceRef,string selector,string extracted,string extractedType,uint64 timestamp,bytes32 responseHash,string evidenceType,string evidenceURI,address crawler,string notes"), [
        canonical.schemaId,
        canonical.sourceType,
        canonical.sourceRef,
        canonical.selector,
        canonical.extracted,
        canonical.extractedType,
        canonical.timestamp,
        canonical.responseHash,
        canonical.evidenceType,
        canonical.evidenceURI,
        canonical.crawler,
        // Absent notes hash identically to an empty string.
        canonical.notes ?? ""
    ]));
}
|
|
100
|
+
/** True for plain-object-like values: non-null objects that are not arrays. */
function isRecord(value) {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === "object";
}
|
|
103
|
+
/**
 * Recursively rebuild a JSON value with object keys in locale-compare order,
 * so JSON.stringify produces a deterministic byte sequence for hashing.
 * Arrays keep their order; primitives pass through unchanged.
 */
function toStableJsonValue(value) {
    if (Array.isArray(value)) {
        return value.map((item) => toStableJsonValue(item));
    }
    // Inline plain-object check (non-null object; arrays handled above).
    if (typeof value !== "object" || value === null) {
        return value;
    }
    const result = {};
    const sortedKeys = Object.keys(value).sort((a, b) => a.localeCompare(b));
    for (const key of sortedKeys) {
        result[key] = toStableJsonValue(value[key]);
    }
    return result;
}
|
|
117
|
+
/** Deterministic JSON serialization: keys sorted at every nesting level. */
function stableStringify(value) {
    const stable = toStableJsonValue(value);
    return JSON.stringify(stable);
}
|
|
120
|
+
/** SHA-256 of `value`, returned as a 0x-prefixed lowercase hex string. */
function sha256Hex(value) {
    const digest = createHash("sha256").update(value).digest("hex");
    return `0x${digest}`;
}
|
|
123
|
+
/**
 * Split a comma-separated keyword list, lowercase and trim each entry, drop
 * empties, and deduplicate while preserving first-seen order.
 */
function parseKeywords(raw) {
    const normalized = raw
        .split(",")
        .map((chunk) => chunk.trim().toLowerCase())
        .filter((keyword) => keyword.length > 0);
    return [...new Set(normalized)];
}
|
|
133
|
+
/**
 * Best-effort conversion to an integer: finite numbers are truncated toward
 * zero, non-empty numeric strings are parsed then truncated, everything else
 * (including NaN/Infinity and unparsable strings) yields null.
 */
function toInteger(input) {
    switch (typeof input) {
        case "number":
            return Number.isFinite(input) ? Math.trunc(input) : null;
        case "string": {
            if (input.length === 0) {
                return null;
            }
            const numeric = Number(input);
            return Number.isFinite(numeric) ? Math.trunc(numeric) : null;
        }
        default:
            return null;
    }
}
|
|
145
|
+
/** Turn a relative Reddit permalink into an absolute URL; pass absolute URLs through. */
function normalizePermalink(value) {
    const isAbsolute = ["http://", "https://"].some((prefix) => value.startsWith(prefix));
    return isAbsolute ? value : `https://www.reddit.com${value}`;
}
|
|
151
|
+
/**
 * Extract the minimal post sample (id, title, absolute permalink, createdUtc)
 * from a Reddit listing response. Children that lack any required field are
 * skipped; a malformed top-level shape throws.
 */
function parsePostsFromListing(listingJson) {
    if (!isRecord(listingJson)) {
        throw new Error("reddit listing response is not an object");
    }
    const data = listingJson.data;
    if (!isRecord(data) || !Array.isArray(data.children)) {
        throw new Error("reddit listing response missing data.children");
    }
    const posts = [];
    for (const child of data.children) {
        const record = isRecord(child) && isRecord(child.data) ? child.data : null;
        if (record === null) {
            continue;
        }
        const id = typeof record.id === "string" ? record.id : null;
        const title = typeof record.title === "string" ? record.title : null;
        const createdUtc = toInteger(record.created_utc);
        let permalink = null;
        if (typeof record.permalink === "string") {
            permalink = normalizePermalink(record.permalink);
        }
        // Every field is required; drop partial records rather than guessing.
        if (id === null || !title || permalink === null || createdUtc === null) {
            continue;
        }
        posts.push({ id, title, permalink, createdUtc });
    }
    return posts;
}
|
|
181
|
+
/**
 * Count, over the sampled posts, how many post titles contain each keyword
 * (case-insensitive substring match, at most once per post per keyword),
 * how many posts matched at least one keyword, and the grand total of hits.
 *
 * Counts are accumulated in a Map instead of a plain object: with the old
 * `keywordHits[keyword] = 0` approach a keyword literally named "__proto__"
 * hit the object's prototype setter, so its count was silently lost.
 * Object.fromEntries defines keys as own data properties, which is safe.
 *
 * @param posts    array of { title: string, ... } samples
 * @param keywords array of lowercase keywords (see parseKeywords)
 * @returns { keywordHits, postsWithKeyword, totalKeywordHits }
 */
function computeKeywordStats(posts, keywords) {
    const counts = new Map(keywords.map((keyword) => [keyword, 0]));
    let postsWithKeyword = 0;
    for (const post of posts) {
        const loweredTitle = post.title.toLowerCase();
        let hasAny = false;
        for (const keyword of keywords) {
            if (loweredTitle.includes(keyword)) {
                counts.set(keyword, (counts.get(keyword) ?? 0) + 1);
                hasAny = true;
            }
        }
        if (hasAny) {
            postsWithKeyword += 1;
        }
    }
    let totalKeywordHits = 0;
    for (const count of counts.values()) {
        totalKeywordHits += count;
    }
    return {
        keywordHits: Object.fromEntries(counts),
        postsWithKeyword,
        totalKeywordHits
    };
}
|
|
210
|
+
/** GET `url` with the given User-Agent and parse the JSON body; throws on non-2xx. */
async function fetchJson(url, userAgent) {
    const headers = {
        "User-Agent": userAgent,
        Accept: "application/json"
    };
    const response = await fetch(url, { headers });
    if (response.ok) {
        return (await response.json());
    }
    throw new Error(`request failed (${response.status}) for ${url}`);
}
|
|
222
|
+
/** Copy a claim payload with its bigint timestamp rendered as a decimal string (JSON-safe). */
function serializedClaimPayload(payload) {
    const serialized = { ...payload };
    serialized.timestamp = payload.timestamp.toString();
    return serialized;
}
|
|
228
|
+
/** Inverse of serializedClaimPayload: restore the timestamp field to a bigint. */
function deserializeClaimPayload(payload) {
    const restored = { ...payload };
    restored.timestamp = BigInt(payload.timestamp);
    return restored;
}
|
|
234
|
+
/** Two-space pretty JSON with bigints rendered as decimal strings (JSON.stringify rejects raw bigints). */
function stringifyPretty(value) {
    const replacer = (_key, entry) => (typeof entry === "bigint" ? entry.toString() : entry);
    return JSON.stringify(value, replacer, 2);
}
|
|
237
|
+
/**
 * Derive the bundle base name from a claim file path: strip ".claim.json"
 * first, else ".json", else return the bare file name unchanged.
 */
function fromClaimFileBaseName(claimFile) {
    const name = basename(claimFile);
    for (const suffix of [".claim.json", ".json"]) {
        if (name.endsWith(suffix)) {
            return name.slice(0, name.length - suffix.length);
        }
    }
    return name;
}
|
|
247
|
+
/**
 * Crawl the subreddit's "new" listing, compute keyword statistics over the
 * sampled post titles, and persist two artifacts under params.outDir:
 *   <base>.evidence.json — the raw post sample used for the hashes, and
 *   <base>.claim.json    — the claim bundle (payload + claimHash + stats).
 * Returns { claimFile, evidenceFile, bundle }.
 *
 * Both responseHash and the `extracted` field are built with stableStringify,
 * so verifyClaimBundle can recompute them byte-for-byte from a recrawl.
 */
async function mineRedditClaim(params) {
    const listingUrl = new URL(`https://www.reddit.com/r/${params.subreddit}/new.json`);
    listingUrl.searchParams.set("limit", String(params.limit));
    // raw_json=1 disables Reddit's HTML entity escaping in returned text.
    listingUrl.searchParams.set("raw_json", "1");
    const listingJson = await fetchJson(listingUrl.toString(), params.userAgent);
    // Defensive slice: Reddit may return more children than requested.
    const posts = parsePostsFromListing(listingJson).slice(0, params.limit);
    if (posts.length === 0) {
        throw new Error(`no posts found from subreddit r/${params.subreddit}`);
    }
    // Unix seconds; also doubles as the uint64 claim timestamp below.
    const sampledAt = Math.trunc(Date.now() / 1000);
    const keywordStats = computeKeywordStats(posts, params.keywords);
    // Full extraction result embedded (stably stringified) in the claim.
    const extraction = {
        version: "REDDIT_KEYWORD_MINING_V1",
        subreddit: params.subreddit,
        listing: "new",
        limit: params.limit,
        keywords: params.keywords,
        sampledAt,
        sampleSize: posts.length,
        posts,
        ...keywordStats
    };
    // Only the re-crawlable subset feeds responseHash (no timestamps/stats),
    // so a later recrawl of the same post ids can reproduce the hash.
    const canonicalEvidencePayload = {
        subreddit: params.subreddit,
        keywords: params.keywords,
        listing: "new",
        posts
    };
    const responseHash = sha256Hex(stableStringify(canonicalEvidencePayload));
    const extracted = stableStringify(extraction);
    const outDir = resolve(params.outDir);
    await mkdir(outDir, { recursive: true });
    // File-name slug: keywords joined with '-', restricted to [a-z0-9-].
    const keywordSlug = params.keywords.join("-").replace(/[^a-z0-9-]/g, "");
    const baseName = `${params.subreddit.toLowerCase()}-${keywordSlug || "keywords"}-${sampledAt}`;
    const evidenceFile = join(outDir, `${baseName}.evidence.json`);
    const claimFile = join(outDir, `${baseName}.claim.json`);
    const claimPayload = {
        schemaId: CLAIM_SCHEMA_ID,
        sourceType: CLAIM_SOURCE_TYPE,
        sourceRef: listingUrl.toString(),
        selector: `listing=new;field=title;keywords=${params.keywords.join(",")};limit=${params.limit}`,
        extracted,
        extractedType: "application/json",
        timestamp: BigInt(sampledAt),
        responseHash,
        evidenceType: CLAIM_EVIDENCE_TYPE,
        evidenceURI: `file://${evidenceFile}`,
        crawler: params.crawler,
        notes: "responseHash=sha256(stable-json(listing:new + post[id,title,permalink,createdUtc]))"
    };
    const computedClaimHash = claimHash(claimPayload);
    const evidenceDoc = {
        version: "openclaw-reddit-evidence-v1",
        listingUrl: listingUrl.toString(),
        fetchedAt: new Date().toISOString(),
        posts
    };
    await writeFile(evidenceFile, stringifyPretty(evidenceDoc));
    const bundle = {
        version: "openclaw-reddit-claim-bundle-v1",
        createdAt: new Date().toISOString(),
        source: {
            provider: "reddit",
            listingUrl: listingUrl.toString(),
            subreddit: params.subreddit,
            // postIds let the verifier re-fetch exactly this sample via /by_id.
            postIds: posts.map((post) => post.id)
        },
        mined: extraction,
        // bigint timestamp serialized to a string for JSON persistence.
        claimPayload: serializedClaimPayload(claimPayload),
        claimHash: computedClaimHash,
        evidenceFile
    };
    await writeFile(claimFile, stringifyPretty(bundle));
    return {
        claimFile,
        evidenceFile,
        bundle
    };
}
|
|
326
|
+
/**
 * Re-fetch the exact post sample via Reddit's /by_id endpoint and return the
 * posts re-ordered to match `postIds`; throws if any id is no longer
 * retrievable (deleted/removed posts break verification).
 */
async function recrawlPostsByIds(postIds, userAgent) {
    if (postIds.length === 0) {
        return [];
    }
    // /by_id expects t3_-prefixed "fullnames", comma-separated.
    const fullnames = postIds.map((id) => `t3_${id}`).join(",");
    const url = new URL(`https://www.reddit.com/by_id/${fullnames}.json`);
    url.searchParams.set("raw_json", "1");
    const byIdJson = await fetchJson(url.toString(), userAgent);
    const postsById = new Map(parsePostsFromListing(byIdJson).map((post) => [post.id, post]));
    return postIds.map((id) => {
        const post = postsById.get(id);
        if (!post) {
            throw new Error(`by_id recrawl missing post id=${id}`);
        }
        return post;
    });
}
|
|
349
|
+
/**
 * Parse and shape-check a persisted claim bundle. Throws on an unknown
 * version tag or when the fields verification depends on are missing.
 */
function parseStoredClaimBundle(raw) {
    const parsed = JSON.parse(raw);
    if (!parsed || parsed.version !== "openclaw-reddit-claim-bundle-v1") {
        throw new Error("unsupported claim bundle format");
    }
    const payload = parsed.claimPayload;
    if (!payload || typeof payload.timestamp !== "string") {
        throw new Error("claim bundle missing claimPayload.timestamp");
    }
    const source = parsed.source;
    if (!source || !Array.isArray(source.postIds)) {
        throw new Error("claim bundle missing source.postIds");
    }
    return parsed;
}
|
|
362
|
+
/** Deep structural equality via key-sorted JSON serialization. */
function objectsEqual(a, b) {
    const left = stableStringify(a);
    const right = stableStringify(b);
    return left === right;
}
|
|
365
|
+
/**
 * Verify a stored claim bundle end-to-end:
 *  1. recompute claimHash from the stored payload,
 *  2. recrawl the same post ids from Reddit,
 *  3. recompute responseHash and the keyword statistics from the recrawl,
 * collecting every discrepancy into `mismatches`. Returns a verification
 * report ({ ok, mismatches, expected/computed hashes, recrawled stats });
 * it never throws for content mismatches, only for I/O or format errors.
 */
async function verifyClaimBundle(params) {
    const claimFile = resolve(params.claimFile);
    const raw = await readFile(claimFile, "utf8");
    const stored = parseStoredClaimBundle(raw);
    // Restore the bigint timestamp before re-hashing.
    const payload = deserializeClaimPayload(stored.claimPayload);
    const mismatches = [];
    const recomputedClaimHash = claimHash(payload);
    // Hex hashes are compared case-insensitively.
    if (recomputedClaimHash.toLowerCase() !== stored.claimHash.toLowerCase()) {
        mismatches.push("claimHash mismatch against claim payload");
    }
    const recrawledPosts = await recrawlPostsByIds(stored.source.postIds, params.userAgent);
    const recrawledStats = computeKeywordStats(recrawledPosts, stored.mined.keywords);
    // Rebuilt with the same field set/order as mineRedditClaim's `extraction`.
    const recrawledExtraction = {
        version: "REDDIT_KEYWORD_MINING_V1",
        subreddit: stored.mined.subreddit,
        listing: "new",
        limit: stored.mined.limit,
        keywords: stored.mined.keywords,
        sampledAt: stored.mined.sampledAt,
        sampleSize: recrawledPosts.length,
        posts: recrawledPosts,
        ...recrawledStats
    };
    // Mirrors mineRedditClaim's canonicalEvidencePayload exactly.
    const recrawledResponseHash = sha256Hex(stableStringify({
        subreddit: stored.mined.subreddit,
        keywords: stored.mined.keywords,
        listing: "new",
        posts: recrawledPosts
    }));
    if (payload.responseHash.toLowerCase() !== recrawledResponseHash.toLowerCase()) {
        mismatches.push("responseHash mismatch after recrawl");
    }
    if (!objectsEqual(stored.mined.posts, recrawledPosts)) {
        mismatches.push("post sample mismatch (title/permalink/createdUtc changed)");
    }
    if (!objectsEqual(stored.mined.keywordHits, recrawledStats.keywordHits)) {
        mismatches.push("keywordHits mismatch");
    }
    if (stored.mined.postsWithKeyword !== recrawledStats.postsWithKeyword) {
        mismatches.push("postsWithKeyword mismatch");
    }
    if (stored.mined.totalKeywordHits !== recrawledStats.totalKeywordHits) {
        mismatches.push("totalKeywordHits mismatch");
    }
    // The claim's embedded `extracted` JSON must also be reproducible.
    const extractedFromRecrawl = stableStringify(recrawledExtraction);
    if (payload.extracted !== extractedFromRecrawl) {
        mismatches.push("extracted payload mismatch after recrawl");
    }
    return {
        ok: mismatches.length === 0,
        checkedAt: new Date().toISOString(),
        claimFile,
        claimHashExpected: stored.claimHash,
        claimHashComputed: recomputedClaimHash,
        responseHashExpected: payload.responseHash,
        responseHashComputed: recrawledResponseHash,
        mismatches,
        recrawled: {
            sampleSize: recrawledPosts.length,
            postsWithKeyword: recrawledStats.postsWithKeyword,
            totalKeywordHits: recrawledStats.totalKeywordHits,
            keywordHits: recrawledStats.keywordHits
        }
    };
}
|
|
430
|
+
// Print CLI usage to stdout. The template literal is user-facing output;
// defaults are interpolated from the module constants so the help text
// cannot drift from the actual behavior.
function printUsage() {
    console.log(`
[agents] Reddit MVP crawler/verifier

commands:
  crawl-reddit
    --subreddit <name> (default: ${DEFAULT_SUBREDDIT})
    --keywords <csv> (default: ${DEFAULT_KEYWORDS.join(",")})
    --limit <n> (default: ${DEFAULT_LIMIT})
    --out-dir <path> (default: ./data/claims)
    --crawler <0xaddress> (default: env CRAWLER_ADDRESS or ${DEFAULT_CRAWLER})
    --user-agent <string> (default: env REDDIT_USER_AGENT)

  verify-reddit-claim
    --claim <file>
    --user-agent <string> (default: env REDDIT_USER_AGENT)

  mvp-reddit-flow
    crawl-reddit -> verify-reddit-claim in one command
`);
}
|
|
451
|
+
/**
 * Handle the crawl-reddit / mvp-reddit-flow commands: validate CLI options,
 * mine a claim bundle, print a summary, and — when verifyAfter is true —
 * immediately verify the bundle, persist the verification report next to it,
 * and set exit code 2 on verification failure.
 */
async function runCrawlCommand(parsed, verifyAfter) {
    const subreddit = optionOrDefault(parsed, "subreddit", DEFAULT_SUBREDDIT).trim();
    if (subreddit.length === 0) {
        throw new Error("subreddit cannot be empty");
    }
    const keywords = parseKeywords(optionOrDefault(parsed, "keywords", DEFAULT_KEYWORDS.join(",")));
    if (keywords.length === 0) {
        throw new Error("at least one keyword is required");
    }
    const limitRaw = optionOrDefault(parsed, "limit", String(DEFAULT_LIMIT));
    const limit = Number.parseInt(limitRaw, 10);
    // Reddit listings cap at 100 items per request.
    if (!Number.isFinite(limit) || limit < 1 || limit > 100) {
        throw new Error("--limit must be an integer between 1 and 100");
    }
    const outDir = optionOrDefault(parsed, "out-dir", resolve(process.cwd(), "data", "claims"));
    // CLI option wins over CRAWLER_ADDRESS env, which wins over the placeholder default.
    const crawler = optionOrDefault(parsed, "crawler", process.env.CRAWLER_ADDRESS ?? DEFAULT_CRAWLER);
    const userAgent = optionOrDefault(parsed, "user-agent", process.env.REDDIT_USER_AGENT ?? DEFAULT_USER_AGENT);
    const mined = await mineRedditClaim({
        subreddit,
        keywords,
        limit,
        crawler: crawler,
        outDir,
        userAgent
    });
    console.log("[agents] crawl-reddit complete");
    console.log(stringifyPretty({
        claimFile: mined.claimFile,
        evidenceFile: mined.evidenceFile,
        claimHash: mined.bundle.claimHash,
        sourceRef: mined.bundle.claimPayload.sourceRef,
        sampleSize: mined.bundle.mined.sampleSize,
        keywordHits: mined.bundle.mined.keywordHits
    }));
    if (!verifyAfter) {
        return;
    }
    // mvp-reddit-flow: verify the bundle we just produced.
    const verification = await verifyClaimBundle({
        claimFile: mined.claimFile,
        userAgent
    });
    const baseName = fromClaimFileBaseName(mined.claimFile);
    // Report sits next to the claim file as <base>.verification.json.
    const verificationFile = join(dirname(mined.claimFile), `${baseName}.verification.json`);
    await writeFile(verificationFile, stringifyPretty(verification));
    console.log("[agents] mvp-reddit-flow verification complete");
    console.log(stringifyPretty({
        verificationFile,
        ok: verification.ok,
        mismatches: verification.mismatches
    }));
    if (!verification.ok) {
        // Exit code 2 signals verification failure without aborting cleanup.
        process.exitCode = 2;
    }
}
|
|
505
|
+
/**
 * Handle the verify-reddit-claim command: verify the bundle named by --claim,
 * write <base>.verification.json next to it, print a summary, and set exit
 * code 2 when verification finds mismatches.
 */
async function runVerifyCommand(parsed) {
    const claimFile = requiredOption(parsed, "claim");
    const userAgent = optionOrDefault(parsed, "user-agent", process.env.REDDIT_USER_AGENT ?? DEFAULT_USER_AGENT);
    const verification = await verifyClaimBundle({
        claimFile,
        userAgent
    });
    const absoluteClaimFile = resolve(claimFile);
    const baseName = fromClaimFileBaseName(absoluteClaimFile);
    const verificationFile = join(dirname(absoluteClaimFile), `${baseName}.verification.json`);
    await writeFile(verificationFile, stringifyPretty(verification));
    console.log("[agents] verify-reddit-claim complete");
    console.log(stringifyPretty({
        verificationFile,
        ok: verification.ok,
        mismatches: verification.mismatches
    }));
    if (!verification.ok) {
        // Exit code 2 signals verification failure to callers/scripts.
        process.exitCode = 2;
    }
}
|
|
526
|
+
/**
 * CLI entry point: parse argv and dispatch to the matching subcommand.
 * No command, "help", or a --help flag prints usage; anything unknown throws.
 */
export async function runRedditMvpCli(argv) {
    const parsed = parseCli(argv);
    const command = parsed.command ?? "";
    const wantsHelp = command.length === 0 || command === "help" || parsed.flags.has("help");
    if (wantsHelp) {
        printUsage();
        return;
    }
    switch (command) {
        case "crawl-reddit":
            await runCrawlCommand(parsed, false);
            return;
        case "verify-reddit-claim":
            await runVerifyCommand(parsed);
            return;
        case "mvp-reddit-flow":
            // Crawl then immediately verify the freshly mined bundle.
            await runCrawlCommand(parsed, true);
            return;
        default:
            throw new Error(`unknown command: ${command}`);
    }
}
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { type Address, type ClaimPayload } from '@claw/protocol-sdk';
|
|
2
|
+
import { type RelayerClientOptions } from '../../lib/relayer-client.js';
|
|
3
|
+
import { type BotSignerOptions } from '../../lib/signer.js';
|
|
4
|
+
/** Task envelope instructing a participant bot to mine a single claim from a data source. */
export interface MineClaimInput {
    taskType: 'mine_claim';
    fundId: string;
    roomId: string;
    epochId: number;
    /** Where and how to crawl; `extractor` shape is source-specific. */
    sourceSpec: {
        sourceSpecId: string;
        sourceRef: string;
        extractor: Record<string, unknown>;
        /** Maximum acceptable age of the crawled data, in seconds. */
        freshnessSeconds: number;
        /** Optional allow-list of hosts the crawler may contact — presumably enforced by the miner; verify in implementation. */
        allowHosts?: string[];
    };
    /** Token the claim is about. */
    tokenContext: {
        symbol: string;
        address: string;
    };
    crawlerAddress?: Address;
    /** Optional cap on the fetched response size, in bytes. */
    maxResponseBytes?: number;
}
/** ClaimPayload with the bigint `timestamp` replaced by its decimal-string form (JSON-safe). */
export interface SerializedClaimPayload extends Omit<ClaimPayload, 'timestamp'> {
    timestamp: string;
}
/** Result of a successful mine: hashes plus the canonical payload that produced them. */
export interface MineClaimObservation {
    claimHash: string;
    sourceSpecId: string;
    token: string;
    /** Unix-seconds sample time — NOTE(review): units inferred from sibling code; confirm. */
    timestamp: number;
    extracted: string;
    responseHash: string;
    evidenceURI: string;
    crawler: string;
    canonicalPayload: SerializedClaimPayload;
}
/** Outcome envelope for mineClaim; `observation` is present on success. */
export interface MineClaimOutput {
    status: 'OK' | 'ERROR';
    taskType: 'mine_claim';
    fundId: string;
    epochId: number;
    observation?: MineClaimObservation;
    /** Self-reported confidence in the observation. */
    confidence: number;
    assumptions: string[];
    reasonCode?: string;
    error?: string;
}
|
|
48
|
+
/** Task envelope asking a validator bot to check a claim's (or intent's) validity. */
export interface VerifyClaimInput {
    taskType: 'verify_claim_or_intent_validity';
    fundId: string;
    roomId: string;
    epochId: number;
    subjectType: 'CLAIM' | 'INTENT';
    subjectHash: string;
    subjectPayload: Record<string, unknown>;
    /** Constraints the verifier applies. */
    validationPolicy: {
        /** Whether the verifier must be able to reproduce the subject from source. */
        reproducible: boolean;
        maxDataAgeSeconds: number;
    };
}
/** Verification result with a three-way verdict and a machine-readable reason code. */
export interface VerifyClaimOutput {
    status: 'OK' | 'ERROR';
    taskType: 'verify_claim_or_intent_validity';
    fundId: string;
    roomId: string;
    epochId: number;
    subjectType: 'CLAIM' | 'INTENT';
    subjectHash: string;
    verdict: 'PASS' | 'FAIL' | 'NEED_MORE_EVIDENCE';
    /** Human-readable explanation of the verdict. */
    reason: string;
    reasonCode: 'OK' | 'MISSING_FIELDS' | 'INVALID_SCOPE' | 'STALE_DATA' | 'REPRODUCTION_FAILED' | 'HASH_MISMATCH';
    /** Draft fields for a subsequent on-chain attestation, if the verdict allows one. */
    attestationDraft?: {
        validator: string;
        expiresAt: number;
        nonce: string;
    };
    confidence: number;
    assumptions: string[];
    error?: string;
}
|
|
81
|
+
/** Request to push a previously mined observation to the relayer. */
export interface SubmitMinedClaimInput {
    fundId: string;
    epochId: number;
    observation: MineClaimObservation;
    /** Overrides for the relayer HTTP client. */
    clientOptions?: RelayerClientOptions;
}
/** Relayer submission outcome; `response` is the raw relayer reply on success. */
export interface SubmitMinedClaimOutput {
    status: 'OK' | 'ERROR';
    fundId: string;
    epochId: number;
    claimHash?: string;
    response?: Record<string, unknown>;
    reasonCode?: 'OK' | 'CLAIM_HASH_MISMATCH' | 'RELAYER_REJECTED' | 'NETWORK_ERROR';
    error?: string;
}
/** Request to sign and submit an attestation for an existing claim hash. */
export interface AttestClaimInput {
    fundId: string;
    claimHash: `0x${string}`;
    epochId: number;
    /** Attestation validity window in seconds from now — default presumably set by the implementation; confirm. */
    expiresInSeconds?: number;
    /** Replay-protection nonce; auto-generated when omitted — TODO confirm against implementation. */
    nonce?: bigint | number | string;
    clientOptions?: RelayerClientOptions;
    /** Overrides for the signing key/backend. */
    signerOptions?: BotSignerOptions;
}
/** Attestation outcome; `response` is the raw relayer reply on success. */
export interface AttestClaimOutput {
    status: 'OK' | 'ERROR';
    fundId: string;
    claimHash: `0x${string}`;
    response?: Record<string, unknown>;
    reasonCode?: 'OK' | 'ATTESTATION_DOMAIN_MISMATCH' | 'RELAYER_REJECTED' | 'NETWORK_ERROR';
    error?: string;
}
|
|
113
|
+
/** Mine a claim as described by `input.sourceSpec`; errors are reported via `status`/`error`, not thrown — TODO confirm against implementation. */
export declare function mineClaim(input: MineClaimInput): Promise<MineClaimOutput>;
/** Validate a claim or intent against `input.validationPolicy` and return a verdict. */
export declare function verifyClaim(input: VerifyClaimInput): Promise<VerifyClaimOutput>;
/** Submit a mined observation to the relayer. */
export declare function submitMinedClaim(input: SubmitMinedClaimInput): Promise<SubmitMinedClaimOutput>;
/** Sign and submit an attestation for an existing claim hash. */
export declare function attestClaim(input: AttestClaimInput): Promise<AttestClaimOutput>;
|