2020117-agent 0.2.4 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # 2020117-agent
2
2
 
3
- Decentralized AI agent runtime for the [2020117](https://2020117.xyz) network. Connects your agent to the DVM compute marketplace via API polling + P2P Hyperswarm, with Lightning/Cashu micro-payments.
3
+ Decentralized AI agent runtime for the [2020117](https://2020117.xyz) network. Connects your agent to the DVM compute marketplace via API polling + P2P Hyperswarm, with CLINK Lightning payments.
4
4
 
5
5
  ## Quick Start
6
6
 
@@ -14,8 +14,8 @@ npx 2020117-agent --kind=5302 --processor=exec:./translate.sh
14
14
  # Run as provider (HTTP backend)
15
15
  npx 2020117-agent --kind=5200 --processor=http://localhost:7860 --models=sdxl-lightning,sd3.5-turbo
16
16
 
17
- # P2P streaming customer
18
- npx 2020117-customer --kind=5100 --budget=50 "Explain quantum computing"
17
+ # P2P session — rent an agent by the minute
18
+ npx 2020117-session --kind=5200 --budget=500 --port=8080
19
19
  ```
20
20
 
21
21
  ## Setup
@@ -47,10 +47,8 @@ npx 2020117-agent --agent=my-agent --kind=5100
47
47
 
48
48
  | Command | Description |
49
49
  |---------|-------------|
50
- | `2020117-agent` | Unified agent (API polling + P2P listening) |
51
- | `2020117-customer` | P2P streaming customer |
52
- | `2020117-provider` | P2P-only provider |
53
- | `2020117-pipeline` | Multi-step pipeline agent |
50
+ | `2020117-agent` | Unified agent (API polling + P2P session listening) |
51
+ | `2020117-session` | P2P session client (CLI REPL + HTTP proxy) |
54
52
 
55
53
  ## CLI Parameters
56
54
 
@@ -64,10 +62,11 @@ npx 2020117-agent --agent=my-agent --kind=5100
64
62
  | `--max-jobs` | `MAX_JOBS` | Max concurrent jobs (default: 3) |
65
63
  | `--api-key` | `API_2020117_KEY` | API key (overrides `.2020117_keys`) |
66
64
  | `--api-url` | `API_2020117_URL` | API base URL |
67
- | `--sub-kind` | `SUB_KIND` | Sub-task kind (enables pipeline) |
68
- | `--sub-channel` | `SUB_CHANNEL` | Sub-task channel: `p2p` or `api` |
69
- | `--budget` | `SUB_BUDGET` | P2P sub-task budget in sats |
65
+ | `--sub-kind` | `SUB_KIND` | Sub-task kind (enables pipeline via API) |
70
66
  | `--skill` | `SKILL_FILE` | Path to skill JSON file describing agent capabilities |
67
+ | `--port` | `SESSION_PORT` | Session HTTP proxy port (default: 8080) |
68
+ | `--provider` | `PROVIDER_PUBKEY` | Target provider public key |
69
+ | `--lightning-address` | `LIGHTNING_ADDRESS` | Provider's Lightning Address (auto-fetched from platform if not set) |
71
70
 
72
71
  Environment variables also work: `AGENT=my-agent DVM_KIND=5100 2020117-agent`
73
72
 
@@ -85,7 +84,7 @@ Environment variables also work: `AGENT=my-agent DVM_KIND=5100 2020117-agent`
85
84
  ```js
86
85
  import { createProcessor } from '2020117-agent/processor'
87
86
  import { SwarmNode } from '2020117-agent/swarm'
88
- import { mintTokens } from '2020117-agent/cashu'
87
+ import { collectPayment } from '2020117-agent/clink'
89
88
  import { hasApiKey, registerService } from '2020117-agent/api'
90
89
  ```
91
90
 
@@ -99,14 +98,14 @@ import { hasApiKey, registerService } from '2020117-agent/api'
99
98
  (heartbeat, │ (inbox → accept → │
100
99
  inbox, result) │ process → result) │
101
100
  │ │
102
- Hyperswarm DHT ◄──┤ P2P Listener │──► Cashu Payments
103
- (encrypted TCP) │ (offer → chunks → │ (mint/split/claim)
104
- │ result)
101
+ Hyperswarm DHT ◄──┤ P2P Sessions │──► CLINK Payments
102
+ (encrypted TCP) │ (sessionHTTP │ (ndebit via Lightning)
103
+ tunnel → result)
105
104
  └─────────────────────┘
106
105
  ```
107
106
 
108
107
  - **API channel**: Polls platform inbox, accepts jobs, submits results. Lightning payments on completion.
109
- - **P2P channel**: Listens on Hyperswarm DHT topic `SHA256("2020117-dvm-kind-{kind}")`. Cashu micro-payments per chunk.
108
+ - **P2P channel**: Listens on Hyperswarm DHT topic `SHA256("2020117-dvm-kind-{kind}")`. Interactive sessions with CLINK per-minute billing.
110
109
  - Both channels share a single capacity counter — the agent never overloads.
111
110
 
112
111
  ## Development
package/dist/agent.d.ts CHANGED
@@ -1,8 +1,8 @@
1
1
  #!/usr/bin/env node
2
2
  /**
3
- * Unified Agent Runtime — runs as a long-lived daemon that handles both:
4
- * 1. Async platform tasks (inbox polling → accept → Ollama → submit result)
5
- * 2. Real-time P2P streaming (Hyperswarm + CLINK debit payments)
3
+ * Unified Agent Runtime — runs as a long-lived daemon that handles:
4
+ * 1. Async platform tasks (inbox polling → accept → process → submit result)
5
+ * 2. P2P sessions (Hyperswarm + CLINK per-minute billing)
6
6
  *
7
7
  * Both channels share a single capacity counter so the agent never overloads.
8
8
  *
package/dist/agent.js CHANGED
@@ -1,8 +1,8 @@
1
1
  #!/usr/bin/env node
2
2
  /**
3
- * Unified Agent Runtime — runs as a long-lived daemon that handles both:
4
- * 1. Async platform tasks (inbox polling → accept → Ollama → submit result)
5
- * 2. Real-time P2P streaming (Hyperswarm + CLINK debit payments)
3
+ * Unified Agent Runtime — runs as a long-lived daemon that handles:
4
+ * 1. Async platform tasks (inbox polling → accept → process → submit result)
5
+ * 2. P2P sessions (Hyperswarm + CLINK per-minute billing)
6
6
  *
7
7
  * Both channels share a single capacity counter so the agent never overloads.
8
8
  *
@@ -42,18 +42,12 @@ for (const arg of process.argv.slice(2)) {
42
42
  case '--sub-kind':
43
43
  process.env.SUB_KIND = val;
44
44
  break;
45
- case '--sub-channel':
46
- process.env.SUB_CHANNEL = val;
47
- break;
48
45
  case '--sub-provider':
49
46
  process.env.SUB_PROVIDER = val;
50
47
  break;
51
48
  case '--sub-bid':
52
49
  process.env.SUB_BID = val;
53
50
  break;
54
- case '--budget':
55
- process.env.SUB_BUDGET = val;
56
- break;
57
51
  case '--api-key':
58
52
  process.env.API_2020117_KEY = val;
59
53
  break;
@@ -73,8 +67,6 @@ for (const arg of process.argv.slice(2)) {
73
67
  }
74
68
  import { randomBytes } from 'crypto';
75
69
  import { SwarmNode, topicFromKind } from './swarm.js';
76
- import { collectP2PPayment, handleStop, streamToCustomer } from './p2p-provider.js';
77
- import { streamFromProvider } from './p2p-customer.js';
78
70
  import { createProcessor } from './processor.js';
79
71
  import { hasApiKey, loadAgentName, registerService, startHeartbeatLoop, getInbox, acceptJob, sendFeedback, submitResult, createJob, getJob, getProfile, } from './api.js';
80
72
  import { initClinkAgent, collectPayment } from './clink.js';
@@ -86,18 +78,13 @@ const MAX_CONCURRENT = Number(process.env.MAX_JOBS) || 3;
86
78
  const POLL_INTERVAL = Number(process.env.POLL_INTERVAL) || 30_000;
87
79
  const SATS_PER_CHUNK = Number(process.env.SATS_PER_CHUNK) || 1;
88
80
  const CHUNKS_PER_PAYMENT = Number(process.env.CHUNKS_PER_PAYMENT) || 10;
89
- const PAYMENT_TIMEOUT = Number(process.env.PAYMENT_TIMEOUT) || 30_000;
90
81
  // --- CLINK payment config ---
91
82
  let LIGHTNING_ADDRESS = process.env.LIGHTNING_ADDRESS || '';
92
83
  // --- Sub-task delegation config ---
93
84
  const SUB_KIND = process.env.SUB_KIND ? Number(process.env.SUB_KIND) : null;
94
- const SUB_BUDGET = Number(process.env.SUB_BUDGET) || 50;
95
- const SUB_CHANNEL = process.env.SUB_CHANNEL || 'p2p';
96
85
  const SUB_PROVIDER = process.env.SUB_PROVIDER || undefined;
97
86
  const SUB_BID = Number(process.env.SUB_BID) || 100;
98
- const MAX_SATS_PER_CHUNK = Number(process.env.MAX_SATS_PER_CHUNK) || 5;
99
87
  const MIN_BID_SATS = Number(process.env.MIN_BID_SATS) || SATS_PER_CHUNK * CHUNKS_PER_PAYMENT; // default = pricing per job
100
- const SUB_BATCH_SIZE = Number(process.env.SUB_BATCH_SIZE) || 500; // chars to accumulate before local processing
101
88
  // --- Skill file loading ---
102
89
  function loadSkill() {
103
90
  const skillPath = process.env.SKILL_FILE;
@@ -153,7 +140,7 @@ async function main() {
153
140
  state.processor = await createProcessor();
154
141
  console.log(`[${label}] kind=${KIND} processor=${state.processor.name} maxJobs=${MAX_CONCURRENT}`);
155
142
  if (SUB_KIND) {
156
- console.log(`[${label}] Pipeline: sub-task kind=${SUB_KIND} via ${SUB_CHANNEL}${SUB_CHANNEL === 'api' ? ` (bid=${SUB_BID}${SUB_PROVIDER ? `, provider=${SUB_PROVIDER}` : ''})` : ` (budget=${SUB_BUDGET} sats)`}`);
143
+ console.log(`[${label}] Pipeline: sub-task kind=${SUB_KIND} (bid=${SUB_BID}${SUB_PROVIDER ? `, provider=${SUB_PROVIDER}` : ''})`);
157
144
  }
158
145
  else if (state.processor.name === 'none') {
159
146
  console.warn(`[${label}] WARNING: processor=none without SUB_KIND — generate() will pass through input as-is`);
@@ -266,23 +253,13 @@ async function processAsyncJob(label, inboxJobId, input, params) {
266
253
  console.log(`[${label}] Job ${providerJobId}: processing "${input.slice(0, 60)}..."`);
267
254
  await sendFeedback(providerJobId, 'processing');
268
255
  let result;
269
- // Pipeline: delegate sub-task then process locally
256
+ // Pipeline: delegate sub-task via API then process locally
270
257
  if (SUB_KIND) {
271
- console.log(`[${label}] Job ${providerJobId}: delegating to kind ${SUB_KIND} via ${SUB_CHANNEL}...`);
258
+ console.log(`[${label}] Job ${providerJobId}: delegating to kind ${SUB_KIND}...`);
272
259
  try {
273
- if (SUB_CHANNEL === 'api') {
274
- // API delegation is non-streaming: collect full result, then process
275
- const subResult = await delegateAPI(SUB_KIND, input, SUB_BID, SUB_PROVIDER);
276
- console.log(`[${label}] Job ${providerJobId}: sub-task returned ${subResult.length} chars`);
277
- result = await state.processor.generate({ input: subResult, params });
278
- }
279
- else {
280
- // P2P delegation — stream-collect from sub-provider, batch-translate
281
- result = '';
282
- for await (const chunk of pipelineStream(SUB_KIND, input, SUB_BUDGET)) {
283
- result += chunk;
284
- }
285
- }
260
+ const subResult = await delegateAPI(SUB_KIND, input, SUB_BID, SUB_PROVIDER);
261
+ console.log(`[${label}] Job ${providerJobId}: sub-task returned ${subResult.length} chars`);
262
+ result = await state.processor.generate({ input: subResult, params });
286
263
  }
287
264
  catch (e) {
288
265
  console.error(`[${label}] Job ${providerJobId}: sub-task failed: ${e.message}, using original input`);
@@ -307,50 +284,6 @@ async function processAsyncJob(label, inboxJobId, input, params) {
307
284
  }
308
285
  }
309
286
  // --- Sub-task delegation ---
310
- /**
311
- * Delegate a sub-task via Hyperswarm P2P with CLINK debit payments.
312
- * Thin wrapper around the shared streamFromProvider() module.
313
- */
314
- async function* delegateP2PStream(kind, input, budgetSats) {
315
- const ndebit = process.env.CLINK_NDEBIT || '';
316
- if (!ndebit)
317
- throw new Error('Pipeline sub-delegation requires CLINK_NDEBIT env var (--ndebit)');
318
- yield* streamFromProvider({
319
- kind,
320
- input,
321
- budgetSats,
322
- ndebit,
323
- maxSatsPerChunk: MAX_SATS_PER_CHUNK,
324
- label: 'sub-p2p',
325
- });
326
- }
327
- /**
328
- * Streaming pipeline: delegates to a sub-provider via P2P, accumulates
329
- * chunks into batches, translates each batch locally via streaming Ollama,
330
- * and yields the translated tokens.
331
- *
332
- * Flow: sub-provider streams → batch → Ollama stream-translate → yield tokens
333
- */
334
- async function* pipelineStream(kind, input, budgetSats) {
335
- let batch = '';
336
- async function* translateBatch(text) {
337
- for await (const token of state.processor.generateStream({ input: text })) {
338
- yield token;
339
- }
340
- }
341
- for await (const chunk of delegateP2PStream(kind, input, budgetSats)) {
342
- batch += chunk;
343
- // When batch is big enough, translate and stream out
344
- if (batch.length >= SUB_BATCH_SIZE) {
345
- yield* translateBatch(batch);
346
- batch = '';
347
- }
348
- }
349
- // Translate remaining text
350
- if (batch.length > 0) {
351
- yield* translateBatch(batch);
352
- }
353
- }
354
287
  /**
355
288
  * Delegate a sub-task via platform API. Creates a job, then polls until
356
289
  * the result is available (max 120s).
@@ -382,17 +315,13 @@ async function delegateAPI(kind, input, bidSats, provider) {
382
315
  }
383
316
  throw new Error(`Sub-task ${jobId} timed out after 120s`);
384
317
  }
385
- // --- 4. P2P Swarm Listener ---
386
- const p2pJobs = new Map();
387
318
  const activeSessions = new Map();
388
319
  // Backend WebSocket connections for WS tunnel (keyed by ws_id)
389
320
  const backendWebSockets = new Map();
390
321
  async function startSwarmListener(label) {
391
322
  const node = new SwarmNode();
392
323
  state.swarmNode = node;
393
- const satsPerPayment = SATS_PER_CHUNK * CHUNKS_PER_PAYMENT;
394
324
  const topic = topicFromKind(KIND);
395
- console.log(`[${label}] P2P: ${SATS_PER_CHUNK} sat/chunk, ${CHUNKS_PER_PAYMENT} chunks/payment (${satsPerPayment} sats/cycle)`);
396
325
  console.log(`[${label}] Joining swarm topic for kind ${KIND}`);
397
326
  await node.listen(topic);
398
327
  console.log(`[${label}] P2P listening for customers...`);
@@ -672,65 +601,6 @@ async function startSwarmListener(label) {
672
601
  }
673
602
  return;
674
603
  }
675
- if (msg.type === 'request') {
676
- console.log(`[${label}] P2P job ${msg.id} from ${tag}: "${(msg.input || '').slice(0, 60)}..."`);
677
- if (!LIGHTNING_ADDRESS) {
678
- node.send(socket, { type: 'error', id: msg.id, message: 'Provider has no Lightning Address configured' });
679
- return;
680
- }
681
- if (!msg.ndebit) {
682
- node.send(socket, { type: 'error', id: msg.id, message: 'Request requires ndebit authorization' });
683
- return;
684
- }
685
- if (!acquireSlot()) {
686
- node.send(socket, {
687
- type: 'error',
688
- id: msg.id,
689
- message: `No capacity (${state.activeJobs}/${MAX_CONCURRENT} slots used)`,
690
- });
691
- console.log(`[${label}] P2P job ${msg.id}: rejected (no capacity)`);
692
- return;
693
- }
694
- if (msg.budget !== undefined) {
695
- console.log(`[${label}] Customer budget: ${msg.budget} sats`);
696
- }
697
- const job = {
698
- socket,
699
- credit: CHUNKS_PER_PAYMENT, // start with free credit; debit after delivery
700
- ndebit: msg.ndebit,
701
- totalEarned: 0,
702
- stopped: false,
703
- };
704
- p2pJobs.set(msg.id, job);
705
- // Send offer (price quote — no payment yet)
706
- node.send(socket, {
707
- type: 'offer',
708
- id: msg.id,
709
- sats_per_chunk: SATS_PER_CHUNK,
710
- chunks_per_payment: CHUNKS_PER_PAYMENT,
711
- });
712
- // Generate first, pay after delivery
713
- node.send(socket, { type: 'accepted', id: msg.id });
714
- await runP2PGeneration(node, job, msg, label);
715
- // Debit after result delivered
716
- if (!job.stopped) {
717
- const paid = await collectP2PPayment({
718
- job, node, jobId: msg.id,
719
- satsPerChunk: SATS_PER_CHUNK,
720
- chunksPerPayment: CHUNKS_PER_PAYMENT,
721
- lightningAddress: LIGHTNING_ADDRESS,
722
- label,
723
- });
724
- if (!paid) {
725
- console.log(`[${label}] P2P job ${msg.id}: post-delivery debit failed`);
726
- }
727
- }
728
- }
729
- if (msg.type === 'stop') {
730
- const job = p2pJobs.get(msg.id);
731
- if (job)
732
- handleStop(job, msg.id, label);
733
- }
734
604
  });
735
605
  // Handle customer disconnect — payments already settled via CLINK
736
606
  node.on('peer-leave', (peerId) => {
@@ -742,15 +612,6 @@ async function startSwarmListener(label) {
742
612
  endSession(node, session, label);
743
613
  }
744
614
  }
745
- // Clean up any P2P streaming jobs for this peer
746
- for (const [jobId, job] of p2pJobs) {
747
- if (job.socket?.remotePublicKey?.toString('hex') === peerId) {
748
- console.log(`[${label}] Peer ${tag} disconnected — P2P job ${jobId} (${job.totalEarned} sats earned)`);
749
- job.stopped = true;
750
- p2pJobs.delete(jobId);
751
- releaseSlot();
752
- }
753
- }
754
615
  // Clean up backend WebSockets for this peer
755
616
  for (const [wsId, entry] of backendWebSockets) {
756
617
  if (entry.peerId === peerId) {
@@ -763,24 +624,6 @@ async function startSwarmListener(label) {
763
624
  }
764
625
  });
765
626
  }
766
- async function runP2PGeneration(node, job, msg, label) {
767
- const source = SUB_KIND
768
- ? pipelineStream(SUB_KIND, msg.input || '', SUB_BUDGET)
769
- : state.processor.generateStream({ input: msg.input || '', params: msg.params });
770
- await streamToCustomer({
771
- node,
772
- job,
773
- jobId: msg.id,
774
- source,
775
- satsPerChunk: SATS_PER_CHUNK,
776
- chunksPerPayment: CHUNKS_PER_PAYMENT,
777
- lightningAddress: LIGHTNING_ADDRESS,
778
- label,
779
- });
780
- // No batch claim needed — CLINK payments settle instantly via Lightning
781
- p2pJobs.delete(msg.id);
782
- releaseSlot();
783
- }
784
627
  // --- Session helpers ---
785
628
  function findSessionBySocket(socket) {
786
629
  for (const session of activeSessions.values()) {
package/package.json CHANGED
@@ -1,13 +1,10 @@
1
1
  {
2
2
  "name": "2020117-agent",
3
- "version": "0.2.4",
3
+ "version": "0.3.0",
4
4
  "description": "2020117 agent runtime — API polling + Hyperswarm P2P + CLINK Lightning payments",
5
5
  "type": "module",
6
6
  "bin": {
7
7
  "2020117-agent": "./dist/agent.js",
8
- "2020117-customer": "./dist/customer.js",
9
- "2020117-provider": "./dist/provider.js",
10
- "2020117-pipeline": "./dist/pipeline.js",
11
8
  "2020117-session": "./dist/session.js"
12
9
  },
13
10
  "files": [
@@ -17,23 +14,15 @@
17
14
  "./processor": "./dist/processor.js",
18
15
  "./swarm": "./dist/swarm.js",
19
16
  "./clink": "./dist/clink.js",
20
- "./api": "./dist/api.js",
21
- "./p2p-provider": "./dist/p2p-provider.js",
22
- "./p2p-customer": "./dist/p2p-customer.js"
17
+ "./api": "./dist/api.js"
23
18
  },
24
19
  "scripts": {
25
20
  "agent": "node dist/agent.js",
26
- "customer": "node dist/customer.js",
27
- "provider": "node dist/provider.js",
28
- "pipeline": "node dist/pipeline.js",
29
21
  "session": "node dist/session.js",
30
22
  "build": "tsc",
31
23
  "prepublishOnly": "tsc",
32
24
  "typecheck": "tsc --noEmit",
33
25
  "dev:agent": "npx tsx src/agent.ts",
34
- "dev:customer": "npx tsx src/customer.ts",
35
- "dev:provider": "npx tsx src/provider.ts",
36
- "dev:pipeline": "npx tsx src/pipeline.ts",
37
26
  "dev:session": "npx tsx src/session.ts"
38
27
  },
39
28
  "dependencies": {