deadnet-agent 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +150 -0
- package/bin/deadnet-agent.js +2 -0
- package/dist/components/App.d.ts +9 -0
- package/dist/components/App.js +44 -0
- package/dist/components/Header.d.ts +7 -0
- package/dist/components/Header.js +5 -0
- package/dist/components/Log.d.ts +7 -0
- package/dist/components/Log.js +12 -0
- package/dist/components/PrettyApp.d.ts +9 -0
- package/dist/components/PrettyApp.js +245 -0
- package/dist/components/Status.d.ts +12 -0
- package/dist/components/Status.js +21 -0
- package/dist/lib/api.d.ts +27 -0
- package/dist/lib/api.js +101 -0
- package/dist/lib/config.d.ts +3 -0
- package/dist/lib/config.js +212 -0
- package/dist/lib/engine.d.ts +51 -0
- package/dist/lib/engine.js +591 -0
- package/dist/lib/prompts.d.ts +28 -0
- package/dist/lib/prompts.js +227 -0
- package/dist/lib/types.d.ts +66 -0
- package/dist/lib/types.js +1 -0
- package/dist/main.d.ts +2 -0
- package/dist/main.js +44 -0
- package/dist/providers/anthropic.d.ts +11 -0
- package/dist/providers/anthropic.js +54 -0
- package/dist/providers/base.d.ts +30 -0
- package/dist/providers/base.js +1 -0
- package/dist/providers/claude-code.d.ts +21 -0
- package/dist/providers/claude-code.js +103 -0
- package/dist/providers/index.d.ts +5 -0
- package/dist/providers/index.js +28 -0
- package/dist/providers/ollama.d.ts +11 -0
- package/dist/providers/ollama.js +57 -0
- package/dist/providers/openai.d.ts +11 -0
- package/dist/providers/openai.js +36 -0
- package/package.json +36 -0
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
/**
 * LLM provider backed by a local/remote Ollama server's /api/chat endpoint.
 */
export class OllamaProvider {
    // Provider identifier reported to the rest of the engine.
    name = "ollama";
    model;
    host;
    /**
     * @param host  Base URL of the Ollama server; a trailing slash is stripped.
     * @param model Model name forwarded on every /api/chat request.
     */
    constructor(host, model) {
        this.model = model;
        this.host = host.replace(/\/$/, "");
    }
    /**
     * Run one non-streaming chat turn against Ollama and normalize the reply.
     *
     * Stable system blocks (cache !== false) are joined into the system message
     * so it stays byte-identical across turns, letting Ollama's KV prefix cache
     * hit; dynamic blocks (cache === false) are instead prepended to the first
     * user message.
     *
     * @param system    Array of { text, cache? } system blocks.
     * @param messages  Conversation turns; non-string content is JSON-stringified.
     * @param maxTokens Generation cap, passed as options.num_predict.
     * @returns Normalized { content, token counts, stopReason } result.
     * @throws Error when the HTTP response is not ok.
     */
    async generate(system, messages, maxTokens) {
        const stableTexts = [];
        const dynamicTexts = [];
        for (const block of system) {
            (block.cache === false ? dynamicTexts : stableTexts).push(block.text);
        }
        const systemText = stableTexts.join("\n\n");
        const dynamicPrefix = dynamicTexts.join("\n\n");
        // Flatten structured content so Ollama always receives plain strings.
        const chat = messages.map(({ role, content }) => ({
            role,
            content: typeof content === "string" ? content : JSON.stringify(content),
        }));
        const first = chat[0];
        if (dynamicPrefix && first && first.role === "user") {
            chat[0] = { ...first, content: `${dynamicPrefix}\n\n${first.content}` };
        }
        const res = await fetch(`${this.host}/api/chat`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({
                model: this.model,
                messages: [{ role: "system", content: systemText }, ...chat],
                stream: false,
                options: { num_predict: maxTokens },
            }),
            // Abort slow generations after two minutes.
            signal: AbortSignal.timeout(120000),
        });
        if (!res.ok) {
            throw new Error(`Ollama ${res.status}: ${await res.text()}`);
        }
        const data = await res.json();
        const message = data.message || {};
        return {
            content: (message.content || "").trim(),
            inputTokens: data.prompt_eval_count || 0,
            outputTokens: data.eval_count || 0,
            // Ollama has no explicit prompt-cache accounting.
            cacheReadTokens: 0,
            cacheWriteTokens: 0,
            stopReason: data.done_reason === "length" ? "truncated" : "done",
        };
    }
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { type LLMProvider, type SystemBlock, type GenerateResult } from "./base.js";
/**
 * Type declarations for the OpenAI-backed LLM provider.
 * Declarations only — the implementation is in the compiled openai.js.
 */
export declare class OpenAIProvider implements LLMProvider {
    /** Provider identifier; the implementation sets this to "openai". */
    name: string;
    /** Model name forwarded to chat.completions.create on every request. */
    model: string;
    /** OpenAI SDK client built from the API key in the constructor. */
    private client;
    /**
     * @param apiKey OpenAI API key used to construct the SDK client.
     * @param model  Model identifier used for all generations.
     */
    constructor(apiKey: string, model: string);
    /**
     * Run one chat completion. System blocks are joined into a single system
     * message; non-string message content is JSON-stringified before sending.
     * @param maxTokens Completion-token cap (sent as max_tokens).
     * @returns Normalized result with token usage and a "truncated"/"done" stop reason.
     */
    generate(system: SystemBlock[], messages: Array<{
        role: "user" | "assistant";
        content: any;
    }>, maxTokens: number): Promise<GenerateResult>;
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import OpenAI from "openai";
/**
 * LLM provider backed by the OpenAI Chat Completions API.
 */
export class OpenAIProvider {
    // Provider identifier reported to the rest of the engine.
    name = "openai";
    model;
    client;
    /**
     * @param apiKey OpenAI API key used to construct the SDK client.
     * @param model  Model identifier forwarded on every request.
     */
    constructor(apiKey, model) {
        this.model = model;
        this.client = new OpenAI({ apiKey });
    }
    /**
     * Run one chat completion and normalize the result.
     *
     * All system blocks are flattened into a single system message (any per-block
     * cache hints are not used here). Non-string message content is
     * JSON-stringified so the API always receives plain text.
     *
     * @param system    Array of { text } system blocks.
     * @param messages  Conversation turns with "user"/"assistant" roles.
     * @param maxTokens Completion cap, sent as max_tokens.
     * @returns Normalized { content, token counts, stopReason } result.
     */
    async generate(system, messages, maxTokens) {
        const conversation = [
            { role: "system", content: system.map((b) => b.text).join("\n\n") },
        ];
        for (const { role, content } of messages) {
            conversation.push({
                role,
                content: typeof content === "string" ? content : JSON.stringify(content),
            });
        }
        const response = await this.client.chat.completions.create({
            model: this.model,
            max_tokens: maxTokens,
            messages: conversation,
        });
        const [choice] = response.choices;
        const usage = response.usage;
        return {
            content: choice.message.content?.trim() || "",
            inputTokens: usage?.prompt_tokens || 0,
            outputTokens: usage?.completion_tokens || 0,
            // OpenAI auto-caches prompts >=1024 tokens at 50% off — track for accurate cost display
            cacheReadTokens: usage?.prompt_tokens_details?.cached_tokens || 0,
            cacheWriteTokens: 0,
            stopReason: choice.finish_reason === "length" ? "truncated" : "done",
        };
    }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "deadnet-agent",
|
|
3
|
+
"version": "1.0.7",
|
|
4
|
+
"description": "Autonomous DeadNet competitor — connect, queue, compete.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"files": [
|
|
7
|
+
"dist/",
|
|
8
|
+
"bin/",
|
|
9
|
+
"README.md"
|
|
10
|
+
],
|
|
11
|
+
"bin": {
|
|
12
|
+
"deadnet": "./bin/deadnet-agent.js",
|
|
13
|
+
"deadnet-agent": "./bin/deadnet-agent.js"
|
|
14
|
+
},
|
|
15
|
+
"scripts": {
|
|
16
|
+
"start": "tsx src/main.tsx",
|
|
17
|
+
"pretty": "tsx src/main.tsx --pretty",
|
|
18
|
+
"build": "tsc",
|
|
19
|
+
"dev": "tsx --watch src/main.tsx"
|
|
20
|
+
},
|
|
21
|
+
"dependencies": {
|
|
22
|
+
"@anthropic-ai/sdk": "^0.80.0",
|
|
23
|
+
"dotenv": "^17.3.1",
|
|
24
|
+
"ink": "^6.8.0",
|
|
25
|
+
"ink-spinner": "^5.0.0",
|
|
26
|
+
"ink-text-input": "^6.0.0",
|
|
27
|
+
"openai": "^6.32.0",
|
|
28
|
+
"react": "^19.2.4"
|
|
29
|
+
},
|
|
30
|
+
"devDependencies": {
|
|
31
|
+
"@types/node": "^25.5.0",
|
|
32
|
+
"@types/react": "^19.2.14",
|
|
33
|
+
"tsx": "^4.21.0",
|
|
34
|
+
"typescript": "^5.9.3"
|
|
35
|
+
}
|
|
36
|
+
}
|