hyperclaw 5.2.5 → 5.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audit-NPINyRh4.js +445 -0
- package/dist/chat-8E4H6nqx.js +325 -0
- package/dist/connector-1_9a4Mhv.js +276 -0
- package/dist/connector-BeHsEhpz.js +164 -0
- package/dist/connector-DJ63fLj9.js +555 -0
- package/dist/connector-Ic8H84de.js +204 -0
- package/dist/daemon-DRhU750_.js +7 -0
- package/dist/daemon-bJ8IYnkd.js +421 -0
- package/dist/delivery-1vTBQ0a0.js +95 -0
- package/dist/delivery-BbOfKejh.js +4 -0
- package/dist/engine-BjzV25HS.js +7 -0
- package/dist/engine-DJSr69DF.js +327 -0
- package/dist/heartbeat-engine-0swQl6wg.js +89 -0
- package/dist/hub-BuUwiTxh.js +6 -0
- package/dist/hub-DIoASRn6.js +512 -0
- package/dist/hyperclawbot-CIvGq2IG.js +516 -0
- package/dist/inference-BSWFHqzs.js +2854 -0
- package/dist/inference-DQiqWbqu.js +8 -0
- package/dist/loader-Bpju2Xqs.js +6 -0
- package/dist/loader-DRfmh8hU.js +410 -0
- package/dist/logger-CnxILOPV.js +86 -0
- package/dist/mcp-loader-D-uIqYwB.js +93 -0
- package/dist/memory-auto-CK5M1YV8.js +5 -0
- package/dist/memory-auto-Cs6XiIxb.js +306 -0
- package/dist/node-4_wJsNEN.js +226 -0
- package/dist/oauth-flow-CJ7dFXKT.js +148 -0
- package/dist/onboard-C1RArB82.js +3865 -0
- package/dist/onboard-CQkUrkNk.js +13 -0
- package/dist/orchestrator-D9R2u9yL.js +6 -0
- package/dist/orchestrator-DMDgfB8j.js +189 -0
- package/dist/pairing-CNNtZ8JR.js +6 -0
- package/dist/pairing-fGaxBlgG.js +207 -0
- package/dist/pc-access-CaE4x3Vt.js +8 -0
- package/dist/pc-access-OIwXRyAD.js +858 -0
- package/dist/run-main.js +50 -44
- package/dist/runner-CFvEFt23.js +1274 -0
- package/dist/server-BSCeWSlZ.js +1304 -0
- package/dist/server-DIwR4tT3.js +4 -0
- package/dist/skill-runtime-CCwGR7iX.js +5 -0
- package/dist/skill-runtime-vmBIhuVk.js +104 -0
- package/dist/src-3dXyf5GQ.js +458 -0
- package/dist/src-BVeLalMV.js +63 -0
- package/dist/sub-agent-tools-C1dWyUAR.js +39 -0
- package/dist/tts-elevenlabs-F_xjKQ-I.js +64 -0
- package/dist/vision-BR5Gdb2s.js +169 -0
- package/dist/vision-tools-DuB1QtlE.js +51 -0
- package/dist/vision-tools-LvL8RMWR.js +5 -0
- package/dist/voice-transcription-D6dK7b9A.js +171 -0
- package/dist/website-watch-tools-Cqp7RPvn.js +176 -0
- package/dist/website-watch-tools-UPSrnBk2.js +5 -0
- package/package.json +1 -1
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-jS-bbMI5.js');
|
|
2
|
+
const https = require_chunk.__toESM(require("https"));
|
|
3
|
+
|
|
4
|
+
//#region src/services/tts-elevenlabs.ts
|
|
5
|
+
// Default ElevenLabs voice id and multilingual TTS model.
const DEFAULT_VOICE = "21m00Tcm4TlvDq8ikWAM";
const DEFAULT_MODEL = "eleven_multilingual_v2";
/**
 * Make a value safe for single-line log output: collapse newline/tab runs
 * to one space, mask non-printable-ASCII runs with "?", cap at 200 chars.
 */
function sanitizeForLog(value) {
  const text = String(value ?? "");
  const singleLine = text.replace(/[\r\n\t]+/g, " ");
  const asciiOnly = singleLine.replace(/[^\x20-\x7e]+/g, "?");
  return asciiOnly.slice(0, 200);
}
|
|
10
|
+
/**
 * Convert text to speech via the ElevenLabs API.
 * Resolves with base64-encoded MP3 audio, or null on any failure
 * (non-200 response, network error, 30s timeout). Never rejects.
 */
async function textToSpeech(text, opts) {
  const voice = opts.voiceId || DEFAULT_VOICE;
  const model = opts.modelId || DEFAULT_MODEL;
  const outputFormat = opts.outputFormat || "mp3_22050_32";
  const payload = JSON.stringify({
    text,
    model_id: model
  });
  const requestPath = `/v1/text-to-speech/${voice}?output_format=${outputFormat}`;
  return new Promise((resolve) => {
    const requestOptions = {
      hostname: "api.elevenlabs.io",
      port: 443,
      path: requestPath,
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "xi-api-key": opts.apiKey,
        "Content-Length": Buffer.byteLength(payload)
      }
    };
    const req = https.default.request(requestOptions, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const audio = Buffer.concat(parts);
        if (res.statusCode === 200) {
          resolve(audio.toString("base64"));
          return;
        }
        // Log a truncated, sanitized body so API errors stay debuggable.
        console.warn(`[tts] ElevenLabs error ${res.statusCode}: ${sanitizeForLog(audio.toString("utf8", 0, 200))}`);
        resolve(null);
      });
    });
    req.on("error", (e) => {
      console.warn("[tts] ElevenLabs request error:", sanitizeForLog(e.message));
      resolve(null);
    });
    // Give up after 30 seconds; destroying the socket aborts the request.
    req.setTimeout(3e4, () => {
      req.destroy();
      resolve(null);
    });
    req.write(payload);
    req.end();
  });
}
|
|
62
|
+
|
|
63
|
+
//#endregion
|
|
64
|
+
exports.textToSpeech = textToSpeech;
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-jS-bbMI5.js');
|
|
2
|
+
const fs_extra = require_chunk.__toESM(require("fs-extra"));
|
|
3
|
+
const path = require_chunk.__toESM(require("path"));
|
|
4
|
+
const os = require_chunk.__toESM(require("os"));
|
|
5
|
+
const http = require_chunk.__toESM(require("http"));
|
|
6
|
+
const https = require_chunk.__toESM(require("https"));
|
|
7
|
+
|
|
8
|
+
//#region src/services/vision.ts
|
|
9
|
+
/**
 * Map a lowercase file extension to an image MIME type.
 * Unknown extensions fall back to "image/jpeg".
 */
function inferImageMediaType(ext) {
  switch (ext) {
    case ".png": return "image/png";
    case ".jpg":
    case ".jpeg": return "image/jpeg";
    case ".gif": return "image/gif";
    case ".webp": return "image/webp";
    default: return "image/jpeg";
  }
}
|
|
18
|
+
/**
 * Read a local image file and return it as base64 plus its MIME type.
 *
 * Expands a leading "~" only when it refers to the current user's home
 * directory ("~" alone or "~/..."), then validates extension, file-ness,
 * and size before reading.
 *
 * @param {string} filePath - Absolute, relative, or "~/"-prefixed path.
 * @returns {Promise<{data: string, mediaType: string}>} base64 payload + MIME.
 * @throws {Error} on unsupported extension, non-file, empty, or oversized input.
 */
async function readValidatedImageFile(filePath) {
  // Fix: the previous /^~/ replacement also rewrote "~otheruser/..." style
  // paths into homedir + "otheruser/...". Only expand a bare "~" segment.
  const expanded = filePath.replace(/^~(?=$|[\\/])/, os.default.homedir());
  const resolvedPath = path.default.resolve(expanded);
  const ext = path.default.extname(resolvedPath).toLowerCase();
  if (!ALLOWED_IMAGE_EXTENSIONS.has(ext)) throw new Error(`Unsupported image file type: ${ext || "unknown"}`);
  const stats = await fs_extra.default.stat(resolvedPath);
  if (!stats.isFile()) throw new Error("Image input must be a file");
  if (stats.size === 0) throw new Error("Image input is empty");
  if (stats.size > MAX_IMAGE_BYTES) throw new Error(`Image input exceeds ${MAX_IMAGE_BYTES} bytes`);
  const buf = await fs_extra.default.readFile(resolvedPath);
  return {
    data: buf.toString("base64"),
    mediaType: inferImageMediaType(ext)
  };
}
|
|
32
|
+
/**
 * Download an image over http/https and return { data: base64, mediaType }.
 *
 * Enforces MAX_IMAGE_BYTES twice: once from the Content-Length header (when
 * present) and again while streaming, so chunked responses cannot exceed the
 * cap. The MIME type is inferred from the URL pathname extension, defaulting
 * to ".jpg" when the extension is missing or not allowed.
 *
 * @param {string} urlString - http:// or https:// image URL.
 * @returns {Promise<{data: string, mediaType: string}>}
 * @throws {Error} on bad protocol, HTTP status >= 400, or oversized download.
 */
async function fetchValidatedRemoteImage(urlString) {
  const url = new URL(urlString);
  if (!["http:", "https:"].includes(url.protocol)) throw new Error(`Unsupported image URL protocol: ${url.protocol}`);
  const mod = url.protocol === "https:" ? https.default : http.default;
  const buf = await new Promise((resolve, reject) => {
    const req = mod.get(url, (res) => {
      if ((res.statusCode ?? 0) >= 400) {
        reject(new Error(`Image download failed with status ${res.statusCode}`));
        // Drain the response so the socket is released.
        res.resume();
        return;
      }
      // Header-based early rejection (0 when header absent, so it passes).
      const contentLength = Number(res.headers["content-length"] ?? 0);
      if (contentLength > MAX_IMAGE_BYTES) {
        reject(new Error(`Image download exceeds ${MAX_IMAGE_BYTES} bytes`));
        res.destroy();
        return;
      }
      const chunks = [];
      let total = 0;
      res.on("data", (c) => {
        total += c.length;
        if (total > MAX_IMAGE_BYTES) {
          // Destroying the request with an error surfaces it via req "error",
          // which rejects the promise below.
          req.destroy(new Error(`Image download exceeds ${MAX_IMAGE_BYTES} bytes`));
          return;
        }
        chunks.push(c);
      });
      res.on("end", () => resolve(Buffer.concat(chunks)));
      res.on("error", reject);
    });
    req.on("error", reject);
  });
  // NOTE(review): redirects (3xx) are not followed — confirm callers only
  // pass direct image URLs.
  const pathnameExt = path.default.extname(url.pathname).toLowerCase();
  return {
    data: buf.toString("base64"),
    mediaType: inferImageMediaType(ALLOWED_IMAGE_EXTENSIONS.has(pathnameExt) ? pathnameExt : ".jpg")
  };
}
|
|
70
|
+
/**
 * Normalize any supported image input into { data: base64, mediaType }.
 *
 * Accepted forms: a base64 data URI ("data:image/...;base64,..."), an
 * http(s) URL (downloaded and size-checked), or a local file path
 * (validated and read).
 *
 * @param {string} input - Data URI, URL, or file path.
 * @returns {Promise<{data: string, mediaType: string}>}
 * @throws {Error} on malformed data URI, empty/oversized payload, or
 *   validation failures from the URL/file branches.
 */
async function imageToBase64(input) {
  const trimmed = input.trim();
  if (trimmed.startsWith("data:")) {
    const match = trimmed.match(/^data:(image\/[^;]+);base64,(.+)$/);
    // Fix: a malformed data: URI previously fell through to the file-path
    // branch and surfaced a confusing filesystem error; fail clearly instead.
    if (!match) throw new Error("Invalid data URI image input (expected data:image/<type>;base64,<payload>)");
    const buffer = Buffer.from(match[2], "base64");
    if (buffer.length === 0) throw new Error("Image input is empty");
    if (buffer.length > MAX_IMAGE_BYTES) throw new Error(`Image input exceeds ${MAX_IMAGE_BYTES} bytes`);
    return {
      data: match[2],
      mediaType: match[1]
    };
  }
  if (trimmed.startsWith("http")) return fetchValidatedRemoteImage(trimmed);
  return readValidatedImageFile(trimmed);
}
|
|
87
|
+
/**
 * Describe an image with a vision model.
 *
 * @param {string} imageInput - File path, http(s) URL, or data URI.
 * @param {string} prompt - Instruction for the model; falls back to "Describe this image."
 * @param {string} apiKey - Key for the chosen provider.
 * @param {"anthropic"|string} provider - "anthropic" targets the Claude
 *   Messages API; any other value targets OpenRouter's chat completions.
 * @returns {Promise<string>} Model text, the API's error message,
 *   "(no description)", or the raw body when JSON parsing fails.
 */
async function analyzeImage(imageInput, prompt, apiKey, provider = "anthropic") {
  const { data, mediaType } = await imageToBase64(imageInput);
  const isAnthropic = provider === "anthropic";
  const hostname = isAnthropic ? "api.anthropic.com" : "openrouter.ai";
  const model = isAnthropic ? "claude-sonnet-4-20250514" : "openai/gpt-4o";
  const body = {
    model,
    max_tokens: 1024,
    // Anthropic expects {type:"image", source:{base64,...}} content blocks;
    // OpenAI-style (OpenRouter) expects {type:"image_url", image_url:{url}}.
    messages: isAnthropic ? [{
      role: "user",
      content: [{
        type: "image",
        source: {
          type: "base64",
          media_type: mediaType,
          data
        }
      }, {
        type: "text",
        text: prompt || "Describe this image."
      }]
    }] : [{
      role: "user",
      content: [{
        type: "text",
        text: "[Image attached] " + (prompt || "Describe this image.")
      }, {
        type: "image_url",
        image_url: { url: `data:${mediaType};base64,${data}` }
      }]
    }]
  };
  // NOTE(review): "anthropic_version" in the JSON body duplicates the
  // "anthropic-version" header below — confirm the body field is required.
  if (isAnthropic) body.anthropic_version = "2023-06-01";
  return new Promise((resolve, reject) => {
    const payload = JSON.stringify(body);
    const req = https.default.request({
      hostname,
      port: 443,
      path: isAnthropic ? "/v1/messages" : "/api/v1/chat/completions",
      method: "POST",
      headers: {
        // NOTE(review): Anthropic's API normally authenticates via an
        // "x-api-key" header, not a Bearer token — verify this works with
        // plain API keys.
        "Authorization": `Bearer ${apiKey}`,
        "Content-Type": "application/json",
        ...isAnthropic ? { "anthropic-version": "2023-06-01" } : { "HTTP-Referer": "https://hyperclaw.ai" }
      }
    }, (res) => {
      let raw = "";
      res.on("data", (c) => raw += c);
      res.on("end", () => {
        try {
          const j = JSON.parse(raw);
          if (isAnthropic) {
            // Claude: content is an array of blocks; take the first block's text.
            const text = j.content?.[0]?.text;
            resolve(text || j.error?.message || "(no description)");
          } else {
            // OpenAI-style: choices[0].message.content.
            const text = j.choices?.[0]?.message?.content;
            resolve(text || j.error?.message || "(no description)");
          }
        } catch {
          // Non-JSON response (e.g. HTML error page): return it as-is.
          resolve(raw || "(parse error)");
        }
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
155
|
+
// Module-level limits; assigned lazily by init_vision() via the bundler's
// __esm wrapper (called once at the bottom of this chunk).
var MAX_IMAGE_BYTES, ALLOWED_IMAGE_EXTENSIONS;
var init_vision = require_chunk.__esm({ "src/services/vision.ts"() {
  // 15 MiB cap, applied uniformly to local files, downloads, and data URIs.
  MAX_IMAGE_BYTES = 15 * 1024 * 1024;
  ALLOWED_IMAGE_EXTENSIONS = new Set([
    ".png",
    ".jpg",
    ".jpeg",
    ".gif",
    ".webp"
  ]);
} });
|
|
166
|
+
|
|
167
|
+
//#endregion
|
|
168
|
+
init_vision();
|
|
169
|
+
exports.analyzeImage = analyzeImage;
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-jS-bbMI5.js');
|
|
2
|
+
|
|
3
|
+
//#region packages/core/src/agent/vision-tools.ts
|
|
4
|
+
/**
 * Build the vision tool list exposed to the agent.
 * Currently a single "analyze_image" tool whose handler lazily loads the
 * vision service chunk; without an API key it returns an error string
 * rather than throwing.
 */
function getVisionTools(opts = {}) {
  const { apiKey = "", provider = "anthropic" } = opts;
  const inputSchema = {
    type: "object",
    properties: {
      image: {
        type: "string",
        description: "Image path (~/path), URL (https://...), or data:image/...;base64,..."
      },
      prompt: {
        type: "string",
        description: "What to describe or extract (e.g. \"Describe this scene\", \"List items on this receipt\")"
      }
    },
    required: ["image"]
  };
  const handleAnalyzeImage = async (input) => {
    if (!apiKey) return "Error: No API key configured for vision. Set provider.apiKey or run hyperclaw auth add.";
    const image = input.image?.trim();
    const prompt = input.prompt?.trim() || "Describe this image concisely.";
    if (!image) return "Error: image is required";
    try {
      // Lazy-load the vision chunk so this module stays cheap to import.
      const { analyzeImage } = await Promise.resolve().then(() => require("./vision-BR5Gdb2s.js"));
      return await analyzeImage(image, prompt, apiKey, provider);
    } catch (e) {
      return `Error: ${e.message}`;
    }
  };
  return [{
    name: "analyze_image",
    description: "Analyze an image using a vision model. Describe scenes, receipts, documents, photos. Supports file path, URL, or data URI.",
    input_schema: inputSchema,
    handler: handleAnalyzeImage
  }];
}
|
|
37
|
+
// No-op initializer registered with the bundler's __esm helper — this module
// has no lazily-initialized state, but the hook keeps chunk wiring uniform.
var init_vision_tools = require_chunk.__esm({ "packages/core/src/agent/vision-tools.ts"() {} });

//#endregion
// Lazy getter exports preserve the bundler's live-binding semantics.
Object.defineProperty(exports, 'getVisionTools', {
  enumerable: true,
  get: function () {
    return getVisionTools;
  }
});
Object.defineProperty(exports, 'init_vision_tools', {
  enumerable: true,
  get: function () {
    return init_vision_tools;
  }
});
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-jS-bbMI5.js');
|
|
2
|
+
const require_paths = require('./paths-AIyBxIzm.js');
|
|
3
|
+
const require_paths$1 = require('./paths-DPovhojT.js');
|
|
4
|
+
const fs_extra = require_chunk.__toESM(require("fs-extra"));
|
|
5
|
+
const path = require_chunk.__toESM(require("path"));
|
|
6
|
+
const https = require_chunk.__toESM(require("https"));
|
|
7
|
+
|
|
8
|
+
//#region src/services/voice-transcription.ts
|
|
9
|
+
require_paths$1.init_paths();
|
|
10
|
+
// Hard cap on audio payloads (25 MiB) and the extensions we accept.
const MAX_AUDIO_BYTES = 25 * 1024 * 1024;
const ALLOWED_AUDIO_EXTENSIONS = new Set([
  ".ogg",
  ".oga",
  ".mp3",
  ".wav",
  ".m4a",
  ".mp4",
  ".mpeg",
  ".webm"
]);
/** Flatten a value to a single short line suitable for an error message. */
function sanitizeForError(value) {
  const flattened = String(value ?? "").replace(/[\r\n\t]+/g, " ");
  return flattened.slice(0, 160);
}
/**
 * Accept either a Buffer or an audio file path and return a validated Buffer.
 * Throws when the input is empty, oversized, not a regular file, or has an
 * unsupported extension.
 */
async function loadAudioInput(audioPathOrBuffer) {
  if (Buffer.isBuffer(audioPathOrBuffer)) {
    if (audioPathOrBuffer.length === 0) throw new Error("Audio input is empty");
    if (audioPathOrBuffer.length > MAX_AUDIO_BYTES) throw new Error(`Audio input exceeds ${MAX_AUDIO_BYTES} bytes`);
    return audioPathOrBuffer;
  }
  const absolutePath = path.default.resolve(audioPathOrBuffer);
  const extension = path.default.extname(absolutePath).toLowerCase();
  if (!ALLOWED_AUDIO_EXTENSIONS.has(extension)) throw new Error(`Unsupported audio file type: ${extension || "unknown"}`);
  const fileStats = await fs_extra.default.stat(absolutePath);
  if (!fileStats.isFile()) throw new Error("Audio input must be a file");
  if (fileStats.size === 0) throw new Error("Audio input is empty");
  if (fileStats.size > MAX_AUDIO_BYTES) throw new Error(`Audio input exceeds ${MAX_AUDIO_BYTES} bytes`);
  return fs_extra.default.readFile(absolutePath);
}
|
|
39
|
+
/**
 * Best-effort read of the provider section of the hyperclaw config file.
 * Resolves with { providerId, apiKey } (either may be undefined), or {}
 * when the config file is missing or unreadable.
 */
async function getConfig() {
  try {
    const config = await fs_extra.default.readJson(require_paths.getConfigPath());
    return {
      providerId: config?.provider?.providerId,
      apiKey: config?.provider?.apiKey
    };
  } catch {
    return {};
  }
}
|
|
52
|
+
/**
 * Transcribe an audio buffer with OpenAI Whisper (whisper-1).
 *
 * Builds the multipart/form-data body by hand (file part + model part) so no
 * form-data dependency is needed; the audio is always sent as "audio.ogg"
 * with an octet-stream content type.
 *
 * @param {Buffer} buffer - Raw audio bytes.
 * @param {string} apiKey - OpenAI API key.
 * @returns {Promise<string>} Transcription text, the API's error message,
 *   "[No transcription]", or "[Transcription error: …]" on non-JSON output.
 *   Rejects only on network error.
 */
async function transcribeWithWhisper(buffer, apiKey) {
  return new Promise((resolve, reject) => {
    const boundary = "----HyperClaw" + Date.now();
    const header = [
      `--${boundary}`,
      "Content-Disposition: form-data; name=\"file\"; filename=\"audio.ogg\"",
      "Content-Type: application/octet-stream",
      "",
      ""
    ].join("\r\n");
    const footer = `\r\n--${boundary}\r\nContent-Disposition: form-data; name="model"\r\n\r\nwhisper-1\r\n--${boundary}--\r\n`;
    const body = Buffer.concat([
      Buffer.from(header, "utf8"),
      buffer,
      Buffer.from(footer, "utf8")
    ]);
    const req = https.default.request({
      hostname: "api.openai.com",
      port: 443,
      path: "/v1/audio/transcriptions",
      method: "POST",
      headers: {
        "Authorization": `Bearer ${apiKey}`,
        "Content-Type": `multipart/form-data; boundary=${boundary}`,
        "Content-Length": body.length
      }
    }, (res) => {
      let data = "";
      res.on("data", (c) => data += c);
      res.on("end", () => {
        try {
          const j = JSON.parse(data);
          // Fix: surface the API's own error message (invalid key, rate
          // limit, unsupported format) instead of collapsing every failure
          // to "[No transcription]" — matches the sibling vision handlers.
          resolve(j.text?.trim() || j.error?.message || "[No transcription]");
        } catch {
          resolve(`[Transcription error: ${data.slice(0, 100)}]`);
        }
      });
    });
    req.on("error", reject);
    req.write(body);
    req.end();
  });
}
|
|
95
|
+
/**
 * Transcribe an audio buffer with Gemini 2.0 Flash via inline base64 data.
 *
 * @param {Buffer} buffer - Raw audio bytes (declared as audio/ogg).
 * @param {string} apiKey - Google AI API key (sent as a query parameter).
 * @returns {Promise<string>} Transcription text, the API's error message,
 *   "[No transcription]", or "[Transcription error: …]" on non-JSON output.
 *   Rejects only on network error.
 */
async function transcribeWithGemini(buffer, apiKey) {
  const base64 = buffer.toString("base64");
  const payload = JSON.stringify({
    contents: [{ parts: [{ text: "Transcribe this audio to text. Output only the transcription, no other text." }, { inlineData: {
      mimeType: "audio/ogg",
      data: base64
    } }] }],
    generationConfig: { maxOutputTokens: 1024 }
  });
  return new Promise((resolve, reject) => {
    const req = https.default.request({
      hostname: "generativelanguage.googleapis.com",
      port: 443,
      path: "/v1beta/models/gemini-2.0-flash:generateContent?key=" + encodeURIComponent(apiKey),
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Content-Length": Buffer.byteLength(payload)
      }
    }, (res) => {
      let data = "";
      res.on("data", (c) => data += c);
      res.on("end", () => {
        try {
          const j = JSON.parse(data);
          const text = j.candidates?.[0]?.content?.parts?.[0]?.text?.trim();
          // Fix: surface the API's error message instead of collapsing every
          // failure to "[No transcription]" — consistent with Whisper/vision.
          resolve(text || j.error?.message || "[No transcription]");
        } catch {
          resolve(`[Transcription error: ${data.slice(0, 100)}]`);
        }
      });
    });
    req.on("error", reject);
    req.write(payload);
    req.end();
  });
}
|
|
132
|
+
/**
 * Transcribe audio using configured provider or fallbacks.
 * Providers: OpenAI (Whisper), Google (Gemini), OpenRouter.
 * Env: OPENAI_API_KEY, WHISPER_API_KEY, GOOGLE_AI_API_KEY.
 *
 * Attempt order: configured Google → configured OpenAI/OpenRouter → any
 * OpenAI-style key → any Google key. Never throws; failures come back as a
 * human-readable "[Transcription failed: …]" / hint string so callers can
 * show the result directly.
 */
async function transcribeVoiceNote(audioPathOrBuffer, apiKey) {
  let buffer;
  try {
    buffer = await loadAudioInput(audioPathOrBuffer);
  } catch (e) {
    return `[Transcription failed: ${sanitizeForError(e?.message)}]`;
  }
  const cfg = await getConfig();
  // Explicit apiKey wins; OpenRouter keys are also tried against Whisper.
  const openaiKey = apiKey || process.env.OPENAI_API_KEY || process.env.WHISPER_API_KEY || (cfg.providerId === "openai" || cfg.providerId === "openrouter" ? cfg.apiKey : "");
  const googleKey = process.env.GOOGLE_AI_API_KEY || (cfg.providerId === "google" ? cfg.apiKey : "");
  if (cfg.providerId === "google" && googleKey) try {
    return await transcribeWithGemini(buffer, googleKey);
  } catch {}
  // NOTE(review): when the provider is openai/openrouter, Whisper is tried
  // here and again in the generic branch below with the same key — confirm
  // the double attempt is an intentional one-shot retry.
  if ((cfg.providerId === "openrouter" || cfg.providerId === "openai") && openaiKey) try {
    return await transcribeWithWhisper(buffer, openaiKey);
  } catch {}
  if (openaiKey) try {
    return await transcribeWithWhisper(buffer, openaiKey);
  } catch (e) {
    return `[Transcription failed: ${sanitizeForError(e?.message)}]`;
  }
  if (googleKey) try {
    return await transcribeWithGemini(buffer, googleKey);
  } catch (e) {
    return `[Transcription failed: ${sanitizeForError(e?.message)}]`;
  }
  return "[Voice note — add OPENAI_API_KEY or GOOGLE_AI_API_KEY (or select OpenAI/Google provider in the wizard) for transcription]";
}
|
|
169
|
+
|
|
170
|
+
//#endregion
|
|
171
|
+
exports.transcribeVoiceNote = transcribeVoiceNote;
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-jS-bbMI5.js');
|
|
2
|
+
const fs_extra = require_chunk.__toESM(require("fs-extra"));
|
|
3
|
+
const path = require_chunk.__toESM(require("path"));
|
|
4
|
+
const os = require_chunk.__toESM(require("os"));
|
|
5
|
+
const crypto = require_chunk.__toESM(require("crypto"));
|
|
6
|
+
const http = require_chunk.__toESM(require("http"));
|
|
7
|
+
const https = require_chunk.__toESM(require("https"));
|
|
8
|
+
|
|
9
|
+
//#region packages/core/src/agent/website-watch-tools.ts
|
|
10
|
+
/**
 * Fetch a URL's body as text with a 15-second timeout.
 * Rejects on network error or timeout; resolves with the raw body otherwise
 * (HTTP status codes are intentionally not checked here).
 */
async function fetchUrl(url) {
  return new Promise((resolve, reject) => {
    const client = url.startsWith("https") ? https.default : http.default;
    const onResponse = (res) => {
      const pieces = [];
      res.on("data", (piece) => pieces.push(piece));
      res.on("end", () => resolve(pieces.join("")));
    };
    const req = client.get(url, { timeout: 15e3 }, onResponse);
    req.on("timeout", () => {
      req.destroy();
      reject(new Error("Timeout"));
    });
    req.on("error", reject);
  });
}
|
|
25
|
+
/**
 * Remove every "<tag …>…</tag>" region for each listed tag name,
 * case-insensitively. Each removed region becomes a single space; an
 * unclosed open tag removes everything to the end of the string.
 */
function stripBlockedTagContent(html, tagNames) {
  let output = String(html || "");
  for (const tag of tagNames) {
    const openMarker = `<${tag}`;
    const closeMarker = `</${tag}>`;
    let lowered = output.toLowerCase();
    let searchFrom = 0;
    while (searchFrom < output.length) {
      const openAt = lowered.indexOf(openMarker, searchFrom);
      if (openAt < 0) break;
      const closeAt = lowered.indexOf(closeMarker, openAt);
      const sliceEnd = closeAt < 0 ? output.length : closeAt + closeMarker.length;
      output = `${output.slice(0, openAt)} ${output.slice(sliceEnd)}`;
      lowered = output.toLowerCase();
      searchFrom = openAt + 1;
    }
  }
  return output;
}
|
|
44
|
+
/**
 * Drop HTML tag contents, keeping only text outside of tags.
 * Both '<' and '>' are themselves replaced by spaces; characters between
 * them are discarded.
 */
function stripTags(html) {
  let result = "";
  let insideTag = false;
  for (const ch of String(html || "")) {
    if (ch === "<") {
      insideTag = true;
      result += " ";
    } else if (ch === ">") {
      insideTag = false;
      result += " ";
    } else if (!insideTag) {
      result += ch;
    }
  }
  return result;
}
|
|
62
|
+
/**
 * Fingerprint a page's visible text: script/style blocks removed, tags
 * stripped, whitespace collapsed, capped at 50k chars, then the first
 * 16 hex chars of its SHA-256 digest.
 */
function hashContent(html) {
  const withoutBlocked = stripBlockedTagContent(html, ["script", "style"]);
  const visibleText = stripTags(withoutBlocked).replace(/\s+/g, " ").slice(0, 5e4);
  const digest = crypto.default.createHash("sha256").update(visibleText).digest("hex");
  return digest.slice(0, 16);
}
|
|
66
|
+
// Read the persisted watch map (url -> entry); missing/corrupt file -> {}.
async function loadWatches() {
  try {
    return await fs_extra.default.readJson(WATCH_FILE);
  } catch {
    return {};
  }
}
// Persist the watch map, creating ~/.hyperclaw/ if needed.
async function saveWatches(watches) {
  await fs_extra.default.ensureDir(path.default.dirname(WATCH_FILE));
  await fs_extra.default.writeJson(WATCH_FILE, watches, { spaces: 2 });
}
|
|
77
|
+
/**
 * Build the three website-watch tools: add a URL, check watched URLs for
 * content changes (by text-content hash), and list watched URLs.
 * State is persisted in ~/.hyperclaw/website-watches.json.
 */
function getWebsiteWatchTools() {
  return [
    {
      name: "watch_website_add",
      description: "Add a URL to the website change watch list. Use with watch_website_check to detect changes.",
      input_schema: {
        type: "object",
        properties: { url: {
          type: "string",
          description: "URL to watch (e.g. https://example.com)"
        } },
        required: ["url"]
      },
      handler: async (input) => {
        const url = input.url.trim();
        if (!url.startsWith("http")) return "Error: URL must start with http:// or https://";
        const watches = await loadWatches();
        // NOTE(review): on fetch failure the error string itself is hashed
        // and stored as the baseline, so the first successful fetch will
        // report a change — confirm this is the intended behavior.
        const content = await fetchUrl(url).catch((e) => `[fetch error: ${e.message}]`);
        const hash = hashContent(content);
        watches[url] = {
          url,
          lastHash: hash,
          lastCheck: (/* @__PURE__ */ new Date()).toISOString(),
          lastSnippet: content.slice(0, 200)
        };
        await saveWatches(watches);
        return `Added: ${url}. Initial hash: ${hash}`;
      }
    },
    {
      name: "watch_website_check",
      description: "Check watched URLs for changes. Returns list of URLs that changed since last check.",
      input_schema: {
        type: "object",
        properties: { url: {
          type: "string",
          description: "Specific URL to check (optional — checks all if omitted)"
        } }
      },
      handler: async (input) => {
        const watches = await loadWatches();
        const filterUrl = input.url;
        const toCheck = filterUrl ? [filterUrl] : Object.keys(watches);
        const changed = [];
        for (const url of toCheck) {
          const ent = watches[url];
          // Unknown URLs (e.g. a filter that was never added) are skipped.
          if (!ent) continue;
          try {
            const content = await fetchUrl(url);
            const hash = hashContent(content);
            ent.lastCheck = (/* @__PURE__ */ new Date()).toISOString();
            if (hash !== ent.lastHash) {
              ent.lastHash = hash;
              ent.lastSnippet = content.slice(0, 300);
              changed.push(url);
            }
            watches[url] = ent;
          } catch (e) {
            // NOTE(review): fetch errors are reported in the same "Changed"
            // list as real changes — confirm callers tolerate this.
            changed.push(`${url} [error: ${e.message}]`);
          }
        }
        await saveWatches(watches);
        if (changed.length === 0) return "No changes detected.";
        return `Changed: ${changed.join(", ")}`;
      }
    },
    {
      name: "watch_website_list",
      description: "List all watched URLs.",
      input_schema: {
        type: "object",
        properties: {}
      },
      handler: async () => {
        const watches = await loadWatches();
        const urls = Object.keys(watches);
        if (urls.length === 0) return "No watched URLs. Add with watch_website_add.";
        return urls.map((u) => `- ${u} (last: ${watches[u].lastCheck})`).join("\n");
      }
    }
  ];
}
|
|
159
|
+
// Path of the persisted watch list; assigned by the bundler's lazy
// initializer so os.homedir() is only resolved when the module is used.
var WATCH_FILE;
var init_website_watch_tools = require_chunk.__esm({ "packages/core/src/agent/website-watch-tools.ts"() {
  WATCH_FILE = path.default.join(os.default.homedir(), ".hyperclaw", "website-watches.json");
} });
|
|
163
|
+
|
|
164
|
+
//#endregion
|
|
165
|
+
Object.defineProperty(exports, 'getWebsiteWatchTools', {
|
|
166
|
+
enumerable: true,
|
|
167
|
+
get: function () {
|
|
168
|
+
return getWebsiteWatchTools;
|
|
169
|
+
}
|
|
170
|
+
});
|
|
171
|
+
Object.defineProperty(exports, 'init_website_watch_tools', {
|
|
172
|
+
enumerable: true,
|
|
173
|
+
get: function () {
|
|
174
|
+
return init_website_watch_tools;
|
|
175
|
+
}
|
|
176
|
+
});
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
// Thin re-export shim: run the website-watch-tools initializer, then
// re-export its public factory under this chunk's name.
// NOTE(review): require_chunk appears unused here — presumably retained for
// bundler chunk-ordering side effects; confirm before removing.
const require_chunk = require('./chunk-jS-bbMI5.js');
const require_website_watch_tools = require('./website-watch-tools-Cqp7RPvn.js');

require_website_watch_tools.init_website_watch_tools();
exports.getWebsiteWatchTools = require_website_watch_tools.getWebsiteWatchTools;