@saccolabs/tars 1.20.2 → 1.22.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dash/server.js +28 -10
- package/dist/channels/discord/discord-channel.d.ts +1 -0
- package/dist/channels/discord/discord-channel.js +9 -0
- package/dist/channels/discord/discord-channel.js.map +1 -1
- package/dist/cli/commands/setup.js +85 -5
- package/dist/cli/commands/setup.js.map +1 -1
- package/dist/inference/LlamaCppGenerator.js +23 -4
- package/dist/inference/LlamaCppGenerator.js.map +1 -1
- package/dist/supervisor/gemini-engine.d.ts +7 -1
- package/dist/supervisor/gemini-engine.js +123 -13
- package/dist/supervisor/gemini-engine.js.map +1 -1
- package/dist/supervisor/main.js +5 -0
- package/dist/supervisor/main.js.map +1 -1
- package/dist/supervisor/supervisor.js +9 -1
- package/dist/supervisor/supervisor.js.map +1 -1
- package/dist/tools/get-quota.d.ts +9 -1
- package/dist/tools/get-quota.js +56 -5
- package/dist/tools/get-quota.js.map +1 -1
- package/dist/utils/endpoint-probe.d.ts +25 -0
- package/dist/utils/endpoint-probe.js +80 -0
- package/dist/utils/endpoint-probe.js.map +1 -0
- package/package.json +1 -1
- package/src/prompts/system.md +4 -0
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Result of probing a local inference endpoint.
|
|
3
|
+
*/
|
|
4
|
+
export interface EndpointProbeResult {
|
|
5
|
+
reachable: boolean;
|
|
6
|
+
models: string[];
|
|
7
|
+
error?: string;
|
|
8
|
+
}
|
|
9
|
+
/**
|
|
10
|
+
* Model info returned from the /v1/models endpoint.
|
|
11
|
+
*/
|
|
12
|
+
export interface LocalModelInfo {
|
|
13
|
+
id: string;
|
|
14
|
+
object?: string;
|
|
15
|
+
owned_by?: string;
|
|
16
|
+
}
|
|
17
|
+
/**
|
|
18
|
+
* Probes a local inference endpoint by checking /v1/models and optionally
|
|
19
|
+
* the /v1/chat/completions health endpoint.
|
|
20
|
+
*
|
|
21
|
+
* @param baseUrl - The base URL of the OpenAI-compatible endpoint
|
|
22
|
+
* @param timeoutMs - Timeout in milliseconds (default: 5000)
|
|
23
|
+
* @returns Probe result with reachability and available models
|
|
24
|
+
*/
|
|
25
|
+
export declare function probeEndpoint(baseUrl: string, timeoutMs?: number): Promise<EndpointProbeResult>;
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
 * Performs a GET request that is aborted after `timeoutMs` milliseconds.
 * The abort timer is always cleared, whether the fetch resolves or rejects.
 *
 * @param {string} url - Absolute URL to request
 * @param {number} timeoutMs - Abort deadline in milliseconds
 * @returns {Promise<Response>} The fetch response (rejects on network error/abort)
 */
async function fetchWithTimeout(url, timeoutMs) {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
        return await fetch(url, {
            method: 'GET',
            signal: controller.signal
        });
    }
    finally {
        clearTimeout(timer);
    }
}
/**
 * Probes a local inference endpoint by checking /v1/models and, if that
 * request fails outright, falling back to a plain /health connectivity check.
 *
 * A server that answers /v1/models with a non-OK status or a non-JSON body is
 * still reported as reachable (with an empty model list) — only a failed
 * connection or timeout marks the endpoint unreachable.
 *
 * @param {string} baseUrl - The base URL of the OpenAI-compatible endpoint
 *   (trailing slashes and an optional `/v1` suffix are handled)
 * @param {number} [timeoutMs=5000] - Timeout in milliseconds per request
 * @returns {Promise<{reachable: boolean, models: string[], error?: string}>}
 *   Probe result with reachability and available models
 */
export async function probeEndpoint(baseUrl, timeoutMs = 5000) {
    const normalizedUrl = baseUrl.replace(/\/+$/, '');
    // Accept base URLs given either with or without the /v1 suffix.
    const modelsUrl = normalizedUrl.endsWith('/v1')
        ? `${normalizedUrl}/models`
        : `${normalizedUrl}/v1/models`;
    try {
        const response = await fetchWithTimeout(modelsUrl, timeoutMs);
        if (response.ok) {
            const models = [];
            try {
                const data = await response.json();
                if (data && Array.isArray(data.data)) {
                    for (const model of data.data) {
                        if (model.id) {
                            models.push(model.id);
                        }
                    }
                }
            }
            catch {
                // Server answered OK but the body was not valid JSON —
                // still reachable, just no model listing to report.
            }
            return { reachable: true, models };
        }
        // Server reachable but /v1/models not supported — still valid
        return { reachable: true, models: [] };
    }
    catch (error) {
        // If /v1/models failed, try a simpler connectivity check
        try {
            const healthUrl = normalizedUrl.endsWith('/v1')
                ? normalizedUrl.replace(/\/v1$/, '/health')
                : `${normalizedUrl}/health`;
            const response = await fetchWithTimeout(healthUrl, timeoutMs);
            if (response.ok) {
                return { reachable: true, models: [] };
            }
        }
        catch {
            // Both /v1/models and /health failed
        }
        // Node's fetch wraps socket failures: ECONNREFUSED surfaces on
        // error.cause, while a triggered AbortController yields AbortError.
        const errorMessage = error.cause?.code === 'ECONNREFUSED'
            ? `Connection refused at ${normalizedUrl}`
            : error.name === 'AbortError'
                ? `Connection timed out after ${timeoutMs}ms`
                : error.message || 'Unknown error';
        return {
            reachable: false,
            models: [],
            error: errorMessage
        };
    }
}
|
|
80
|
+
//# sourceMappingURL=endpoint-probe.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"endpoint-probe.js","sourceRoot":"","sources":["../../src/utils/endpoint-probe.ts"],"names":[],"mappings":"AAoBA;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAC/B,OAAe,EACf,YAAoB,IAAI;IAExB,MAAM,aAAa,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAElD,uBAAuB;IACvB,IAAI,CAAC;QACD,MAAM,SAAS,GAAG,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC;YAC3C,CAAC,CAAC,GAAG,aAAa,SAAS;YAC3B,CAAC,CAAC,GAAG,aAAa,YAAY,CAAC;QAEnC,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;QACzC,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,EAAE,SAAS,CAAC,CAAC;QAEhE,IAAI,CAAC;YACD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,SAAS,EAAE;gBACpC,MAAM,EAAE,KAAK;gBACb,MAAM,EAAE,UAAU,CAAC,MAAM;aAC5B,CAAC,CAAC;YAEH,YAAY,CAAC,OAAO,CAAC,CAAC;YAEtB,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;gBACd,MAAM,IAAI,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAQ,CAAC;gBAC5C,MAAM,MAAM,GAAa,EAAE,CAAC;gBAE5B,IAAI,IAAI,CAAC,IAAI,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;oBACxC,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC5B,IAAI,KAAK,CAAC,EAAE,EAAE,CAAC;4BACX,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;wBAC1B,CAAC;oBACL,CAAC;gBACL,CAAC;gBAED,OAAO,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC;YACvC,CAAC;YAED,8DAA8D;YAC9D,OAAO,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,CAAC;QAC3C,CAAC;gBAAS,CAAC;YACP,YAAY,CAAC,OAAO,CAAC,CAAC;QAC1B,CAAC;IACL,CAAC;IAAC,OAAO,KAAU,EAAE,CAAC;QAClB,yDAAyD;QACzD,IAAI,CAAC;YACD,MAAM,SAAS,GAAG,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC;gBAC3C,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,OAAO,EAAE,SAAS,CAAC;gBAC3C,CAAC,CAAC,GAAG,aAAa,SAAS,CAAC;YAEhC,MAAM,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC;YACzC,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,EAAE,SAAS,CAAC,CAAC;YAEhE,IAAI,CAAC;gBACD,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,SAAS,EAAE;oBACpC,MAAM,EAAE,KAAK;oBACb,MAAM,EAAE,UAAU,CAAC,MAAM;iBAC5B,CAAC,CAAC;gBAEH,YAAY,CAAC,OAAO,CAAC,CAAC;gBAEtB,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;oBACd,OAAO,EAAE,SAAS,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,CAAC;gBAC3C,CAAC;YACL,CAAC;oBAAS,CAAC;gBACP,YAAY,CAAC,OAAO,CAAC,CAAC;YAC1B,CAAC;QACL,CAAC;QAAC,MAAM,CAAC;YACL,qCAAqC;QACzC,CAA
C;QAED,MAAM,YAAY,GACd,KAAK,CAAC,KAAK,EAAE,IAAI,KAAK,cAAc;YAChC,CAAC,CAAC,yBAAyB,aAAa,EAAE;YAC1C,CAAC,CAAC,KAAK,CAAC,IAAI,KAAK,YAAY;gBAC3B,CAAC,CAAC,8BAA8B,SAAS,IAAI;gBAC7C,CAAC,CAAC,KAAK,CAAC,OAAO,IAAI,eAAe,CAAC;QAE7C,OAAO;YACH,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,EAAE;YACV,KAAK,EAAE,YAAY;SACtB,CAAC;IACN,CAAC;AACL,CAAC"}
|
package/package.json
CHANGED
package/src/prompts/system.md
CHANGED
|
@@ -3,6 +3,10 @@
|
|
|
3
3
|
- **Assistant Name**: {{ASSISTANT_NAME}}
|
|
4
4
|
- **Instance ID**: {{INSTANCE_NAME}}
|
|
5
5
|
- **Designated Role**: {{INSTANCE_ROLE}}
|
|
6
|
+
- **Inference Backend**: {{INFERENCE_BACKEND}}
|
|
7
|
+
- **Model**: {{MODEL_NAME}}
|
|
8
|
+
- **Context Window**: {{CONTEXT_WINDOW}} tokens
|
|
9
|
+
- **Inference Endpoint**: {{INFERENCE_ENDPOINT}}
|
|
6
10
|
|
|
7
11
|
You are **{{ASSISTANT_NAME}}**, a personal AI assistant. You are autonomous, proactive, and capable of self-improvement. You serve one user as a trusted generalist across all domains.
|
|
8
12
|
|