@mcoda/agents 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. package/CHANGELOG.md +7 -0
  2. package/LICENSE +21 -0
  3. package/README.md +9 -0
  4. package/dist/AgentService/AgentService.d.ts +22 -0
  5. package/dist/AgentService/AgentService.d.ts.map +1 -0
  6. package/dist/AgentService/AgentService.js +291 -0
  7. package/dist/adapters/AdapterTypes.d.ts +29 -0
  8. package/dist/adapters/AdapterTypes.d.ts.map +1 -0
  9. package/dist/adapters/AdapterTypes.js +1 -0
  10. package/dist/adapters/codex/CodexAdapter.d.ts +11 -0
  11. package/dist/adapters/codex/CodexAdapter.d.ts.map +1 -0
  12. package/dist/adapters/codex/CodexAdapter.js +58 -0
  13. package/dist/adapters/codex/CodexCliRunner.d.ts +13 -0
  14. package/dist/adapters/codex/CodexCliRunner.d.ts.map +1 -0
  15. package/dist/adapters/codex/CodexCliRunner.js +143 -0
  16. package/dist/adapters/gemini/GeminiAdapter.d.ts +11 -0
  17. package/dist/adapters/gemini/GeminiAdapter.d.ts.map +1 -0
  18. package/dist/adapters/gemini/GeminiAdapter.js +53 -0
  19. package/dist/adapters/gemini/GeminiCliRunner.d.ts +13 -0
  20. package/dist/adapters/gemini/GeminiCliRunner.d.ts.map +1 -0
  21. package/dist/adapters/gemini/GeminiCliRunner.js +68 -0
  22. package/dist/adapters/local/LocalAdapter.d.ts +11 -0
  23. package/dist/adapters/local/LocalAdapter.d.ts.map +1 -0
  24. package/dist/adapters/local/LocalAdapter.js +38 -0
  25. package/dist/adapters/ollama/OllamaCliAdapter.d.ts +11 -0
  26. package/dist/adapters/ollama/OllamaCliAdapter.d.ts.map +1 -0
  27. package/dist/adapters/ollama/OllamaCliAdapter.js +53 -0
  28. package/dist/adapters/ollama/OllamaCliRunner.d.ts +13 -0
  29. package/dist/adapters/ollama/OllamaCliRunner.d.ts.map +1 -0
  30. package/dist/adapters/ollama/OllamaCliRunner.js +61 -0
  31. package/dist/adapters/ollama/OllamaRemoteAdapter.d.ts +23 -0
  32. package/dist/adapters/ollama/OllamaRemoteAdapter.d.ts.map +1 -0
  33. package/dist/adapters/ollama/OllamaRemoteAdapter.js +199 -0
  34. package/dist/adapters/openai/OpenAiAdapter.d.ts +11 -0
  35. package/dist/adapters/openai/OpenAiAdapter.d.ts.map +1 -0
  36. package/dist/adapters/openai/OpenAiAdapter.js +51 -0
  37. package/dist/adapters/openai/OpenAiCliAdapter.d.ts +11 -0
  38. package/dist/adapters/openai/OpenAiCliAdapter.d.ts.map +1 -0
  39. package/dist/adapters/openai/OpenAiCliAdapter.js +57 -0
  40. package/dist/adapters/qa/QaAdapter.d.ts +11 -0
  41. package/dist/adapters/qa/QaAdapter.d.ts.map +1 -0
  42. package/dist/adapters/qa/QaAdapter.js +37 -0
  43. package/dist/adapters/zhipu/ZhipuApiAdapter.d.ts +30 -0
  44. package/dist/adapters/zhipu/ZhipuApiAdapter.d.ts.map +1 -0
  45. package/dist/adapters/zhipu/ZhipuApiAdapter.js +255 -0
  46. package/dist/index.d.ts +12 -0
  47. package/dist/index.d.ts.map +1 -0
  48. package/dist/index.js +11 -0
  49. package/package.json +41 -0
@@ -0,0 +1,53 @@
1
+ import { geminiHealthy, runGeminiExec, runGeminiExecStream } from "./GeminiCliRunner.js";
2
/**
 * Agent adapter backed by the locally installed `gemini` CLI.
 *
 * Health checks probe the CLI binary via GeminiCliRunner; invocations shell
 * out through `runGeminiExec` / `runGeminiExecStream`, which pipe the request
 * input to `gemini prompt` on stdin.
 */
export class GeminiAdapter {
    config;
    constructor(config) {
        this.config = config;
    }
    /** Capability list is taken verbatim from the adapter configuration. */
    async getCapabilities() {
        return this.config.capabilities;
    }
    /** Probes the CLI and reports `healthy` or `unreachable` plus probe latency. */
    async healthCheck() {
        const startedAt = Date.now();
        const probe = geminiHealthy();
        return {
            agentId: this.config.agent.id,
            status: probe.ok ? "healthy" : "unreachable",
            lastCheckedAt: new Date().toISOString(),
            latencyMs: Date.now() - startedAt,
            details: { adapter: "gemini-cli", ...probe.details },
        };
    }
    /**
     * Runs one blocking CLI invocation.
     * Throws (via `geminiHealthy(true)`) when the CLI is unavailable.
     */
    async invoke(request) {
        const cliDetails = geminiHealthy(true).details;
        const { output, raw } = runGeminiExec(request.input, this.config.model);
        const adapterType = this.config.adapter ?? "gemini-cli";
        return {
            output,
            adapter: adapterType,
            model: this.config.model,
            metadata: {
                mode: "cli",
                capabilities: this.config.capabilities,
                adapterType,
                authMode: "cli",
                cli: cliDetails,
                raw,
            },
        };
    }
    /** Streams CLI stdout chunks, wrapping each one as an invocation result. */
    async *invokeStream(request) {
        const cliDetails = geminiHealthy(true).details;
        const chunks = runGeminiExecStream(request.input, this.config.model);
        for await (const piece of chunks) {
            yield {
                output: piece.output,
                adapter: this.config.adapter ?? "gemini-cli",
                model: this.config.model,
                metadata: {
                    mode: "cli",
                    streaming: true,
                    cli: cliDetails,
                    raw: piece.raw,
                },
            };
        }
    }
}
@@ -0,0 +1,13 @@
1
/**
 * Probes the local `gemini` CLI (runs `gemini --version`; skipped when
 * MCODA_SKIP_CLI_CHECKS=1).
 * @param throwOnError When true, throws an AUTH_ERROR-prefixed Error instead
 *   of returning `ok: false`.
 * @returns `ok` plus optional diagnostic `details` (version on success, or
 *   reason / stderr / exit code on failure).
 */
export declare const geminiHealthy: (throwOnError?: boolean) => {
    ok: boolean;
    details?: Record<string, unknown>;
};
/**
 * Runs a single prompt through the `gemini` CLI (piped on stdin), blocking
 * until the CLI exits; `--model` is forwarded when provided.
 * @returns Trimmed `output` plus the untrimmed `raw` stdout.
 */
export declare const runGeminiExec: (prompt: string, model?: string) => {
    output: string;
    raw: string;
};
/**
 * Streams a prompt through the `gemini` CLI, yielding stdout chunks as they
 * arrive; throws if the CLI exits with a non-zero status.
 */
export declare function runGeminiExecStream(prompt: string, model?: string): AsyncGenerator<{
    output: string;
    raw: string;
}, void, unknown>;
//# sourceMappingURL=GeminiCliRunner.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"GeminiCliRunner.d.ts","sourceRoot":"","sources":["../../../src/adapters/gemini/GeminiCliRunner.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,aAAa,GAAI,sBAAoB,KAAG;IAAE,EAAE,EAAE,OAAO,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;CAoBpG,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,QAAQ,MAAM,EAAE,QAAQ,MAAM,KAAG;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAA;CAa3F,CAAC;AAEF,wBAAuB,mBAAmB,CACxC,MAAM,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,MAAM,GACb,cAAc,CAAC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAA;CAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CAgChE"}
@@ -0,0 +1,68 @@
1
+ import { spawn, spawnSync } from "node:child_process";
2
+ export const geminiHealthy = (throwOnError = false) => {
3
+ if (process.env.MCODA_SKIP_CLI_CHECKS === "1") {
4
+ return { ok: true, details: { skipped: true } };
5
+ }
6
+ const result = spawnSync("gemini", ["--version"], { encoding: "utf8" });
7
+ if (result.error || result.status !== 0) {
8
+ const details = {
9
+ reason: result.error ? "missing_cli" : "cli_error",
10
+ exitCode: result.status,
11
+ stderr: result.stderr?.toString(),
12
+ error: result.error?.message,
13
+ };
14
+ if (throwOnError) {
15
+ const error = new Error(`AUTH_ERROR: gemini CLI unavailable (${details.reason})`);
16
+ error.details = details;
17
+ throw error;
18
+ }
19
+ return { ok: false, details };
20
+ }
21
+ return { ok: true, details: { version: result.stdout?.toString().trim() } };
22
+ };
23
+ export const runGeminiExec = (prompt, model) => {
24
+ geminiHealthy(true);
25
+ const args = ["prompt"];
26
+ if (model)
27
+ args.push("--model", model);
28
+ const result = spawnSync("gemini", args, { input: prompt, encoding: "utf8" });
29
+ if (result.error || result.status !== 0) {
30
+ const error = new Error(`AUTH_ERROR: gemini CLI failed (${result.error?.message ?? `exit ${result.status}`})`);
31
+ error.details = { reason: "cli_error", exitCode: result.status, stderr: result.stderr };
32
+ throw error;
33
+ }
34
+ const stdout = result.stdout?.toString() ?? "";
35
+ const output = stdout.trim();
36
+ return { output, raw: stdout };
37
+ };
38
+ export async function* runGeminiExecStream(prompt, model) {
39
+ geminiHealthy(true);
40
+ const args = ["prompt"];
41
+ if (model)
42
+ args.push("--model", model);
43
+ const child = spawn("gemini", args, { stdio: ["pipe", "pipe", "pipe"] });
44
+ child.stdin.write(prompt);
45
+ child.stdin.end();
46
+ let stderr = "";
47
+ child.stderr?.setEncoding("utf8");
48
+ child.stderr?.on("data", (chunk) => {
49
+ stderr += chunk.toString();
50
+ });
51
+ const closePromise = new Promise((resolve, reject) => {
52
+ child.on("error", (err) => reject(err));
53
+ child.on("close", (code) => resolve(code ?? 0));
54
+ });
55
+ const stream = child.stdout;
56
+ stream?.setEncoding("utf8");
57
+ for await (const chunk of stream ?? []) {
58
+ if (!chunk)
59
+ continue;
60
+ yield { output: chunk, raw: chunk };
61
+ }
62
+ const exitCode = await closePromise;
63
+ if (exitCode !== 0) {
64
+ const error = new Error(`AUTH_ERROR: gemini CLI failed (exit ${exitCode}): ${stderr || "no output"}`);
65
+ error.details = { reason: "cli_error", exitCode, stderr };
66
+ throw error;
67
+ }
68
+ }
@@ -0,0 +1,11 @@
1
import { AgentHealth } from "@mcoda/shared";
import { AdapterConfig, AgentAdapter, InvocationRequest, InvocationResult } from "../AdapterTypes.js";
/**
 * Stub adapter that fabricates responses locally without contacting any
 * model backend; invoke/invokeStream echo the input with a stub prefix.
 */
export declare class LocalAdapter implements AgentAdapter {
    private config;
    constructor(config: AdapterConfig);
    /** Capability list taken verbatim from the adapter config. */
    getCapabilities(): Promise<string[]>;
    /** Always reports `healthy` — there is no external dependency to probe. */
    healthCheck(): Promise<AgentHealth>;
    /** Returns a `local-stub:`-prefixed echo of the request input. */
    invoke(request: InvocationRequest): Promise<InvocationResult>;
    /** Yields a single `local-stream:`-prefixed echo chunk. */
    invokeStream(request: InvocationRequest): AsyncGenerator<InvocationResult, void, unknown>;
}
//# sourceMappingURL=LocalAdapter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"LocalAdapter.d.ts","sourceRoot":"","sources":["../../../src/adapters/local/LocalAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtG,qBAAa,YAAa,YAAW,YAAY;IACnC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,aAAa;IAEnC,eAAe,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAIpC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;IAUnC,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAc5D,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,cAAc,CAAC,gBAAgB,EAAE,IAAI,EAAE,OAAO,CAAC;CAQjG"}
@@ -0,0 +1,38 @@
1
+ export class LocalAdapter {
2
+ constructor(config) {
3
+ this.config = config;
4
+ }
5
+ async getCapabilities() {
6
+ return this.config.capabilities;
7
+ }
8
+ async healthCheck() {
9
+ return {
10
+ agentId: this.config.agent.id,
11
+ status: "healthy",
12
+ lastCheckedAt: new Date().toISOString(),
13
+ latencyMs: 0,
14
+ details: { adapter: "local-model", model: this.config.model },
15
+ };
16
+ }
17
+ async invoke(request) {
18
+ return {
19
+ output: `local-stub:${request.input}`,
20
+ adapter: this.config.adapter ?? "local-model",
21
+ model: this.config.model,
22
+ metadata: {
23
+ mode: "local",
24
+ capabilities: this.config.capabilities,
25
+ adapterType: this.config.adapter ?? "local-model",
26
+ authMode: "local",
27
+ },
28
+ };
29
+ }
30
+ async *invokeStream(request) {
31
+ yield {
32
+ output: `local-stream:${request.input}`,
33
+ adapter: this.config.adapter ?? "local-model",
34
+ model: this.config.model,
35
+ metadata: { mode: "local", streaming: true },
36
+ };
37
+ }
38
+ }
@@ -0,0 +1,11 @@
1
import { AgentHealth } from "@mcoda/shared";
import { AdapterConfig, AgentAdapter, InvocationRequest, InvocationResult } from "../AdapterTypes.js";
/**
 * Agent adapter that shells out to a locally installed `ollama` CLI
 * (`ollama run <model>`) via the OllamaCliRunner helpers.
 */
export declare class OllamaCliAdapter implements AgentAdapter {
    private config;
    constructor(config: AdapterConfig);
    /** Capability list taken verbatim from the adapter config. */
    getCapabilities(): Promise<string[]>;
    /** Probes the `ollama` binary; reports healthy/unreachable with latency. */
    healthCheck(): Promise<AgentHealth>;
    /** Runs one blocking `ollama run` invocation and wraps the result. */
    invoke(request: InvocationRequest): Promise<InvocationResult>;
    /** Streams stdout chunks from `ollama run`, one result per chunk. */
    invokeStream(request: InvocationRequest): AsyncGenerator<InvocationResult, void, unknown>;
}
//# sourceMappingURL=OllamaCliAdapter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"OllamaCliAdapter.d.ts","sourceRoot":"","sources":["../../../src/adapters/ollama/OllamaCliAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAGtG,qBAAa,gBAAiB,YAAW,YAAY;IACvC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,aAAa;IAEnC,eAAe,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAIpC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;IAanC,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAgB5D,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,cAAc,CAAC,gBAAgB,EAAE,IAAI,EAAE,OAAO,CAAC;CAiBjG"}
@@ -0,0 +1,53 @@
1
+ import { ollamaHealthy, runOllamaExec, runOllamaExecStream } from "./OllamaCliRunner.js";
2
/**
 * Agent adapter that shells out to a locally installed `ollama` CLI via the
 * OllamaCliRunner helpers (`ollama run <model>` with the prompt on stdin).
 */
export class OllamaCliAdapter {
    config;
    constructor(config) {
        this.config = config;
    }
    /** Capability list is read from the adapter configuration. */
    async getCapabilities() {
        return this.config.capabilities;
    }
    /** Probes the `ollama` binary and reports health plus probe latency. */
    async healthCheck() {
        const startedAt = Date.now();
        const probe = ollamaHealthy();
        return {
            agentId: this.config.agent.id,
            status: probe.ok ? "healthy" : "unreachable",
            lastCheckedAt: new Date().toISOString(),
            latencyMs: Date.now() - startedAt,
            details: { adapter: "ollama-cli", ...(probe.details ?? {}) },
        };
    }
    /** Runs one blocking `ollama run` invocation and wraps the result. */
    async invoke(request) {
        const { output, raw } = runOllamaExec(request.input, this.config.model);
        const adapterType = this.config.adapter ?? "ollama-cli";
        return {
            output,
            adapter: adapterType,
            model: this.config.model,
            metadata: {
                mode: "cli",
                capabilities: this.config.capabilities,
                adapterType,
                authMode: "cli",
                raw,
            },
        };
    }
    /** Streams stdout chunks from `ollama run`, yielding one result each. */
    async *invokeStream(request) {
        const adapterType = this.config.adapter ?? "ollama-cli";
        for await (const piece of runOllamaExecStream(request.input, this.config.model)) {
            yield {
                output: piece.output,
                adapter: adapterType,
                model: this.config.model,
                metadata: {
                    mode: "cli",
                    capabilities: this.config.capabilities,
                    adapterType,
                    authMode: "cli",
                    raw: piece.raw,
                    streaming: true,
                },
            };
        }
    }
}
@@ -0,0 +1,13 @@
1
/**
 * Probes the local `ollama` CLI (runs `ollama --version`).
 * @param throwOnError When true, throws an AUTH_ERROR-prefixed Error instead
 *   of returning `ok: false`.
 * @returns `ok` plus optional diagnostic `details` (version on success, or
 *   reason / stderr / exit code on failure).
 */
export declare const ollamaHealthy: (throwOnError?: boolean) => {
    ok: boolean;
    details?: Record<string, unknown>;
};
/**
 * Runs a single prompt through `ollama run <model>` (piped on stdin; model
 * defaults to "llama3"), blocking until the CLI exits.
 * @returns Trimmed `output` plus the untrimmed `raw` stdout.
 */
export declare const runOllamaExec: (prompt: string, model?: string) => {
    output: string;
    raw: string;
};
/**
 * Streams a prompt through `ollama run`, yielding stdout chunks as they
 * arrive; throws if the CLI exits with a non-zero status.
 */
export declare function runOllamaExecStream(prompt: string, model?: string): AsyncGenerator<{
    output: string;
    raw: string;
}, void, unknown>;
//# sourceMappingURL=OllamaCliRunner.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"OllamaCliRunner.d.ts","sourceRoot":"","sources":["../../../src/adapters/ollama/OllamaCliRunner.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,aAAa,GAAI,sBAAoB,KAAG;IAAE,EAAE,EAAE,OAAO,CAAC;IAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;CAiBpG,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,QAAQ,MAAM,EAAE,QAAQ,MAAM,KAAG;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAA;CAY3F,CAAC;AAEF,wBAAuB,mBAAmB,CACxC,MAAM,EAAE,MAAM,EACd,KAAK,CAAC,EAAE,MAAM,GACb,cAAc,CAAC;IAAE,MAAM,EAAE,MAAM,CAAC;IAAC,GAAG,EAAE,MAAM,CAAA;CAAE,EAAE,IAAI,EAAE,OAAO,CAAC,CA+BhE"}
@@ -0,0 +1,61 @@
1
+ import { spawn, spawnSync } from "node:child_process";
2
+ export const ollamaHealthy = (throwOnError = false) => {
3
+ const result = spawnSync("ollama", ["--version"], { encoding: "utf8" });
4
+ if (result.error || result.status !== 0) {
5
+ const details = {
6
+ reason: result.error ? "missing_cli" : "cli_error",
7
+ exitCode: result.status,
8
+ stderr: result.stderr?.toString(),
9
+ error: result.error?.message,
10
+ };
11
+ if (throwOnError) {
12
+ const error = new Error(`AUTH_ERROR: ollama CLI unavailable (${details.reason})`);
13
+ error.details = details;
14
+ throw error;
15
+ }
16
+ return { ok: false, details };
17
+ }
18
+ return { ok: true, details: { version: result.stdout?.toString().trim() } };
19
+ };
20
+ export const runOllamaExec = (prompt, model) => {
21
+ ollamaHealthy(true);
22
+ const args = ["run", model ?? "llama3"];
23
+ const result = spawnSync("ollama", args, { input: prompt, encoding: "utf8" });
24
+ if (result.error || result.status !== 0) {
25
+ const error = new Error(`AUTH_ERROR: ollama CLI failed (${result.error?.message ?? `exit ${result.status}`})`);
26
+ error.details = { reason: "cli_error", exitCode: result.status, stderr: result.stderr };
27
+ throw error;
28
+ }
29
+ const stdout = result.stdout?.toString() ?? "";
30
+ const output = stdout.trim();
31
+ return { output, raw: stdout };
32
+ };
33
+ export async function* runOllamaExecStream(prompt, model) {
34
+ ollamaHealthy(true);
35
+ const args = ["run", model ?? "llama3"];
36
+ const child = spawn("ollama", args, { stdio: ["pipe", "pipe", "pipe"] });
37
+ child.stdin.write(prompt);
38
+ child.stdin.end();
39
+ let stderr = "";
40
+ child.stderr?.setEncoding("utf8");
41
+ child.stderr?.on("data", (chunk) => {
42
+ stderr += chunk.toString();
43
+ });
44
+ const closePromise = new Promise((resolve, reject) => {
45
+ child.on("error", (err) => reject(err));
46
+ child.on("close", (code) => resolve(code ?? 0));
47
+ });
48
+ const stream = child.stdout;
49
+ stream?.setEncoding("utf8");
50
+ for await (const chunk of stream ?? []) {
51
+ if (!chunk)
52
+ continue;
53
+ yield { output: chunk, raw: chunk };
54
+ }
55
+ const exitCode = await closePromise;
56
+ if (exitCode !== 0) {
57
+ const error = new Error(`AUTH_ERROR: ollama CLI failed (exit ${exitCode}): ${stderr || "no output"}`);
58
+ error.details = { reason: "cli_error", exitCode, stderr };
59
+ throw error;
60
+ }
61
+ }
@@ -0,0 +1,23 @@
1
import { AgentHealth } from "@mcoda/shared";
import { AdapterConfig, AgentAdapter, InvocationRequest, InvocationResult } from "../AdapterTypes.js";
/**
 * Adapter for a remote Ollama HTTP server (`/api/tags` for health,
 * `/api/generate` for completions).
 *
 * Config extends AdapterConfig with:
 * - `baseUrl`: server root, e.g. `http://host:11434` (required at runtime).
 * - `headers`: extra HTTP headers merged into generate requests.
 * - `verifyTls`: set `false` to request skipping TLS verification.
 */
export declare class OllamaRemoteAdapter implements AgentAdapter {
    private config;
    private baseUrl;
    private headers;
    private verifyTls;
    private tlsAgent;
    constructor(config: AdapterConfig & {
        baseUrl?: string;
        headers?: Record<string, string>;
        verifyTls?: boolean;
    });
    /** Throws when baseUrl is missing or does not start with http(s)://. */
    private assertConfig;
    getCapabilities(): Promise<string[]>;
    /** Probes `GET {baseUrl}/api/tags`; reports healthy/unreachable. */
    healthCheck(): Promise<AgentHealth>;
    private ensureBaseUrl;
    private ensureModel;
    /** Maps Ollama counters (prompt_eval_count, eval_count, total_duration). */
    private extractMetrics;
    /** POSTs `/api/generate` with `stream: false` and wraps the response. */
    invoke(request: InvocationRequest): Promise<InvocationResult>;
    /** POSTs `/api/generate` with `stream: true`, yielding NDJSON chunks. */
    invokeStream(request: InvocationRequest): AsyncGenerator<InvocationResult, void, unknown>;
}
//# sourceMappingURL=OllamaRemoteAdapter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"OllamaRemoteAdapter.d.ts","sourceRoot":"","sources":["../../../src/adapters/ollama/OllamaRemoteAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAE5C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAStG,qBAAa,mBAAoB,YAAW,YAAY;IAM1C,OAAO,CAAC,MAAM;IAL1B,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,OAAO,CAAqC;IACpD,OAAO,CAAC,SAAS,CAAsB;IACvC,OAAO,CAAC,QAAQ,CAAyB;gBAErB,MAAM,EAAE,aAAa,GAAG;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE;IAWvH,OAAO,CAAC,YAAY;IASd,eAAe,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAIpC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;IA+BzC,OAAO,CAAC,aAAa;IAIrB,OAAO,CAAC,WAAW;IAQnB,OAAO,CAAC,cAAc;IAShB,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAmC5D,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,cAAc,CAAC,gBAAgB,EAAE,IAAI,EAAE,OAAO,CAAC;CA4EjG"}
@@ -0,0 +1,199 @@
1
+ import { Agent as HttpsAgent } from "node:https";
2
/**
 * Normalizes a configured base URL: coerces to string, trims whitespace,
 * and strips a single trailing slash. Returns undefined for falsy or
 * whitespace-only input.
 */
const normalizeBaseUrl = (value) => {
    if (!value) {
        return undefined;
    }
    const trimmed = String(value).trim();
    if (trimmed === "") {
        return undefined;
    }
    return trimmed.replace(/\/$/, "");
};
10
/**
 * Adapter that talks to a remote Ollama server over its HTTP API:
 * `GET /api/tags` for health probes and `POST /api/generate` for both
 * blocking and streaming completions.
 */
export class OllamaRemoteAdapter {
    constructor(config) {
        this.config = config;
        // Normalized (trimmed, no trailing slash) or undefined when unset.
        this.baseUrl = normalizeBaseUrl(config.baseUrl);
        const headers = config.headers;
        this.headers = headers && typeof headers === "object" ? headers : undefined;
        this.verifyTls = typeof config.verifyTls === "boolean" ? Boolean(config.verifyTls) : undefined;
        if (this.verifyTls === false) {
            // NOTE(review): this https.Agent is later passed as `init.agent`, but
            // Node's built-in fetch (undici) does not honor an `agent` option —
            // disabling TLS verification this way likely has no effect; an undici
            // `dispatcher` would be needed. Confirm against the runtime in use.
            this.tlsAgent = new HttpsAgent({ rejectUnauthorized: false });
        }
        // Fail fast on a missing/malformed baseUrl instead of at first request.
        this.assertConfig();
    }
    // Validates that baseUrl is present and http(s)://; throws otherwise.
    assertConfig() {
        if (!this.baseUrl) {
            throw new Error("Ollama baseUrl is not configured; set config.baseUrl to http://host:11434");
        }
        if (!/^https?:\/\//i.test(this.baseUrl)) {
            throw new Error("Ollama baseUrl must start with http:// or https://");
        }
    }
    // Capability list is taken verbatim from the adapter configuration.
    async getCapabilities() {
        return this.config.capabilities;
    }
    // Probes GET {baseUrl}/api/tags and reports healthy/unreachable.
    // NOTE(review): this probe sends neither this.headers nor the TLS agent,
    // so authenticated or self-signed deployments may report unreachable even
    // when invoke() would succeed — verify whether that is intended.
    async healthCheck() {
        const url = this.baseUrl;
        if (!url) {
            return {
                agentId: this.config.agent.id,
                status: "unreachable",
                lastCheckedAt: new Date().toISOString(),
                details: { reason: "missing_base_url" },
            };
        }
        const started = Date.now();
        try {
            const resp = await fetch(`${url}/api/tags`);
            const healthy = resp.ok;
            return {
                agentId: this.config.agent.id,
                status: healthy ? "healthy" : "unreachable",
                lastCheckedAt: new Date().toISOString(),
                latencyMs: Date.now() - started,
                details: { adapter: "ollama-remote", baseUrl: url, status: resp.status },
            };
        }
        catch (error) {
            // Network-level failure (DNS, refused connection, timeout, ...).
            return {
                agentId: this.config.agent.id,
                status: "unreachable",
                lastCheckedAt: new Date().toISOString(),
                details: { reason: "connection_error", error: error.message, baseUrl: url },
            };
        }
    }
    // Returns the normalized baseUrl (validated non-empty in the constructor).
    ensureBaseUrl() {
        return this.baseUrl;
    }
    // Returns the configured model or throws when none is set.
    ensureModel() {
        const model = this.config.model;
        if (!model) {
            throw new Error("Ollama model is not configured for this agent");
        }
        return model;
    }
    // Extracts Ollama's numeric counters from a response payload; returns
    // undefined when none are present so `metrics` is omitted from metadata.
    extractMetrics(data) {
        const metrics = {};
        if (typeof data?.prompt_eval_count === "number")
            metrics.promptEvalCount = data.prompt_eval_count;
        if (typeof data?.eval_count === "number")
            metrics.evalCount = data.eval_count;
        if (typeof data?.total_duration === "number")
            metrics.totalDurationNs = data.total_duration;
        if (Object.keys(metrics).length === 0)
            return undefined;
        return metrics;
    }
    // Blocking completion: POST /api/generate with stream:false.
    async invoke(request) {
        const url = this.ensureBaseUrl();
        const model = this.ensureModel();
        const init = {
            method: "POST",
            headers: { "Content-Type": "application/json", ...(this.headers ?? {}) },
            body: JSON.stringify({ model, prompt: request.input, stream: false }),
        };
        // NOTE(review): `agent` is not a standard fetch RequestInit option and is
        // ignored by undici-based fetch — see constructor note. TODO confirm.
        if (this.tlsAgent)
            init.agent = this.tlsAgent;
        const resp = await fetch(`${url}/api/generate`, init);
        if (!resp.ok) {
            const text = await resp.text().catch(() => "");
            throw new Error(`Ollama generate failed (${resp.status}): ${text}`);
        }
        const data = await resp.json().catch(() => ({}));
        const metrics = this.extractMetrics(data);
        // Prefer `response` (generate API); fall back to `message`, then to the
        // raw JSON so callers always receive a string.
        const output = typeof data?.response === "string"
            ? data.response
            : typeof data?.message === "string"
                ? data.message
                : JSON.stringify(data);
        return {
            output: output.trim(),
            adapter: this.config.adapter ?? "ollama-remote",
            model,
            metadata: {
                adapterType: this.config.adapter ?? "ollama-remote",
                baseUrl: url,
                capabilities: this.config.capabilities,
                metrics,
            },
        };
    }
    // Streaming completion: POST /api/generate with stream:true. The body is
    // NDJSON — one JSON object per line — parsed incrementally from the
    // response byte stream.
    async *invokeStream(request) {
        const url = this.ensureBaseUrl();
        const model = this.ensureModel();
        const init = {
            method: "POST",
            headers: { "Content-Type": "application/json", ...(this.headers ?? {}) },
            body: JSON.stringify({ model, prompt: request.input, stream: true }),
        };
        // NOTE(review): see constructor note — `agent` is likely ignored here.
        if (this.tlsAgent)
            init.agent = this.tlsAgent;
        const resp = await fetch(`${url}/api/generate`, init);
        if (!resp.ok || !resp.body) {
            const text = !resp.ok ? await resp.text().catch(() => "") : "";
            throw new Error(`Ollama generate (stream) failed (${resp.status}): ${text}`);
        }
        const reader = resp.body.getReader();
        const decoder = new TextDecoder();
        let buffer = "";
        while (true) {
            const { value, done } = await reader.read();
            if (done)
                break;
            // stream:true keeps multi-byte sequences split across reads intact.
            buffer += decoder.decode(value, { stream: true });
            // Drain every complete line currently buffered.
            while (true) {
                const idx = buffer.indexOf("\n");
                if (idx === -1)
                    break;
                const line = buffer.slice(0, idx).trim();
                buffer = buffer.slice(idx + 1);
                if (!line)
                    continue;
                try {
                    const data = JSON.parse(line);
                    const chunk = typeof data?.response === "string"
                        ? data.response
                        : typeof data?.message === "string"
                            ? data.message
                            : "";
                    const metrics = this.extractMetrics(data);
                    if (chunk) {
                        yield {
                            output: chunk,
                            adapter: this.config.adapter ?? "ollama-remote",
                            model,
                            metadata: {
                                adapterType: this.config.adapter ?? "ollama-remote",
                                baseUrl: url,
                                capabilities: this.config.capabilities,
                                streaming: true,
                                metrics,
                                raw: line,
                            },
                        };
                    }
                    // Ollama marks the final NDJSON record with done:true.
                    if (data?.done) {
                        return;
                    }
                }
                catch {
                    // Ignore malformed lines; keep streaming.
                }
            }
        }
        // Flush any unterminated trailing data as a final raw chunk.
        const tail = buffer.trim();
        if (tail) {
            yield {
                output: tail,
                adapter: this.config.adapter ?? "ollama-remote",
                model,
                metadata: {
                    adapterType: this.config.adapter ?? "ollama-remote",
                    baseUrl: url,
                    capabilities: this.config.capabilities,
                    streaming: true,
                    raw: tail,
                },
            };
        }
    }
}
@@ -0,0 +1,11 @@
1
import { AgentHealth } from "@mcoda/shared";
import { AdapterConfig, AgentAdapter, InvocationRequest, InvocationResult } from "../AdapterTypes.js";
/**
 * Stub adapter representing the OpenAI API backend. It performs no network
 * calls: health depends only on whether an API key is configured, and
 * invoke/invokeStream echo the input with a stub prefix.
 */
export declare class OpenAiAdapter implements AgentAdapter {
    private config;
    constructor(config: AdapterConfig);
    /** Capability list taken verbatim from the adapter config. */
    getCapabilities(): Promise<string[]>;
    /** `unreachable` when no API key is configured, otherwise `healthy`. */
    healthCheck(): Promise<AgentHealth>;
    /** Returns an `openai-stub:`-prefixed echo of the request input. */
    invoke(request: InvocationRequest): Promise<InvocationResult>;
    /** Yields a single `openai-stream:`-prefixed echo chunk. */
    invokeStream(request: InvocationRequest): AsyncGenerator<InvocationResult, void, unknown>;
}
//# sourceMappingURL=OpenAiAdapter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"OpenAiAdapter.d.ts","sourceRoot":"","sources":["../../../src/adapters/openai/OpenAiAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtG,qBAAa,aAAc,YAAW,YAAY;IACpC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,aAAa;IAEnC,eAAe,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAIpC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;IAkBnC,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAgB5D,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,cAAc,CAAC,gBAAgB,EAAE,IAAI,EAAE,OAAO,CAAC;CAWjG"}
@@ -0,0 +1,51 @@
1
+ export class OpenAiAdapter {
2
+ constructor(config) {
3
+ this.config = config;
4
+ }
5
+ async getCapabilities() {
6
+ return this.config.capabilities;
7
+ }
8
+ async healthCheck() {
9
+ if (!this.config.apiKey) {
10
+ return {
11
+ agentId: this.config.agent.id,
12
+ status: "unreachable",
13
+ lastCheckedAt: new Date().toISOString(),
14
+ details: { reason: "missing_api_key" },
15
+ };
16
+ }
17
+ return {
18
+ agentId: this.config.agent.id,
19
+ status: "healthy",
20
+ lastCheckedAt: new Date().toISOString(),
21
+ latencyMs: 0,
22
+ details: { adapter: "openai-api", model: this.config.model },
23
+ };
24
+ }
25
+ async invoke(request) {
26
+ const authMode = this.config.apiKey ? "api" : "none";
27
+ return {
28
+ output: `openai-stub:${request.input}`,
29
+ adapter: this.config.adapter ?? "openai-api",
30
+ model: this.config.model,
31
+ metadata: {
32
+ mode: authMode,
33
+ capabilities: this.config.capabilities,
34
+ prompts: this.config.prompts,
35
+ authMode,
36
+ adapterType: this.config.adapter ?? "openai-api",
37
+ },
38
+ };
39
+ }
40
+ async *invokeStream(request) {
41
+ yield {
42
+ output: `openai-stream:${request.input}`,
43
+ adapter: this.config.adapter ?? "openai-api",
44
+ model: this.config.model,
45
+ metadata: {
46
+ mode: this.config.apiKey ? "api" : "none",
47
+ streaming: true,
48
+ },
49
+ };
50
+ }
51
+ }
@@ -0,0 +1,11 @@
1
import { AgentHealth } from "@mcoda/shared";
import { AdapterConfig, AgentAdapter, InvocationRequest, InvocationResult } from "../AdapterTypes.js";
/**
 * Agent adapter for an OpenAI CLI backend.
 * NOTE(review): the implementation (OpenAiCliAdapter.js) is not visible in
 * this chunk; presumably it shells out to a local CLI like the other *Cli
 * adapters in this package — verify against the .js file before relying on
 * this description.
 */
export declare class OpenAiCliAdapter implements AgentAdapter {
    private config;
    constructor(config: AdapterConfig);
    getCapabilities(): Promise<string[]>;
    healthCheck(): Promise<AgentHealth>;
    invoke(request: InvocationRequest): Promise<InvocationResult>;
    invokeStream(request: InvocationRequest): AsyncGenerator<InvocationResult, void, unknown>;
}
//# sourceMappingURL=OpenAiCliAdapter.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"OpenAiCliAdapter.d.ts","sourceRoot":"","sources":["../../../src/adapters/openai/OpenAiCliAdapter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAGtG,qBAAa,gBAAiB,YAAW,YAAY;IACvC,OAAO,CAAC,MAAM;gBAAN,MAAM,EAAE,aAAa;IAEnC,eAAe,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAIpC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;IAYnC,MAAM,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,gBAAgB,CAAC;IAkB5D,YAAY,CAAC,OAAO,EAAE,iBAAiB,GAAG,cAAc,CAAC,gBAAgB,EAAE,IAAI,EAAE,OAAO,CAAC;CAoBjG"}