@use-lattice/litmus 0.121.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +19 -0
- package/dist/src/accounts-Bt1oJb1Z.cjs +219 -0
- package/dist/src/accounts-DjOU8Rm3.js +178 -0
- package/dist/src/agentic-utils-D03IiXQc.js +153 -0
- package/dist/src/agentic-utils-Dh7xaMQM.cjs +180 -0
- package/dist/src/agents-C6BIMlZa.js +231 -0
- package/dist/src/agents-DvIpNX1L.cjs +666 -0
- package/dist/src/agents-ZP0RP9vV.cjs +231 -0
- package/dist/src/agents-maJXdjbR.js +665 -0
- package/dist/src/aimlapi-BTbQjG2E.cjs +30 -0
- package/dist/src/aimlapi-CwMxqfXP.js +30 -0
- package/dist/src/audio-BBUdvsde.cjs +97 -0
- package/dist/src/audio-D5DPZ7I-.js +97 -0
- package/dist/src/base-BEysXrkq.cjs +222 -0
- package/dist/src/base-C451JQfq.js +193 -0
- package/dist/src/blobs-BY8MDmpo.js +230 -0
- package/dist/src/blobs-BgcNn97m.cjs +256 -0
- package/dist/src/cache-BBE_lsTA.cjs +4 -0
- package/dist/src/cache-BkrqU5Ba.js +237 -0
- package/dist/src/cache-DsCxFlsZ.cjs +297 -0
- package/dist/src/chat-CPJWDP6a.cjs +289 -0
- package/dist/src/chat-CXX3xzkk.cjs +811 -0
- package/dist/src/chat-CcDgZFJ4.js +787 -0
- package/dist/src/chat-Dz5ZeGO2.js +289 -0
- package/dist/src/chatkit-Dw0mKkML.cjs +1158 -0
- package/dist/src/chatkit-swAIVuea.js +1157 -0
- package/dist/src/chunk-DEq-mXcV.js +15 -0
- package/dist/src/claude-agent-sdk-BXZJtOg6.js +379 -0
- package/dist/src/claude-agent-sdk-CkfyjDoG.cjs +383 -0
- package/dist/src/cloudflare-ai-BzpJcqUH.js +161 -0
- package/dist/src/cloudflare-ai-Cmy_R1y2.cjs +161 -0
- package/dist/src/cloudflare-gateway-B9tVQKok.cjs +272 -0
- package/dist/src/cloudflare-gateway-DrD3ew3H.js +272 -0
- package/dist/src/codex-sdk-Dezj9Nwm.js +1056 -0
- package/dist/src/codex-sdk-Dl9D4k5B.cjs +1060 -0
- package/dist/src/cometapi-C-9YvCHC.js +54 -0
- package/dist/src/cometapi-DHgDKoO2.cjs +54 -0
- package/dist/src/completion-B8Ctyxpr.js +120 -0
- package/dist/src/completion-Cxrt08sj.cjs +131 -0
- package/dist/src/createHash-BwgE13yv.cjs +27 -0
- package/dist/src/createHash-DmPQkvBh.js +15 -0
- package/dist/src/docker-BiqcTwLv.js +80 -0
- package/dist/src/docker-C7tEJnP-.cjs +80 -0
- package/dist/src/esm-C62Zofr1.cjs +409 -0
- package/dist/src/esm-DMVc93eh.js +379 -0
- package/dist/src/evalResult-C3NJPQOo.cjs +301 -0
- package/dist/src/evalResult-C7JJAPBb.js +295 -0
- package/dist/src/evalResult-DoVTZZWI.cjs +2 -0
- package/dist/src/extractor-DnMD3fwt.cjs +391 -0
- package/dist/src/extractor-DtlL28vL.js +374 -0
- package/dist/src/fetch-BTxakTSg.cjs +1133 -0
- package/dist/src/fetch-DQckpUFz.js +928 -0
- package/dist/src/fileExtensions-DnqA1y9x.js +85 -0
- package/dist/src/fileExtensions-bYh77CN8.cjs +114 -0
- package/dist/src/genaiTracer-CyZrmaK0.cjs +268 -0
- package/dist/src/genaiTracer-D3fD9dNV.js +256 -0
- package/dist/src/graders-BNscxFrU.js +13644 -0
- package/dist/src/graders-D2oE9Msq.js +2 -0
- package/dist/src/graders-c0Ez_w-9.cjs +2 -0
- package/dist/src/graders-d0F2M3e9.cjs +14056 -0
- package/dist/src/image-0ZhE0VlR.cjs +280 -0
- package/dist/src/image-CWE1pdNv.js +257 -0
- package/dist/src/image-D9ZK6hwL.js +163 -0
- package/dist/src/image-DKZgZITg.cjs +163 -0
- package/dist/src/index.cjs +11366 -0
- package/dist/src/index.d.cts +19640 -0
- package/dist/src/index.d.ts +19641 -0
- package/dist/src/index.js +11306 -0
- package/dist/src/invariant-Ddh24eXh.js +25 -0
- package/dist/src/invariant-kfQ8Bu82.cjs +30 -0
- package/dist/src/knowledgeBase-BgPyGFUd.cjs +122 -0
- package/dist/src/knowledgeBase-DyHilYaP.js +122 -0
- package/dist/src/litellm-CyMeneHS.js +135 -0
- package/dist/src/litellm-DWDF73yF.cjs +135 -0
- package/dist/src/logger-C40ZGil9.js +717 -0
- package/dist/src/logger-DyfK9PBt.cjs +917 -0
- package/dist/src/luma-ray-BAU9X_ep.cjs +315 -0
- package/dist/src/luma-ray-nwVseBbv.js +313 -0
- package/dist/src/messages-B5ADWTTv.js +245 -0
- package/dist/src/messages-BCnZfqrS.cjs +257 -0
- package/dist/src/meteor-DLZZ3osF.cjs +134 -0
- package/dist/src/meteor-DUiCJRC-.js +134 -0
- package/dist/src/modelslab-00cveB8L.cjs +163 -0
- package/dist/src/modelslab-D9sCU_L7.js +163 -0
- package/dist/src/nova-reel-CTapvqYH.js +276 -0
- package/dist/src/nova-reel-DlWuuroF.cjs +278 -0
- package/dist/src/nova-sonic-5UPWfeMv.cjs +363 -0
- package/dist/src/nova-sonic-BhSwQNym.js +363 -0
- package/dist/src/openai-BWrJK9d8.cjs +52 -0
- package/dist/src/openai-DumO8WQn.js +47 -0
- package/dist/src/openclaw-B8brrjC_.cjs +577 -0
- package/dist/src/openclaw-Bkayww9q.js +571 -0
- package/dist/src/opencode-sdk-7xjoDNiM.cjs +562 -0
- package/dist/src/opencode-sdk-SGwAPxht.js +558 -0
- package/dist/src/otlpReceiver-CoAHfAN9.cjs +15 -0
- package/dist/src/otlpReceiver-oO3EQwI9.js +14 -0
- package/dist/src/providerRegistry-4yjhaEM8.js +45 -0
- package/dist/src/providerRegistry-DhV4rJIc.cjs +50 -0
- package/dist/src/providers-B5RJVG-7.cjs +33609 -0
- package/dist/src/providers-BdmZCLzV.js +33262 -0
- package/dist/src/providers-CxtRxn8e.js +2 -0
- package/dist/src/providers-DnQLNbx1.cjs +3 -0
- package/dist/src/pythonUtils-BD0druiM.cjs +275 -0
- package/dist/src/pythonUtils-IBhn5YGR.js +249 -0
- package/dist/src/quiverai-BDOwZBsM.cjs +213 -0
- package/dist/src/quiverai-D3JTF5lD.js +213 -0
- package/dist/src/responses-B2LCDCXZ.js +667 -0
- package/dist/src/responses-BvNm4Xv9.cjs +685 -0
- package/dist/src/rubyUtils-B0NwnfpY.cjs +245 -0
- package/dist/src/rubyUtils-BroxzZ7c.cjs +2 -0
- package/dist/src/rubyUtils-hqVw5UvJ.js +222 -0
- package/dist/src/sagemaker-Cno2V-Sx.js +689 -0
- package/dist/src/sagemaker-fV_KUgs5.cjs +691 -0
- package/dist/src/server-BOuAXb06.cjs +238 -0
- package/dist/src/server-CtI-EWzm.cjs +2 -0
- package/dist/src/server-Cy3DZymt.js +189 -0
- package/dist/src/slack-CP8xBePa.js +135 -0
- package/dist/src/slack-DSQ1yXVb.cjs +135 -0
- package/dist/src/store-BwDDaBjb.cjs +246 -0
- package/dist/src/store-DcbLC593.cjs +2 -0
- package/dist/src/store-IGpqMIkv.js +240 -0
- package/dist/src/tables-3Q2cL7So.cjs +373 -0
- package/dist/src/tables-Bi2fjr4W.js +288 -0
- package/dist/src/telemetry-Bg2WqF79.js +161 -0
- package/dist/src/telemetry-D0x6u5kX.cjs +166 -0
- package/dist/src/telemetry-DXNimrI0.cjs +2 -0
- package/dist/src/text-B_UCRPp2.js +22 -0
- package/dist/src/text-CW1cyrwj.cjs +33 -0
- package/dist/src/tokenUsageUtils-NYT-WKS6.js +138 -0
- package/dist/src/tokenUsageUtils-bVa1ga6f.cjs +173 -0
- package/dist/src/transcription-Cl_W16Pr.js +122 -0
- package/dist/src/transcription-yt1EecY8.cjs +124 -0
- package/dist/src/transform-BCtGrl_W.cjs +228 -0
- package/dist/src/transform-Bv6gG2MJ.cjs +1688 -0
- package/dist/src/transform-CY1wbpRy.js +1507 -0
- package/dist/src/transform-DU8rUL9P.cjs +2 -0
- package/dist/src/transform-yWaShiKr.js +216 -0
- package/dist/src/transformersAvailability-BGkzavwb.js +35 -0
- package/dist/src/transformersAvailability-DKoRtQLy.cjs +35 -0
- package/dist/src/types-5aqHpBwE.cjs +3769 -0
- package/dist/src/types-Bn6D9c4U.js +3300 -0
- package/dist/src/util-BkKlTkI2.js +293 -0
- package/dist/src/util-CTh0bfOm.cjs +1119 -0
- package/dist/src/util-D17oBwo7.cjs +328 -0
- package/dist/src/util-DsS_-v4p.js +613 -0
- package/dist/src/util-DuntT1Ga.js +951 -0
- package/dist/src/util-aWjdCYMI.cjs +667 -0
- package/dist/src/utils-CisQwpjA.js +94 -0
- package/dist/src/utils-yWamDvmz.cjs +123 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/drizzle/0000_lush_hellion.sql +36 -0
- package/drizzle/0001_wide_calypso.sql +3 -0
- package/drizzle/0002_tidy_juggernaut.sql +1 -0
- package/drizzle/0003_lively_naoko.sql +8 -0
- package/drizzle/0004_minor_peter_quill.sql +19 -0
- package/drizzle/0005_silky_millenium_guard.sql +2 -0
- package/drizzle/0006_harsh_caretaker.sql +42 -0
- package/drizzle/0007_cloudy_wong.sql +1 -0
- package/drizzle/0008_broad_boomer.sql +2 -0
- package/drizzle/0009_strong_marten_broadcloak.sql +19 -0
- package/drizzle/0010_needy_bishop.sql +11 -0
- package/drizzle/0011_moaning_millenium_guard.sql +1 -0
- package/drizzle/0012_late_marten_broadcloak.sql +2 -0
- package/drizzle/0013_previous_dormammu.sql +9 -0
- package/drizzle/0014_lazy_captain_universe.sql +2 -0
- package/drizzle/0015_zippy_wallop.sql +29 -0
- package/drizzle/0016_jazzy_zemo.sql +2 -0
- package/drizzle/0017_reflective_praxagora.sql +4 -0
- package/drizzle/0018_fat_vanisher.sql +22 -0
- package/drizzle/0019_new_clint_barton.sql +8 -0
- package/drizzle/0020_skinny_maverick.sql +1 -0
- package/drizzle/0021_mysterious_madelyne_pryor.sql +13 -0
- package/drizzle/0022_sleepy_ultimo.sql +25 -0
- package/drizzle/0023_wooden_mandrill.sql +2 -0
- package/drizzle/AGENTS.md +68 -0
- package/drizzle/CLAUDE.md +1 -0
- package/drizzle/meta/0000_snapshot.json +221 -0
- package/drizzle/meta/0001_snapshot.json +214 -0
- package/drizzle/meta/0002_snapshot.json +221 -0
- package/drizzle/meta/0005_snapshot.json +369 -0
- package/drizzle/meta/0006_snapshot.json +638 -0
- package/drizzle/meta/0007_snapshot.json +640 -0
- package/drizzle/meta/0008_snapshot.json +649 -0
- package/drizzle/meta/0009_snapshot.json +554 -0
- package/drizzle/meta/0010_snapshot.json +619 -0
- package/drizzle/meta/0011_snapshot.json +627 -0
- package/drizzle/meta/0012_snapshot.json +639 -0
- package/drizzle/meta/0013_snapshot.json +717 -0
- package/drizzle/meta/0014_snapshot.json +717 -0
- package/drizzle/meta/0015_snapshot.json +897 -0
- package/drizzle/meta/0016_snapshot.json +1031 -0
- package/drizzle/meta/0018_snapshot.json +1210 -0
- package/drizzle/meta/0019_snapshot.json +1165 -0
- package/drizzle/meta/0020_snapshot.json +1232 -0
- package/drizzle/meta/0021_snapshot.json +1311 -0
- package/drizzle/meta/0022_snapshot.json +1481 -0
- package/drizzle/meta/0023_snapshot.json +1496 -0
- package/drizzle/meta/_journal.json +174 -0
- package/package.json +240 -0
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { x as getEnvString } from "./logger-C40ZGil9.js";
|
|
2
|
+
import "./fetch-DQckpUFz.js";
|
|
3
|
+
import "./cache-BkrqU5Ba.js";
|
|
4
|
+
import { t as OpenAiChatCompletionProvider } from "./chat-CcDgZFJ4.js";
|
|
5
|
+
import { n as OpenAiEmbeddingProvider, t as OpenAiCompletionProvider } from "./completion-B8Ctyxpr.js";
|
|
6
|
+
import { t as OpenAiImageProvider } from "./image-CWE1pdNv.js";
|
|
7
|
+
//#region src/providers/cometapi.ts
|
|
8
|
+
/**
 * Image provider for CometAPI. Reuses the OpenAI image pipeline with
 * CometAPI's base URL and API-key environment variable baked into the config.
 */
var CometApiImageProvider = class extends OpenAiImageProvider {
	constructor(modelName, options = {}) {
		// Build the config first, then hand a single merged options object to the base class.
		const config = {
			...options.config,
			apiKeyEnvar: "COMETAPI_KEY",
			apiBaseUrl: "https://api.cometapi.com/v1"
		};
		super(modelName, { ...options, config });
	}
	/** Explicit config key wins; otherwise fall back to the COMETAPI_KEY env var. */
	getApiKey() {
		return this.config?.apiKey || getEnvString("COMETAPI_KEY");
	}
	/** Default endpoint when no apiBaseUrl override is present. */
	getApiUrlDefault() {
		return "https://api.cometapi.com/v1";
	}
};
|
|
30
|
+
/**
 * Factory for CometAPI providers built from a `cometapi:<type>:<model>` path,
 * using OpenAI-compatible endpoints. An unrecognized (or absent) type falls
 * back to a chat provider whose model name is everything after the first colon.
 */
function createCometApiProvider(providerPath, options = {}) {
	const parts = providerPath.split(":");
	const openaiOptions = {
		...options,
		config: {
			...options.config || {},
			apiBaseUrl: "https://api.cometapi.com/v1",
			apiKeyEnvar: "COMETAPI_KEY"
		}
	};
	const modelName = parts.slice(2).join(":");
	switch (parts[1]) {
		case "chat": return new OpenAiChatCompletionProvider(modelName, openaiOptions);
		case "completion": return new OpenAiCompletionProvider(modelName, openaiOptions);
		case "embedding":
		case "embeddings": return new OpenAiEmbeddingProvider(modelName, openaiOptions);
		case "image": return new CometApiImageProvider(modelName, openaiOptions);
		// Fallback: treat the remainder of the path (after "cometapi:") as a chat model name.
		default: return new OpenAiChatCompletionProvider(parts.slice(1).join(":"), openaiOptions);
	}
}
|
|
51
|
+
//#endregion
|
|
52
|
+
export { createCometApiProvider };
|
|
53
|
+
|
|
54
|
+
//# sourceMappingURL=cometapi-C-9YvCHC.js.map
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
const require_logger = require("./logger-DyfK9PBt.cjs");
|
|
2
|
+
require("./fetch-BTxakTSg.cjs");
|
|
3
|
+
require("./cache-DsCxFlsZ.cjs");
|
|
4
|
+
const require_chat = require("./chat-CXX3xzkk.cjs");
|
|
5
|
+
const require_completion = require("./completion-Cxrt08sj.cjs");
|
|
6
|
+
const require_image = require("./image-0ZhE0VlR.cjs");
|
|
7
|
+
//#region src/providers/cometapi.ts
|
|
8
|
+
/**
 * Image provider for CometAPI (CJS build). Reuses the OpenAI image pipeline
 * with CometAPI's base URL and API-key environment variable baked in.
 */
var CometApiImageProvider = class extends require_image.OpenAiImageProvider {
	constructor(modelName, options = {}) {
		// Build the config first, then hand a single merged options object to the base class.
		const config = {
			...options.config,
			apiKeyEnvar: "COMETAPI_KEY",
			apiBaseUrl: "https://api.cometapi.com/v1"
		};
		super(modelName, { ...options, config });
	}
	/** Explicit config key wins; otherwise fall back to the COMETAPI_KEY env var. */
	getApiKey() {
		return this.config?.apiKey || require_logger.getEnvString("COMETAPI_KEY");
	}
	/** Default endpoint when no apiBaseUrl override is present. */
	getApiUrlDefault() {
		return "https://api.cometapi.com/v1";
	}
};
|
|
30
|
+
/**
 * Factory for CometAPI providers built from a `cometapi:<type>:<model>` path,
 * using OpenAI-compatible endpoints (CJS build). An unrecognized (or absent)
 * type falls back to a chat provider whose model name is everything after the
 * first colon.
 */
function createCometApiProvider(providerPath, options = {}) {
	const parts = providerPath.split(":");
	const openaiOptions = {
		...options,
		config: {
			...options.config || {},
			apiBaseUrl: "https://api.cometapi.com/v1",
			apiKeyEnvar: "COMETAPI_KEY"
		}
	};
	const modelName = parts.slice(2).join(":");
	switch (parts[1]) {
		case "chat": return new require_chat.OpenAiChatCompletionProvider(modelName, openaiOptions);
		case "completion": return new require_completion.OpenAiCompletionProvider(modelName, openaiOptions);
		case "embedding":
		case "embeddings": return new require_completion.OpenAiEmbeddingProvider(modelName, openaiOptions);
		case "image": return new CometApiImageProvider(modelName, openaiOptions);
		// Fallback: treat the remainder of the path (after "cometapi:") as a chat model name.
		default: return new require_chat.OpenAiChatCompletionProvider(parts.slice(1).join(":"), openaiOptions);
	}
}
|
|
51
|
+
//#endregion
|
|
52
|
+
exports.createCometApiProvider = createCometApiProvider;
|
|
53
|
+
|
|
54
|
+
//# sourceMappingURL=cometapi-DHgDKoO2.cjs.map
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import { b as getEnvInt, r as logger, x as getEnvString, y as getEnvFloat } from "./logger-C40ZGil9.js";
|
|
2
|
+
import { h as REQUEST_TIMEOUT_MS } from "./fetch-DQckpUFz.js";
|
|
3
|
+
import { r as fetchWithCache } from "./cache-BkrqU5Ba.js";
|
|
4
|
+
import { t as OpenAiGenericProvider } from "./openai-DumO8WQn.js";
|
|
5
|
+
import { a as calculateOpenAICost, c as getTokenUsage, n as OPENAI_COMPLETION_MODELS, s as formatOpenAiError } from "./util-DsS_-v4p.js";
|
|
6
|
+
//#region src/providers/openai/embedding.ts
|
|
7
|
+
/**
 * Provider for the OpenAI `/embeddings` endpoint. Works against any
 * OpenAI-compatible API since the URL is derived from `this.getApiUrl()`.
 */
var OpenAiEmbeddingProvider = class extends OpenAiGenericProvider {
	/**
	 * Embed a single string via POST {apiUrl}/embeddings.
	 *
	 * Returns `{ embedding, latencyMs, tokenUsage }` on success, or
	 * `{ error: string }` on any failure — network and API errors are
	 * returned, never thrown.
	 */
	async callEmbeddingApi(text) {
		// Fail fast with a readable message when a required API key is missing.
		if (this.requiresApiKey() && !this.getApiKey()) return { error: this.getMissingApiKeyErrorMessage() };
		// Only plain strings are accepted; arrays/objects are rejected up front.
		if (typeof text !== "string") return { error: `Invalid input type for embedding API. Expected string, got ${typeof text}. Input: ${JSON.stringify(text)}` };
		const body = {
			input: text,
			model: this.modelName
		};
		// Declared outside the try so the catch blocks below can still
		// reference data/deleteFromCache after a partial failure.
		let data;
		let status;
		let statusText;
		let deleteFromCache;
		let cached = false;
		let latencyMs;
		try {
			// 5th positional arg is always false here — the same slot that
			// callApi feeds bustCache into elsewhere; presumably "don't bust the cache".
			const response = await fetchWithCache(`${this.getApiUrl()}/embeddings`, {
				method: "POST",
				headers: {
					"Content-Type": "application/json",
					Authorization: `Bearer ${this.getApiKey()}`,
					...this.getOrganization() ? { "OpenAI-Organization": this.getOrganization() } : {},
					...this.config.headers
				},
				body: JSON.stringify(body)
			}, REQUEST_TIMEOUT_MS, "json", false, this.config.maxRetries);
			({data, cached, status, statusText, latencyMs, deleteFromCache} = response);
			// Non-2xx responses are surfaced as an error string, not thrown.
			if (status && (status < 200 || status >= 300)) return { error: `API error: ${status} ${statusText || "Unknown error"}\n${typeof data === "string" ? data : JSON.stringify(data)}` };
		} catch (err) {
			logger.error(`API call error: ${String(err)}`);
			// Evict any cached entry for this request so a retry re-fetches.
			await deleteFromCache?.();
			return { error: `API call error: ${String(err)}` };
		}
		try {
			// Expected response shape: { data: [{ embedding: number[] }], usage: ... }.
			const embedding = data?.data?.[0]?.embedding;
			if (!embedding) return { error: "No embedding found in OpenAI embeddings API response" };
			return {
				embedding,
				latencyMs,
				tokenUsage: getTokenUsage(data, cached)
			};
		} catch (err) {
			logger.error(`Response parsing error: ${String(err)}`);
			// A malformed cached body would otherwise be sticky — evict it.
			await deleteFromCache?.();
			return { error: `API error: ${String(err)}: ${JSON.stringify(data)}` };
		}
	}
};
|
|
54
|
+
//#endregion
|
|
55
|
+
//#region src/providers/openai/completion.ts
|
|
56
|
+
/**
 * Provider for the legacy OpenAI `/completions` (text completion) endpoint.
 * Request parameters resolve in order: explicit config -> OPENAI_* env var -> default.
 */
var OpenAiCompletionProvider = class OpenAiCompletionProvider extends OpenAiGenericProvider {
	// Known completion models, exposed for callers; names derived from ids.
	static OPENAI_COMPLETION_MODELS = OPENAI_COMPLETION_MODELS;
	static OPENAI_COMPLETION_MODEL_NAMES = OPENAI_COMPLETION_MODELS.map((model) => model.id);
	config;
	constructor(modelName, options = {}) {
		super(modelName, options);
		this.config = options.config || {};
		// Only warn about unknown models when pointed at the default OpenAI URL —
		// custom endpoints legitimately serve arbitrary model names.
		if (!OpenAiCompletionProvider.OPENAI_COMPLETION_MODEL_NAMES.includes(modelName) && this.getApiUrl() === this.getApiUrlDefault()) logger.warn(`FYI: Using unknown OpenAI completion model: ${modelName}`);
	}
	/**
	 * POST the prompt to {apiUrl}/completions.
	 *
	 * Throws for a missing API key or malformed OPENAI_STOP (configuration
	 * errors); API/network failures are returned as `{ error: string }`.
	 */
	async callApi(prompt, context, callApiOptions) {
		if (this.requiresApiKey() && !this.getApiKey()) throw new Error(this.getMissingApiKeyErrorMessage());
		let stop;
		try {
			// OPENAI_STOP (JSON) overrides config.stop; default stops cover common chat-template sentinels.
			stop = getEnvString("OPENAI_STOP") ? JSON.parse(getEnvString("OPENAI_STOP") || "") : this.config?.stop || ["<|im_end|>", "<|endoftext|>"];
		} catch (err) {
			throw new Error(`OPENAI_STOP is not a valid JSON string: ${err}`);
		}
		const body = {
			model: this.modelName,
			prompt,
			seed: this.config.seed,
			max_tokens: this.config.max_tokens ?? getEnvInt("OPENAI_MAX_TOKENS", 1024),
			temperature: this.config.temperature ?? getEnvFloat("OPENAI_TEMPERATURE", 0),
			top_p: this.config.top_p ?? getEnvFloat("OPENAI_TOP_P", 1),
			presence_penalty: this.config.presence_penalty ?? getEnvFloat("OPENAI_PRESENCE_PENALTY", 0),
			frequency_penalty: this.config.frequency_penalty ?? getEnvFloat("OPENAI_FREQUENCY_PENALTY", 0),
			best_of: this.config.best_of ?? getEnvInt("OPENAI_BEST_OF", 1),
			...callApiOptions?.includeLogProbs ? { logprobs: callApiOptions.includeLogProbs } : {},
			...stop ? { stop } : {},
			// passthrough is applied last so it can override any field above.
			...this.config.passthrough || {}
		};
		let data, cached = false, latencyMs;
		try {
			// Cache is busted when the caller requests it or is in debug mode.
			({data, cached, latencyMs} = await fetchWithCache(`${this.getApiUrl()}/completions`, {
				method: "POST",
				headers: {
					"Content-Type": "application/json",
					...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {},
					...this.getOrganization() ? { "OpenAI-Organization": this.getOrganization() } : {},
					...this.config.headers
				},
				body: JSON.stringify(body)
			}, REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug, this.config.maxRetries));
		} catch (err) {
			logger.error(`API call error: ${String(err)}`);
			return { error: `API call error: ${String(err)}` };
		}
		// API-level errors come back as a JSON body with an `error` field.
		if (data.error) return { error: formatOpenAiError(data) };
		try {
			return {
				output: data.choices[0].text,
				tokenUsage: getTokenUsage(data, cached),
				cached,
				latencyMs,
				cost: calculateOpenAICost(this.modelName, this.config, data.usage?.prompt_tokens, data.usage?.completion_tokens)
			};
		} catch (err) {
			// E.g. missing choices array — report the raw body for diagnosis.
			return { error: `API error: ${String(err)}: ${JSON.stringify(data)}` };
		}
	}
};
|
|
117
|
+
//#endregion
|
|
118
|
+
export { OpenAiEmbeddingProvider as n, OpenAiCompletionProvider as t };
|
|
119
|
+
|
|
120
|
+
//# sourceMappingURL=completion-B8Ctyxpr.js.map
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
const require_logger = require("./logger-DyfK9PBt.cjs");
|
|
2
|
+
const require_fetch = require("./fetch-BTxakTSg.cjs");
|
|
3
|
+
const require_cache = require("./cache-DsCxFlsZ.cjs");
|
|
4
|
+
const require_openai = require("./openai-BWrJK9d8.cjs");
|
|
5
|
+
const require_util = require("./util-aWjdCYMI.cjs");
|
|
6
|
+
//#region src/providers/openai/embedding.ts
|
|
7
|
+
/**
 * Provider for the OpenAI `/embeddings` endpoint (CJS build). Works against
 * any OpenAI-compatible API since the URL is derived from `this.getApiUrl()`.
 */
var OpenAiEmbeddingProvider = class extends require_openai.OpenAiGenericProvider {
	/**
	 * Embed a single string via POST {apiUrl}/embeddings.
	 *
	 * Returns `{ embedding, latencyMs, tokenUsage }` on success, or
	 * `{ error: string }` on any failure — network and API errors are
	 * returned, never thrown.
	 */
	async callEmbeddingApi(text) {
		// Fail fast with a readable message when a required API key is missing.
		if (this.requiresApiKey() && !this.getApiKey()) return { error: this.getMissingApiKeyErrorMessage() };
		// Only plain strings are accepted; arrays/objects are rejected up front.
		if (typeof text !== "string") return { error: `Invalid input type for embedding API. Expected string, got ${typeof text}. Input: ${JSON.stringify(text)}` };
		const body = {
			input: text,
			model: this.modelName
		};
		// Declared outside the try so the catch blocks below can still
		// reference data/deleteFromCache after a partial failure.
		let data;
		let status;
		let statusText;
		let deleteFromCache;
		let cached = false;
		let latencyMs;
		try {
			// 5th positional arg is always false here — the same slot that
			// callApi feeds bustCache into elsewhere; presumably "don't bust the cache".
			const response = await require_cache.fetchWithCache(`${this.getApiUrl()}/embeddings`, {
				method: "POST",
				headers: {
					"Content-Type": "application/json",
					Authorization: `Bearer ${this.getApiKey()}`,
					...this.getOrganization() ? { "OpenAI-Organization": this.getOrganization() } : {},
					...this.config.headers
				},
				body: JSON.stringify(body)
			}, require_fetch.REQUEST_TIMEOUT_MS, "json", false, this.config.maxRetries);
			({data, cached, status, statusText, latencyMs, deleteFromCache} = response);
			// Non-2xx responses are surfaced as an error string, not thrown.
			if (status && (status < 200 || status >= 300)) return { error: `API error: ${status} ${statusText || "Unknown error"}\n${typeof data === "string" ? data : JSON.stringify(data)}` };
		} catch (err) {
			require_logger.logger.error(`API call error: ${String(err)}`);
			// Evict any cached entry for this request so a retry re-fetches.
			await deleteFromCache?.();
			return { error: `API call error: ${String(err)}` };
		}
		try {
			// Expected response shape: { data: [{ embedding: number[] }], usage: ... }.
			const embedding = data?.data?.[0]?.embedding;
			if (!embedding) return { error: "No embedding found in OpenAI embeddings API response" };
			return {
				embedding,
				latencyMs,
				tokenUsage: require_util.getTokenUsage(data, cached)
			};
		} catch (err) {
			require_logger.logger.error(`Response parsing error: ${String(err)}`);
			// A malformed cached body would otherwise be sticky — evict it.
			await deleteFromCache?.();
			return { error: `API error: ${String(err)}: ${JSON.stringify(data)}` };
		}
	}
};
|
|
54
|
+
//#endregion
|
|
55
|
+
//#region src/providers/openai/completion.ts
|
|
56
|
+
/**
 * Provider for the legacy OpenAI `/completions` (text completion) endpoint
 * (CJS build). Request parameters resolve in order:
 * explicit config -> OPENAI_* env var -> default.
 */
var OpenAiCompletionProvider = class OpenAiCompletionProvider extends require_openai.OpenAiGenericProvider {
	// Known completion models, exposed for callers; names derived from ids.
	static OPENAI_COMPLETION_MODELS = require_util.OPENAI_COMPLETION_MODELS;
	static OPENAI_COMPLETION_MODEL_NAMES = require_util.OPENAI_COMPLETION_MODELS.map((model) => model.id);
	config;
	constructor(modelName, options = {}) {
		super(modelName, options);
		this.config = options.config || {};
		// Only warn about unknown models when pointed at the default OpenAI URL —
		// custom endpoints legitimately serve arbitrary model names.
		if (!OpenAiCompletionProvider.OPENAI_COMPLETION_MODEL_NAMES.includes(modelName) && this.getApiUrl() === this.getApiUrlDefault()) require_logger.logger.warn(`FYI: Using unknown OpenAI completion model: ${modelName}`);
	}
	/**
	 * POST the prompt to {apiUrl}/completions.
	 *
	 * Throws for a missing API key or malformed OPENAI_STOP (configuration
	 * errors); API/network failures are returned as `{ error: string }`.
	 */
	async callApi(prompt, context, callApiOptions) {
		if (this.requiresApiKey() && !this.getApiKey()) throw new Error(this.getMissingApiKeyErrorMessage());
		let stop;
		try {
			// OPENAI_STOP (JSON) overrides config.stop; default stops cover common chat-template sentinels.
			stop = require_logger.getEnvString("OPENAI_STOP") ? JSON.parse(require_logger.getEnvString("OPENAI_STOP") || "") : this.config?.stop || ["<|im_end|>", "<|endoftext|>"];
		} catch (err) {
			throw new Error(`OPENAI_STOP is not a valid JSON string: ${err}`);
		}
		const body = {
			model: this.modelName,
			prompt,
			seed: this.config.seed,
			max_tokens: this.config.max_tokens ?? require_logger.getEnvInt("OPENAI_MAX_TOKENS", 1024),
			temperature: this.config.temperature ?? require_logger.getEnvFloat("OPENAI_TEMPERATURE", 0),
			top_p: this.config.top_p ?? require_logger.getEnvFloat("OPENAI_TOP_P", 1),
			presence_penalty: this.config.presence_penalty ?? require_logger.getEnvFloat("OPENAI_PRESENCE_PENALTY", 0),
			frequency_penalty: this.config.frequency_penalty ?? require_logger.getEnvFloat("OPENAI_FREQUENCY_PENALTY", 0),
			best_of: this.config.best_of ?? require_logger.getEnvInt("OPENAI_BEST_OF", 1),
			...callApiOptions?.includeLogProbs ? { logprobs: callApiOptions.includeLogProbs } : {},
			...stop ? { stop } : {},
			// passthrough is applied last so it can override any field above.
			...this.config.passthrough || {}
		};
		let data, cached = false, latencyMs;
		try {
			// Cache is busted when the caller requests it or is in debug mode.
			({data, cached, latencyMs} = await require_cache.fetchWithCache(`${this.getApiUrl()}/completions`, {
				method: "POST",
				headers: {
					"Content-Type": "application/json",
					...this.getApiKey() ? { Authorization: `Bearer ${this.getApiKey()}` } : {},
					...this.getOrganization() ? { "OpenAI-Organization": this.getOrganization() } : {},
					...this.config.headers
				},
				body: JSON.stringify(body)
			}, require_fetch.REQUEST_TIMEOUT_MS, "json", context?.bustCache ?? context?.debug, this.config.maxRetries));
		} catch (err) {
			require_logger.logger.error(`API call error: ${String(err)}`);
			return { error: `API call error: ${String(err)}` };
		}
		// API-level errors come back as a JSON body with an `error` field.
		if (data.error) return { error: require_util.formatOpenAiError(data) };
		try {
			return {
				output: data.choices[0].text,
				tokenUsage: require_util.getTokenUsage(data, cached),
				cached,
				latencyMs,
				cost: require_util.calculateOpenAICost(this.modelName, this.config, data.usage?.prompt_tokens, data.usage?.completion_tokens)
			};
		} catch (err) {
			// E.g. missing choices array — report the raw body for diagnosis.
			return { error: `API error: ${String(err)}: ${JSON.stringify(data)}` };
		}
	}
};
|
|
117
|
+
//#endregion
|
|
118
|
+
Object.defineProperty(exports, "OpenAiCompletionProvider", {
|
|
119
|
+
enumerable: true,
|
|
120
|
+
get: function() {
|
|
121
|
+
return OpenAiCompletionProvider;
|
|
122
|
+
}
|
|
123
|
+
});
|
|
124
|
+
Object.defineProperty(exports, "OpenAiEmbeddingProvider", {
|
|
125
|
+
enumerable: true,
|
|
126
|
+
get: function() {
|
|
127
|
+
return OpenAiEmbeddingProvider;
|
|
128
|
+
}
|
|
129
|
+
});
|
|
130
|
+
|
|
131
|
+
//# sourceMappingURL=completion-Cxrt08sj.cjs.map
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
require("./logger-DyfK9PBt.cjs");
|
|
2
|
+
let crypto = require("crypto");
|
|
3
|
+
//#region src/util/createHash.ts
|
|
4
|
+
/** Hex-encoded SHA-256 digest of `str`. */
function sha256(str) {
	const hasher = (0, crypto.createHash)("sha256");
	hasher.update(str);
	return hasher.digest("hex");
}
|
|
7
|
+
/**
 * Random alphanumeric string of `length` characters (default 3).
 * Uses Math.random(), so NOT suitable for security-sensitive tokens.
 */
function randomSequence(length = 3) {
	const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
	let result = "";
	// Bound derived from the alphabet itself instead of a hard-coded 62,
	// so the two cannot drift apart if the alphabet ever changes.
	for (let i = 0; i < length; i++) result += characters.charAt(Math.floor(Math.random() * characters.length));
	return result;
}
|
|
13
|
+
//#endregion
|
|
14
|
+
Object.defineProperty(exports, "randomSequence", {
|
|
15
|
+
enumerable: true,
|
|
16
|
+
get: function() {
|
|
17
|
+
return randomSequence;
|
|
18
|
+
}
|
|
19
|
+
});
|
|
20
|
+
Object.defineProperty(exports, "sha256", {
|
|
21
|
+
enumerable: true,
|
|
22
|
+
get: function() {
|
|
23
|
+
return sha256;
|
|
24
|
+
}
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
//# sourceMappingURL=createHash-BwgE13yv.cjs.map
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { createHash } from "crypto";
|
|
2
|
+
//#region src/util/createHash.ts
|
|
3
|
+
/** Hex-encoded SHA-256 digest of `str`. */
function sha256(str) {
	const hasher = createHash("sha256");
	hasher.update(str);
	return hasher.digest("hex");
}
|
|
6
|
+
/**
 * Random alphanumeric string of `length` characters (default 3).
 * Uses Math.random(), so NOT suitable for security-sensitive tokens.
 */
function randomSequence(length = 3) {
	const characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
	let result = "";
	// Bound derived from the alphabet itself instead of a hard-coded 62,
	// so the two cannot drift apart if the alphabet ever changes.
	for (let i = 0; i < length; i++) result += characters.charAt(Math.floor(Math.random() * characters.length));
	return result;
}
|
|
12
|
+
//#endregion
|
|
13
|
+
export { sha256 as n, randomSequence as t };
|
|
14
|
+
|
|
15
|
+
//# sourceMappingURL=createHash-DmPQkvBh.js.map
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { r as logger, x as getEnvString } from "./logger-C40ZGil9.js";
|
|
2
|
+
import { r as fetchWithCache } from "./cache-BkrqU5Ba.js";
|
|
3
|
+
import { t as OpenAiChatCompletionProvider } from "./chat-CcDgZFJ4.js";
|
|
4
|
+
import { n as OpenAiEmbeddingProvider, t as OpenAiCompletionProvider } from "./completion-B8Ctyxpr.js";
|
|
5
|
+
//#region src/providers/docker.ts
|
|
6
|
+
/**
 * List the models available from a Docker Model Runner instance.
 *
 * @param apiBaseUrl OpenAI-compatible base URL (e.g. ".../engines/v1").
 * @returns The `data` array from the /models response, or [] when absent.
 * @throws Error with a setup hint when the endpoint is unreachable.
 */
async function fetchLocalModels(apiBaseUrl) {
	try {
		// bust=true, retries=0: always hit the live endpoint, fail fast.
		const { data } = await fetchWithCache(`${apiBaseUrl}/models`, void 0, void 0, "json", true, 0);
		return data?.data ?? [];
	} catch (e) {
		// Attach the original error as `cause` so its stack trace isn't lost.
		throw new Error(`Failed to connect to Docker Model Runner. Is it enabled? Are the API endpoints enabled? For details, see https://docs.docker.com/ai/model-runner. \n${e.message}`, { cause: e });
	}
}
|
|
14
|
+
/**
 * True when `modelId` case-insensitively matches the id of a model reported
 * by the local Docker Model Runner.
 */
async function hasLocalModel(modelId, apiBaseUrl) {
	// toLowerCase(), not toLocaleLowerCase(): model ids are ASCII identifiers,
	// and locale-sensitive lowercasing (e.g. Turkish dotless i) could break
	// the match depending on the host locale. Lowered once, outside the scan.
	const wanted = modelId?.toLowerCase();
	const models = await fetchLocalModels(apiBaseUrl);
	return models.some((model) => model && model.id?.toLowerCase() === wanted);
}
|
|
17
|
+
/**
 * Split a `docker:<type>:<model>` provider path into its type and model name.
 * "embedding" is normalized to "embeddings"; any unrecognized type means the
 * path had no type segment, so everything after the first colon is the model
 * and the type defaults to "chat".
 */
function parseProviderPath(providerPath) {
	const segments = providerPath.split(":");
	const kind = segments[1];
	if (kind === "chat" || kind === "completion" || kind === "embeddings") {
		return {
			type: kind,
			model: segments.slice(2).join(":")
		};
	}
	if (kind === "embedding") {
		// Singular alias, normalized to the plural form used internally.
		return {
			type: "embeddings",
			model: segments.slice(2).join(":")
		};
	}
	return {
		type: "chat",
		model: segments.slice(1).join(":")
	};
}
|
|
37
|
+
/**
 * Factory for Docker Model Runner providers using OpenAI-compatible endpoints.
 * Base URL and API key resolve: options.env -> process env -> defaults
 * ("http://localhost:12434", key "dmr").
 */
function createDockerProvider(providerPath, options = {}) {
	const baseUrlRoot = options?.env?.DOCKER_MODEL_RUNNER_BASE_URL ?? getEnvString("DOCKER_MODEL_RUNNER_BASE_URL") ?? "http://localhost:12434";
	const apiBaseUrl = baseUrlRoot + "/engines/v1";
	const apiKey = options?.env?.DOCKER_MODEL_RUNNER_API_KEY ?? getEnvString("DOCKER_MODEL_RUNNER_API_KEY") ?? "dmr";
	const openaiOptions = {
		...options,
		config: {
			...options.config || {},
			apiBaseUrl,
			apiKey
		}
	};
	const { type, model } = parseProviderPath(providerPath);
	if (type === "completion") {
		return new DMRCompletionProvider(model, openaiOptions);
	}
	if (type === "embeddings") {
		return new DMREmbeddingProvider(model, openaiOptions);
	}
	// "chat" and anything else fall through to the chat provider.
	return new DMRChatCompletionProvider(model, openaiOptions);
}
|
|
59
|
+
/**
 * Chat provider backed by Docker Model Runner's OpenAI-compatible chat endpoint.
 * Warns (but still attempts the call) when the model is not pulled locally.
 */
var DMRChatCompletionProvider = class extends OpenAiChatCompletionProvider {
  async callApi(prompt, context, callApiOptions) {
    const isPulled = await hasLocalModel(this.modelName, this.getApiUrl());
    if (!isPulled) {
      logger.warn(`Model '${this.modelName}' not found. Run 'docker model pull ${this.modelName}'.`);
    }
    return super.callApi(prompt, context, callApiOptions);
  }
};
|
|
65
|
+
/**
 * Completion provider backed by Docker Model Runner's OpenAI-compatible endpoint.
 * Warns (but still attempts the call) when the model is not pulled locally.
 */
var DMRCompletionProvider = class extends OpenAiCompletionProvider {
  async callApi(prompt, context, callApiOptions) {
    const isPulled = await hasLocalModel(this.modelName, this.getApiUrl());
    if (!isPulled) {
      logger.warn(`Model '${this.modelName}' not found. Run 'docker model pull ${this.modelName}'.`);
    }
    return super.callApi(prompt, context, callApiOptions);
  }
};
|
|
71
|
+
/**
 * Embedding provider backed by Docker Model Runner's OpenAI-compatible endpoint.
 * Warns (but still attempts the call) when the model is not pulled locally.
 */
var DMREmbeddingProvider = class extends OpenAiEmbeddingProvider {
  async callEmbeddingApi(text) {
    const isPulled = await hasLocalModel(this.modelName, this.getApiUrl());
    if (!isPulled) {
      logger.warn(`Model '${this.modelName}' not found. Run 'docker model pull ${this.modelName}'.`);
    }
    return super.callEmbeddingApi(text);
  }
};
|
|
77
|
+
//#endregion
|
|
78
|
+
export { createDockerProvider };
|
|
79
|
+
|
|
80
|
+
//# sourceMappingURL=docker-BiqcTwLv.js.map
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
const require_logger = require("./logger-DyfK9PBt.cjs");
|
|
2
|
+
const require_cache = require("./cache-DsCxFlsZ.cjs");
|
|
3
|
+
const require_chat = require("./chat-CXX3xzkk.cjs");
|
|
4
|
+
const require_completion = require("./completion-Cxrt08sj.cjs");
|
|
5
|
+
//#region src/providers/docker.ts
|
|
6
|
+
/**
 * Lists the models currently available from the Docker Model Runner engine.
 * @param {string} apiBaseUrl - OpenAI-compatible base URL (e.g. "http://localhost:12434/engines/v1").
 * @returns {Promise<Array<{id?: string}>>} Models reported by GET `${apiBaseUrl}/models`, or [] when the response carries no data.
 * @throws {Error} When the endpoint cannot be reached; the original failure is attached as `cause`.
 */
async function fetchLocalModels(apiBaseUrl) {
  try {
    // NOTE(review): trailing args appear to disable caching (bust=true, TTL 0) so the
    // model list is always fresh — confirm against fetchWithCache's signature.
    const { data } = await require_cache.fetchWithCache(`${apiBaseUrl}/models`, void 0, void 0, "json", true, 0);
    return data?.data ?? [];
  } catch (e) {
    // `e` is not guaranteed to be an Error instance; stringify defensively and
    // preserve the original failure via `cause` instead of discarding it.
    const detail = e instanceof Error ? e.message : String(e);
    throw new Error(`Failed to connect to Docker Model Runner. Is it enabled? Are the API endpoints enabled? For details, see https://docs.docker.com/ai/model-runner. \n${detail}`, { cause: e });
  }
}
|
|
14
|
+
/**
 * Checks whether a model has already been pulled into the local Docker Model Runner.
 * The comparison is case-insensitive; uses toLowerCase() rather than
 * toLocaleLowerCase() so the result cannot vary with the host locale
 * (e.g. the Turkish dotless-i) for what is a programmatic ID comparison.
 * @param {string} modelId - Model identifier to look for.
 * @param {string} apiBaseUrl - Docker Model Runner engine base URL.
 * @returns {Promise<boolean>} True when a model with a matching id is listed locally.
 */
async function hasLocalModel(modelId, apiBaseUrl) {
  // Lower the needle once instead of on every iteration.
  const wanted = modelId?.toLowerCase();
  const models = await fetchLocalModels(apiBaseUrl);
  return models.some((model) => model && model.id?.toLowerCase() === wanted);
}
|
|
17
|
+
/**
 * Parses a provider path of the form "docker[:type]:model".
 * Recognized types are "chat", "completion", and "embeddings";
 * "embedding" is accepted as an alias for "embeddings". Any other
 * second segment is treated as part of the model name and the type
 * defaults to "chat".
 * @param {string} providerPath - e.g. "docker:chat:ai/llama3.2:latest".
 * @returns {{type: string, model: string}} The resolved type and model id.
 */
function parseProviderPath(providerPath) {
  const segments = providerPath.split(":");
  const candidate = segments[1];
  if (candidate === "chat" || candidate === "completion" || candidate === "embeddings") {
    return { type: candidate, model: segments.slice(2).join(":") };
  }
  if (candidate === "embedding") {
    // Normalize the singular alias to the canonical "embeddings".
    return { type: "embeddings", model: segments.slice(2).join(":") };
  }
  // No explicit type: everything after the scheme is the model name.
  return { type: "chat", model: segments.slice(1).join(":") };
}
|
|
37
|
+
/**
 * Factory for creating Docker Model Runner providers using OpenAI-compatible endpoints.
 * The engine URL and API key are resolved from provider options first, then
 * environment variables, then built-in defaults ("http://localhost:12434", key "dmr").
 * @param {string} providerPath - Provider path, e.g. "docker:chat:ai/llama3.2:latest".
 * @param {object} [options] - Provider options; options.config is merged with the resolved URL/key.
 * @returns {DMRChatCompletionProvider|DMRCompletionProvider|DMREmbeddingProvider}
 */
function createDockerProvider(providerPath, options = {}) {
  const host = options?.env?.DOCKER_MODEL_RUNNER_BASE_URL
    ?? require_logger.getEnvString("DOCKER_MODEL_RUNNER_BASE_URL")
    ?? "http://localhost:12434";
  const apiBaseUrl = `${host}/engines/v1`;
  const apiKey = options?.env?.DOCKER_MODEL_RUNNER_API_KEY
    ?? require_logger.getEnvString("DOCKER_MODEL_RUNNER_API_KEY")
    ?? "dmr";
  // Resolved endpoint/key override anything present in options.config.
  const openaiOptions = {
    ...options,
    config: {
      ...(options.config || {}),
      apiBaseUrl,
      apiKey,
    },
  };
  const { type, model } = parseProviderPath(providerPath);
  if (type === "completion") {
    return new DMRCompletionProvider(model, openaiOptions);
  }
  if (type === "embeddings") {
    return new DMREmbeddingProvider(model, openaiOptions);
  }
  // "chat" and any unrecognized type fall back to the chat provider.
  return new DMRChatCompletionProvider(model, openaiOptions);
}
|
|
59
|
+
/**
 * Chat provider backed by Docker Model Runner's OpenAI-compatible chat endpoint.
 * Warns (but still attempts the call) when the model is not pulled locally.
 */
var DMRChatCompletionProvider = class extends require_chat.OpenAiChatCompletionProvider {
  async callApi(prompt, context, callApiOptions) {
    const isPulled = await hasLocalModel(this.modelName, this.getApiUrl());
    if (!isPulled) {
      require_logger.logger.warn(`Model '${this.modelName}' not found. Run 'docker model pull ${this.modelName}'.`);
    }
    return super.callApi(prompt, context, callApiOptions);
  }
};
|
|
65
|
+
/**
 * Completion provider backed by Docker Model Runner's OpenAI-compatible endpoint.
 * Warns (but still attempts the call) when the model is not pulled locally.
 */
var DMRCompletionProvider = class extends require_completion.OpenAiCompletionProvider {
  async callApi(prompt, context, callApiOptions) {
    const isPulled = await hasLocalModel(this.modelName, this.getApiUrl());
    if (!isPulled) {
      require_logger.logger.warn(`Model '${this.modelName}' not found. Run 'docker model pull ${this.modelName}'.`);
    }
    return super.callApi(prompt, context, callApiOptions);
  }
};
|
|
71
|
+
/**
 * Embedding provider backed by Docker Model Runner's OpenAI-compatible endpoint.
 * Warns (but still attempts the call) when the model is not pulled locally.
 */
var DMREmbeddingProvider = class extends require_completion.OpenAiEmbeddingProvider {
  async callEmbeddingApi(text) {
    const isPulled = await hasLocalModel(this.modelName, this.getApiUrl());
    if (!isPulled) {
      require_logger.logger.warn(`Model '${this.modelName}' not found. Run 'docker model pull ${this.modelName}'.`);
    }
    return super.callEmbeddingApi(text);
  }
};
|
|
77
|
+
//#endregion
|
|
78
|
+
exports.createDockerProvider = createDockerProvider;
|
|
79
|
+
|
|
80
|
+
//# sourceMappingURL=docker-C7tEJnP-.cjs.map
|