@anthropic-field/core 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/cognitive/index.d.ts +1 -0
- package/dist/src/cognitive/index.js +8 -0
- package/dist/src/cognitive/index.js.map +1 -1
- package/dist/src/core/contentGenerator.d.ts +5 -1
- package/dist/src/core/contentGenerator.js +28 -1
- package/dist/src/core/contentGenerator.js.map +1 -1
- package/dist/src/core/openaiContentGenerator.d.ts +59 -0
- package/dist/src/core/openaiContentGenerator.js +442 -0
- package/dist/src/core/openaiContentGenerator.js.map +1 -0
- package/dist/src/index.d.ts +1 -0
- package/dist/src/index.js +1 -0
- package/dist/src/index.js.map +1 -1
- package/dist/src/providers/index.d.ts +9 -0
- package/dist/src/providers/index.js +10 -0
- package/dist/src/providers/index.js.map +1 -0
- package/dist/src/providers/openai-compatible.d.ts +118 -0
- package/dist/src/providers/openai-compatible.js +207 -0
- package/dist/src/providers/openai-compatible.js.map +1 -0
- package/dist/src/providers/provider-config.d.ts +57 -0
- package/dist/src/providers/provider-config.js +150 -0
- package/dist/src/providers/provider-config.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
|
@@ -20,3 +20,4 @@ export { executeModule, runModule, parseLLMResponse, setDefaultLLMCaller, getDef
|
|
|
20
20
|
export { CognitiveTool, createCognitiveTool, createCognitiveTools, CognitiveModuleRegistry, getGlobalRegistry, initGlobalRegistry, resetGlobalRegistry, getDefaultConfig, type CognitiveToolParams, type CognitiveToolResult, type CognitiveConfig, } from './tool/index.js';
|
|
21
21
|
export { discoverAndRegisterCognitiveTools, registerCognitiveModule, getCognitiveTools, isCognitiveTool, getCognitiveConfigFromSettings, createLLMCallerFromGeminiClient, type GeminiClientAdapter, } from './integration.js';
|
|
22
22
|
export { createLLMCallerFromContentGenerator, createSimpleLLMCaller, } from './gemini-adapter.js';
|
|
23
|
+
export { OpenAICompatibleClient, createLLMCallerFromOpenAI, createLLMCallerFromPreset, createLLMCallerFromEnv, PROVIDER_PRESETS, createLLMCaller, createDefaultLLMCaller, detectProviderFromEnv, DEFAULT_PROVIDER, DEFAULT_MODELS, API_KEY_ENV_VARS, type ProviderType, type ProviderSettings, } from '../providers/index.js';
|
|
@@ -47,4 +47,12 @@ export { discoverAndRegisterCognitiveTools, registerCognitiveModule, getCognitiv
|
|
|
47
47
|
// Gemini Adapter
|
|
48
48
|
// =============================================================================
|
|
49
49
|
export { createLLMCallerFromContentGenerator, createSimpleLLMCaller, } from './gemini-adapter.js';
|
|
50
|
+
// =============================================================================
|
|
51
|
+
// OpenAI-Compatible Providers (MiniMax, DeepSeek, Qwen, etc.)
|
|
52
|
+
// =============================================================================
|
|
53
|
+
export {
|
|
54
|
+
// OpenAI-compatible client
|
|
55
|
+
OpenAICompatibleClient, createLLMCallerFromOpenAI, createLLMCallerFromPreset, createLLMCallerFromEnv, PROVIDER_PRESETS,
|
|
56
|
+
// Provider configuration (MiniMax default)
|
|
57
|
+
createLLMCaller, createDefaultLLMCaller, detectProviderFromEnv, DEFAULT_PROVIDER, DEFAULT_MODELS, API_KEY_ENV_VARS, } from '../providers/index.js';
|
|
50
58
|
//# sourceMappingURL=index.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/cognitive/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA2CH,gFAAgF;AAChF,SAAS;AACT,gFAAgF;AAEhF,OAAO,EACL,UAAU,EACV,UAAU,EACV,eAAe,EACf,YAAY,EACZ,eAAe,EACf,iBAAiB,EACjB,aAAa,EACb,iBAAiB,EACjB,cAAc,GAEf,MAAM,mBAAmB,CAAC;AAE3B,gFAAgF;AAChF,YAAY;AACZ,gFAAgF;AAEhF,OAAO,EACL,aAAa,EACb,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,gBAAgB,EAChB,gBAAgB,EAChB,YAAY,EACZ,kBAAkB,EAClB,aAAa,EACb,iBAAiB,EACjB,iBAAiB,EACjB,yBAAyB,EACzB,kBAAkB,GAGnB,MAAM,sBAAsB,CAAC;AAE9B,gFAAgF;AAChF,UAAU;AACV,gFAAgF;AAEhF,OAAO;AACL,WAAW;AACX,aAAa,EACb,SAAS,EACT,gBAAgB,EAChB,mBAAmB,EACnB,mBAAmB,EACnB,WAAW,EACX,cAAc,EACd,oBAAoB;AAIpB,mBAAmB;AACnB,YAAY,EACZ,uBAAuB,EACvB,qBAAqB,EACrB,qBAAqB,EACrB,gBAAgB,EAChB,uBAAuB,EACvB,yBAAyB,EACzB,2BAA2B,EAC3B,mBAAmB;AAEnB,OAAO;AACP,aAAa,EACb,qBAAqB,EACrB,WAAW,EACX,OAAO,EACP,cAAc;AAEd,SAAS;AACT,cAAc,EACd,mBAAmB,EACnB,SAAS,EACT,gBAAgB;AAGhB,SAAS;AACT,WAAW,EACX,iBAAiB,EACjB,mBAAmB,EACnB,iBAAiB,GAElB,MAAM,oBAAoB,CAAC;AAE5B,gFAAgF;AAChF,mBAAmB;AACnB,gFAAgF;AAEhF,OAAO,EACL,aAAa,EACb,mBAAmB,EACnB,oBAAoB,EACpB,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,EAClB,mBAAmB,EACnB,gBAAgB,GAIjB,MAAM,iBAAiB,CAAC;AAEzB,gFAAgF;AAChF,cAAc;AACd,gFAAgF;AAEhF,OAAO,EACL,iCAAiC,EACjC,uBAAuB,EACvB,iBAAiB,EACjB,eAAe,EACf,8BAA8B,EAC9B,+BAA+B,GAEhC,MAAM,kBAAkB,CAAC;AAE1B,gFAAgF;AAChF,iBAAiB;AACjB,gFAAgF;AAEhF,OAAO,EACL,mCAAmC,EACnC,qBAAqB,GACtB,MAAM,qBAAqB,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/cognitive/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA2CH,gFAAgF;AAChF,SAAS;AACT,gFAAgF;AAEhF,OAAO,EACL,UAAU,EACV,UAAU,EACV,eAAe,EACf,YAAY,EACZ,eAAe,EACf,iBAAiB,EACjB,aAAa,EACb,iBAAiB,EACjB,cAAc,GAEf,MAAM,mBAAmB,CAAC;AAE3B,gFAAgF;AAChF,YAAY;AACZ,gFAAgF;AAEhF,OAAO,EACL,aAAa,EACb,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,gBAAgB,EAChB,gBAAgB,EAChB,YAAY,EACZ,kBAAkB,EAClB,aAAa,EACb,iBAAiB,EACjB,iBAAiB,EACjB,yBAAyB,EACzB,kBAAkB,GAGnB,MAAM,sBAAsB,CAAC;AAE9B,gFAAgF;AAChF,UAAU;AACV,gFAAgF;AAEhF,OAAO;AACL,WAAW;AACX,aAAa,EACb,SAAS,EACT,gBAAgB,EAChB,mBAAmB,EACnB,mBAAmB,EACnB,WAAW,EACX,cAAc,EACd,oBAAoB;AAIpB,mBAAmB;AACnB,YAAY,EACZ,uBAAuB,EACvB,qBAAqB,EACrB,qBAAqB,EACrB,gBAAgB,EAChB,uBAAuB,EACvB,yBAAyB,EACzB,2BAA2B,EAC3B,mBAAmB;AAEnB,OAAO;AACP,aAAa,EACb,qBAAqB,EACrB,WAAW,EACX,OAAO,EACP,cAAc;AAEd,SAAS;AACT,cAAc,EACd,mBAAmB,EACnB,SAAS,EACT,gBAAgB;AAGhB,SAAS;AACT,WAAW,EACX,iBAAiB,EACjB,mBAAmB,EACnB,iBAAiB,GAElB,MAAM,oBAAoB,CAAC;AAE5B,gFAAgF;AAChF,mBAAmB;AACnB,gFAAgF;AAEhF,OAAO,EACL,aAAa,EACb,mBAAmB,EACnB,oBAAoB,EACpB,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,EAClB,mBAAmB,EACnB,gBAAgB,GAIjB,MAAM,iBAAiB,CAAC;AAEzB,gFAAgF;AAChF,cAAc;AACd,gFAAgF;AAEhF,OAAO,EACL,iCAAiC,EACjC,uBAAuB,EACvB,iBAAiB,EACjB,eAAe,EACf,8BAA8B,EAC9B,+BAA+B,GAEhC,MAAM,kBAAkB,CAAC;AAE1B,gFAAgF;AAChF,iBAAiB;AACjB,gFAAgF;AAEhF,OAAO,EACL,mCAAmC,EACnC,qBAAqB,GACtB,MAAM,qBAAqB,CAAC;AAE7B,gFAAgF;AAChF,8DAA8D;AAC9D,gFAAgF;AAEhF,OAAO;AACL,2BAA2B;AAC3B,sBAAsB,EACtB,yBAAyB,EACzB,yBAAyB,EACzB,sBAAsB,EACtB,gBAAgB;AAChB,2CAA2C;AAC3C,eAAe,EACf,sBAAsB,EACtB,qBAAqB,EACrB,gBAAgB,EAChB,cAAc,EACd,gBAAgB,GAGjB,MAAM,uBAAuB,CAAC"}
|
|
@@ -6,6 +6,7 @@
|
|
|
6
6
|
import type { CountTokensResponse, GenerateContentResponse, GenerateContentParameters, CountTokensParameters, EmbedContentResponse, EmbedContentParameters } from '@google/genai';
|
|
7
7
|
import type { Config } from '../config/config.js';
|
|
8
8
|
import type { UserTierId } from '../code_assist/types.js';
|
|
9
|
+
import { type ProviderType } from '../providers/provider-config.js';
|
|
9
10
|
/**
|
|
10
11
|
* Interface abstracting the core functionalities for generating content and counting tokens.
|
|
11
12
|
*/
|
|
@@ -22,13 +23,16 @@ export declare enum AuthType {
|
|
|
22
23
|
USE_GEMINI = "gemini-api-key",
|
|
23
24
|
USE_VERTEX_AI = "vertex-ai",
|
|
24
25
|
LEGACY_CLOUD_SHELL = "cloud-shell",
|
|
25
|
-
COMPUTE_ADC = "compute-default-credentials"
|
|
26
|
+
COMPUTE_ADC = "compute-default-credentials",
|
|
27
|
+
USE_OPENAI_COMPATIBLE = "openai-compatible"
|
|
26
28
|
}
|
|
27
29
|
export type ContentGeneratorConfig = {
|
|
28
30
|
apiKey?: string;
|
|
29
31
|
vertexai?: boolean;
|
|
30
32
|
authType?: AuthType;
|
|
31
33
|
proxy?: string;
|
|
34
|
+
openaiProvider?: ProviderType;
|
|
35
|
+
openaiModel?: string;
|
|
32
36
|
};
|
|
33
37
|
export declare function createContentGeneratorConfig(config: Config, authType: AuthType | undefined): Promise<ContentGeneratorConfig>;
|
|
34
38
|
export declare function createContentGenerator(config: ContentGeneratorConfig, gcConfig: Config, sessionId?: string): Promise<ContentGenerator>;
|
|
@@ -12,6 +12,8 @@ import { FakeContentGenerator } from './fakeContentGenerator.js';
|
|
|
12
12
|
import { parseCustomHeaders } from '../utils/customHeaderUtils.js';
|
|
13
13
|
import { RecordingContentGenerator } from './recordingContentGenerator.js';
|
|
14
14
|
import { getVersion, resolveModel } from '../../index.js';
|
|
15
|
+
import { OpenAIContentGenerator } from './openaiContentGenerator.js';
|
|
16
|
+
import { detectProviderFromEnv, getApiKey, DEFAULT_MODELS, } from '../providers/provider-config.js';
|
|
15
17
|
export var AuthType;
|
|
16
18
|
(function (AuthType) {
|
|
17
19
|
AuthType["LOGIN_WITH_GOOGLE"] = "oauth-personal";
|
|
@@ -19,6 +21,8 @@ export var AuthType;
|
|
|
19
21
|
AuthType["USE_VERTEX_AI"] = "vertex-ai";
|
|
20
22
|
AuthType["LEGACY_CLOUD_SHELL"] = "cloud-shell";
|
|
21
23
|
AuthType["COMPUTE_ADC"] = "compute-default-credentials";
|
|
24
|
+
// OpenAI-compatible providers (MiniMax, DeepSeek, Qwen, etc.)
|
|
25
|
+
AuthType["USE_OPENAI_COMPATIBLE"] = "openai-compatible";
|
|
22
26
|
})(AuthType || (AuthType = {}));
|
|
23
27
|
export async function createContentGeneratorConfig(config, authType) {
|
|
24
28
|
const geminiApiKey = process.env['GEMINI_API_KEY'] || (await loadApiKey()) || undefined;
|
|
@@ -31,6 +35,18 @@ export async function createContentGeneratorConfig(config, authType) {
|
|
|
31
35
|
authType,
|
|
32
36
|
proxy: config?.getProxy(),
|
|
33
37
|
};
|
|
38
|
+
// Check for OpenAI-compatible providers first (MiniMax is default)
|
|
39
|
+
const detectedProvider = detectProviderFromEnv();
|
|
40
|
+
if (detectedProvider && detectedProvider !== 'gemini') {
|
|
41
|
+
const apiKey = getApiKey(detectedProvider);
|
|
42
|
+
if (apiKey) {
|
|
43
|
+
contentGeneratorConfig.authType = AuthType.USE_OPENAI_COMPATIBLE;
|
|
44
|
+
contentGeneratorConfig.apiKey = apiKey;
|
|
45
|
+
contentGeneratorConfig.openaiProvider = detectedProvider;
|
|
46
|
+
contentGeneratorConfig.openaiModel = DEFAULT_MODELS[detectedProvider];
|
|
47
|
+
return contentGeneratorConfig;
|
|
48
|
+
}
|
|
49
|
+
}
|
|
34
50
|
// If we are using Google auth or we are in Cloud Shell, there is nothing else to validate for now
|
|
35
51
|
if (authType === AuthType.LOGIN_WITH_GOOGLE ||
|
|
36
52
|
authType === AuthType.COMPUTE_ADC) {
|
|
@@ -55,10 +71,21 @@ export async function createContentGenerator(config, gcConfig, sessionId) {
|
|
|
55
71
|
const fakeGenerator = await FakeContentGenerator.fromFile(gcConfig.fakeResponses);
|
|
56
72
|
return new LoggingContentGenerator(fakeGenerator, gcConfig);
|
|
57
73
|
}
|
|
74
|
+
// Handle OpenAI-compatible providers (MiniMax, DeepSeek, Qwen, etc.)
|
|
75
|
+
if (config.authType === AuthType.USE_OPENAI_COMPATIBLE &&
|
|
76
|
+
config.openaiProvider &&
|
|
77
|
+
config.apiKey) {
|
|
78
|
+
const openaiGenerator = new OpenAIContentGenerator({
|
|
79
|
+
provider: config.openaiProvider,
|
|
80
|
+
apiKey: config.apiKey,
|
|
81
|
+
model: config.openaiModel,
|
|
82
|
+
});
|
|
83
|
+
return new LoggingContentGenerator(openaiGenerator, gcConfig);
|
|
84
|
+
}
|
|
58
85
|
const version = await getVersion();
|
|
59
86
|
const model = resolveModel(gcConfig.getModel(), gcConfig.getPreviewFeatures());
|
|
60
87
|
const customHeadersEnv = process.env['GEMINI_CLI_CUSTOM_HEADERS'] || undefined;
|
|
61
|
-
const userAgent = `
|
|
88
|
+
const userAgent = `FieldCLI/${version}/${model} (${process.platform}; ${process.arch})`;
|
|
62
89
|
const customHeadersMap = parseCustomHeaders(customHeadersEnv);
|
|
63
90
|
const apiKeyAuthMechanism = process.env['GEMINI_API_KEY_AUTH_MECHANISM'] || 'x-goog-api-key';
|
|
64
91
|
const apiVersionEnv = process.env['GOOGLE_GENAI_API_VERSION'];
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"contentGenerator.js","sourceRoot":"","sources":["../../../src/core/contentGenerator.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAUH,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,gCAAgC,EAAE,MAAM,8BAA8B,CAAC;AAEhF,OAAO,EAAE,UAAU,EAAE,MAAM,8BAA8B,CAAC;AAG1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAC;AACvE,OAAO,EAAE,mBAAmB,EAAE,MAAM,iCAAiC,CAAC;AACtE,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AACnE,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAC3E,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;
|
|
1
|
+
{"version":3,"file":"contentGenerator.js","sourceRoot":"","sources":["../../../src/core/contentGenerator.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAUH,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,gCAAgC,EAAE,MAAM,8BAA8B,CAAC;AAEhF,OAAO,EAAE,UAAU,EAAE,MAAM,8BAA8B,CAAC;AAG1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,8BAA8B,CAAC;AACvE,OAAO,EAAE,mBAAmB,EAAE,MAAM,iCAAiC,CAAC;AACtE,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AACnE,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAC3E,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC1D,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EACL,qBAAqB,EACrB,SAAS,EACT,cAAc,GAEf,MAAM,iCAAiC,CAAC;AAyBzC,MAAM,CAAN,IAAY,QAQX;AARD,WAAY,QAAQ;IAClB,gDAAoC,CAAA;IACpC,yCAA6B,CAAA;IAC7B,uCAA2B,CAAA;IAC3B,8CAAkC,CAAA;IAClC,uDAA2C,CAAA;IAC3C,8DAA8D;IAC9D,uDAA2C,CAAA;AAC7C,CAAC,EARW,QAAQ,KAAR,QAAQ,QAQnB;AAYD,MAAM,CAAC,KAAK,UAAU,4BAA4B,CAChD,MAAc,EACd,QAA8B;IAE9B,MAAM,YAAY,GAChB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,CAAC,MAAM,UAAU,EAAE,CAAC,IAAI,SAAS,CAAC;IACrE,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,IAAI,SAAS,CAAC;IAChE,MAAM,kBAAkB,GACtB,OAAO,CAAC,GAAG,CAAC,sBAAsB,CAAC;QACnC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC;QACtC,SAAS,CAAC;IACZ,MAAM,mBAAmB,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,IAAI,SAAS,CAAC;IAE9E,MAAM,sBAAsB,GAA2B;QACrD,QAAQ;QACR,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE;KAC1B,CAAC;IAEF,mEAAmE;IACnE,MAAM,gBAAgB,GAAG,qBAAqB,EAAE,CAAC;IACjD,IAAI,gBAAgB,IAAI,gBAAgB,KAAK,QAAQ,EAAE,CAAC;QACtD,MAAM,MAAM,GAAG,SAAS,CAAC,gBAAgB,CAAC,CAAC;QAC3C,IAAI,MAAM,EAAE,CAAC;YACX,sBAAsB,CAAC,QAAQ,GAAG,QAAQ,CAAC,qBAAqB,CAAC;YACjE,sBAAsB,CAAC,MAAM,GAAG,MAAM,CAAC;YACvC,sBAAsB,CAAC,cAAc,GAAG,gBAAgB,CAAC;YACzD,sBAAsB,CAAC,WAAW,GAAG,cAAc,CAAC,gBAAgB,CAAC,CAAC;YACtE,OAAO,sBAAsB,CAAC;QAChC,CAAC;IACH,CAAC;IAED,kGAAkG;IAClG,IACE,QAAQ,KAAK,QAAQ,CAAC,iBAAiB;QACvC,QAAQ,KAAK,QAAQ,CAAC,WAAW,EACjC,CAAC;QACD,OAAO,sBAAsB,CAAC;IAChC,CAAC;IAED,IAAI,QAAQ,KAAK,QAAQ,CAAC,UAAU,IAAI,YAAY,EAAE,CAAC;QACrD,sBAAsB,CAAC,MAAM,GAAG,YAAY,CAAC;QAC7C,sBAAsB,CAAC,QAAQ,GAAG,KAAK,CAA
C;QAExC,OAAO,sBAAsB,CAAC;IAChC,CAAC;IAED,IACE,QAAQ,KAAK,QAAQ,CAAC,aAAa;QACnC,CAAC,YAAY,IAAI,CAAC,kBAAkB,IAAI,mBAAmB,CAAC,CAAC,EAC7D,CAAC;QACD,sBAAsB,CAAC,MAAM,GAAG,YAAY,CAAC;QAC7C,sBAAsB,CAAC,QAAQ,GAAG,IAAI,CAAC;QAEvC,OAAO,sBAAsB,CAAC;IAChC,CAAC;IAED,OAAO,sBAAsB,CAAC;AAChC,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,sBAAsB,CAC1C,MAA8B,EAC9B,QAAgB,EAChB,SAAkB;IAElB,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAClC,IAAI,QAAQ,CAAC,aAAa,EAAE,CAAC;YAC3B,MAAM,aAAa,GAAG,MAAM,oBAAoB,CAAC,QAAQ,CACvD,QAAQ,CAAC,aAAa,CACvB,CAAC;YACF,OAAO,IAAI,uBAAuB,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;QAC9D,CAAC;QAED,qEAAqE;QACrE,IACE,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAC,qBAAqB;YAClD,MAAM,CAAC,cAAc;YACrB,MAAM,CAAC,MAAM,EACb,CAAC;YACD,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC;gBACjD,QAAQ,EAAE,MAAM,CAAC,cAAc;gBAC/B,MAAM,EAAE,MAAM,CAAC,MAAM;gBACrB,KAAK,EAAE,MAAM,CAAC,WAAW;aAC1B,CAAC,CAAC;YACH,OAAO,IAAI,uBAAuB,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QAChE,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,UAAU,EAAE,CAAC;QACnC,MAAM,KAAK,GAAG,YAAY,CACxB,QAAQ,CAAC,QAAQ,EAAE,EACnB,QAAQ,CAAC,kBAAkB,EAAE,CAC9B,CAAC;QACF,MAAM,gBAAgB,GACpB,OAAO,CAAC,GAAG,CAAC,2BAA2B,CAAC,IAAI,SAAS,CAAC;QACxD,MAAM,SAAS,GAAG,YAAY,OAAO,IAAI,KAAK,KAAK,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAC,IAAI,GAAG,CAAC;QACxF,MAAM,gBAAgB,GAAG,kBAAkB,CAAC,gBAAgB,CAAC,CAAC;QAC9D,MAAM,mBAAmB,GACvB,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,IAAI,gBAAgB,CAAC;QACnE,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC;QAE9D,MAAM,WAAW,GAA2B;YAC1C,GAAG,gBAAgB;YACnB,YAAY,EAAE,SAAS;SACxB,CAAC;QAEF,IACE,mBAAmB,KAAK,QAAQ;YAChC,CAAC,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAC,UAAU;gBACtC,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAC,aAAa,CAAC;YAC7C,MAAM,CAAC,MAAM,EACb,CAAC;YACD,WAAW,CAAC,eAAe,CAAC,GAAG,UAAU,MAAM,CAAC,MAAM,EAAE,CAAC;QAC3D,CAAC;QACD,IACE,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAC,iBAAiB;YAC9C,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAC,WAAW,EACxC,CAAC;YACD,MAAM,WAAW,GAAG,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC;YAC7C,OAAO,IAAI,uBAAuB,CAChC,MAAM,gCAAgC,CACpC,WAAW,EACX,MAAM,CAAC,QAAQ,EACf,QAAQ,EACR,SAAS,CACV,EACD,QAAQ,CACT,CAAC;QACJ,CAAC;QAED,IACE,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAC,UAAU;YACvC,MAAM,CAAC,QAA
Q,KAAK,QAAQ,CAAC,aAAa,EAC1C,CAAC;YACD,IAAI,OAAO,GAA2B,EAAE,GAAG,WAAW,EAAE,CAAC;YACzD,IAAI,QAAQ,EAAE,yBAAyB,EAAE,EAAE,CAAC;gBAC1C,MAAM,mBAAmB,GAAG,IAAI,mBAAmB,EAAE,CAAC;gBACtD,MAAM,cAAc,GAAG,mBAAmB,CAAC,iBAAiB,EAAE,CAAC;gBAC/D,OAAO,GAAG;oBACR,GAAG,OAAO;oBACV,iCAAiC,EAAE,GAAG,cAAc,EAAE;iBACvD,CAAC;YACJ,CAAC;YACD,MAAM,WAAW,GAAG,EAAE,OAAO,EAAE,CAAC;YAEhC,MAAM,WAAW,GAAG,IAAI,WAAW,CAAC;gBAClC,MAAM,EAAE,MAAM,CAAC,MAAM,KAAK,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM;gBACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;gBACzB,WAAW;gBACX,GAAG,CAAC,aAAa,IAAI,EAAE,UAAU,EAAE,aAAa,EAAE,CAAC;aACpD,CAAC,CAAC;YACH,OAAO,IAAI,uBAAuB,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;QACnE,CAAC;QACD,MAAM,IAAI,KAAK,CACb,0DAA0D,MAAM,CAAC,QAAQ,EAAE,CAC5E,CAAC;IACJ,CAAC,CAAC,EAAE,CAAC;IAEL,IAAI,QAAQ,CAAC,eAAe,EAAE,CAAC;QAC7B,OAAO,IAAI,yBAAyB,CAAC,SAAS,EAAE,QAAQ,CAAC,eAAe,CAAC,CAAC;IAC5E,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC"}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @license
|
|
3
|
+
* Copyright 2025 Google LLC
|
|
4
|
+
* SPDX-License-Identifier: Apache-2.0
|
|
5
|
+
*
|
|
6
|
+
* OpenAI-Compatible ContentGenerator
|
|
7
|
+
*
|
|
8
|
+
* Allows Field CLI to use any OpenAI-compatible API as the main model:
|
|
9
|
+
* - MiniMax (default)
|
|
10
|
+
* - DeepSeek
|
|
11
|
+
* - Qwen
|
|
12
|
+
* - Moonshot
|
|
13
|
+
* - OpenAI
|
|
14
|
+
* - Together AI
|
|
15
|
+
* - OpenRouter
|
|
16
|
+
*/
|
|
17
|
+
import type { CountTokensResponse, GenerateContentResponse, GenerateContentParameters, CountTokensParameters, EmbedContentResponse, EmbedContentParameters } from '@google/genai';
|
|
18
|
+
import type { ContentGenerator } from './contentGenerator.js';
|
|
19
|
+
import type { UserTierId } from '../code_assist/types.js';
|
|
20
|
+
import type { ProviderType } from '../providers/provider-config.js';
|
|
21
|
+
export interface OpenAIContentGeneratorConfig {
|
|
22
|
+
provider: ProviderType;
|
|
23
|
+
apiKey: string;
|
|
24
|
+
model?: string;
|
|
25
|
+
baseUrl?: string;
|
|
26
|
+
}
|
|
27
|
+
export declare class OpenAIContentGenerator implements ContentGenerator {
|
|
28
|
+
private client;
|
|
29
|
+
private config;
|
|
30
|
+
userTier?: UserTierId;
|
|
31
|
+
userTierName?: string;
|
|
32
|
+
constructor(config: OpenAIContentGeneratorConfig);
|
|
33
|
+
/**
|
|
34
|
+
* Convert Gemini-style Content to OpenAI messages.
|
|
35
|
+
*/
|
|
36
|
+
private convertToOpenAIMessages;
|
|
37
|
+
/**
|
|
38
|
+
* Convert OpenAI response to Gemini-style GenerateContentResponse.
|
|
39
|
+
*/
|
|
40
|
+
private convertToGeminiResponse;
|
|
41
|
+
private mapFinishReason;
|
|
42
|
+
/**
|
|
43
|
+
* Generate content (non-streaming).
|
|
44
|
+
*/
|
|
45
|
+
generateContent(request: GenerateContentParameters, _userPromptId: string): Promise<GenerateContentResponse>;
|
|
46
|
+
/**
|
|
47
|
+
* Generate content (streaming).
|
|
48
|
+
*/
|
|
49
|
+
generateContentStream(request: GenerateContentParameters, _userPromptId: string): Promise<AsyncGenerator<GenerateContentResponse>>;
|
|
50
|
+
/**
|
|
51
|
+
* Count tokens (estimated - OpenAI-compatible APIs may not support this).
|
|
52
|
+
*/
|
|
53
|
+
countTokens(request: CountTokensParameters): Promise<CountTokensResponse>;
|
|
54
|
+
/**
|
|
55
|
+
* Embed content (not supported by most OpenAI-compatible APIs).
|
|
56
|
+
*/
|
|
57
|
+
embedContent(_request: EmbedContentParameters): Promise<EmbedContentResponse>;
|
|
58
|
+
}
|
|
59
|
+
export declare function createOpenAIContentGenerator(config: OpenAIContentGeneratorConfig): ContentGenerator;
|
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @license
|
|
3
|
+
* Copyright 2025 Google LLC
|
|
4
|
+
* SPDX-License-Identifier: Apache-2.0
|
|
5
|
+
*
|
|
6
|
+
* OpenAI-Compatible ContentGenerator
|
|
7
|
+
*
|
|
8
|
+
* Allows Field CLI to use any OpenAI-compatible API as the main model:
|
|
9
|
+
* - MiniMax (default)
|
|
10
|
+
* - DeepSeek
|
|
11
|
+
* - Qwen
|
|
12
|
+
* - Moonshot
|
|
13
|
+
* - OpenAI
|
|
14
|
+
* - Together AI
|
|
15
|
+
* - OpenRouter
|
|
16
|
+
*/
|
|
17
|
+
import { OpenAICompatibleClient, PROVIDER_PRESETS, } from '../providers/openai-compatible.js';
|
|
18
|
+
// =============================================================================
|
|
19
|
+
// Helper Functions
|
|
20
|
+
// =============================================================================
|
|
21
|
+
function normalizeContents(contents) {
|
|
22
|
+
if (!contents)
|
|
23
|
+
return [];
|
|
24
|
+
if (typeof contents === 'string') {
|
|
25
|
+
return [{ role: 'user', parts: [{ text: contents }] }];
|
|
26
|
+
}
|
|
27
|
+
if (Array.isArray(contents)) {
|
|
28
|
+
return contents.map((c) => {
|
|
29
|
+
if (typeof c === 'string') {
|
|
30
|
+
return { role: 'user', parts: [{ text: c }] };
|
|
31
|
+
}
|
|
32
|
+
if (c && typeof c === 'object' && 'role' in c && 'parts' in c) {
|
|
33
|
+
return c;
|
|
34
|
+
}
|
|
35
|
+
// It's a Part
|
|
36
|
+
return { role: 'user', parts: [c] };
|
|
37
|
+
});
|
|
38
|
+
}
|
|
39
|
+
// Single Content or Part
|
|
40
|
+
if (contents && typeof contents === 'object') {
|
|
41
|
+
if ('role' in contents && 'parts' in contents) {
|
|
42
|
+
return [contents];
|
|
43
|
+
}
|
|
44
|
+
return [{ role: 'user', parts: [contents] }];
|
|
45
|
+
}
|
|
46
|
+
return [];
|
|
47
|
+
}
|
|
48
|
+
function getSystemInstruction(systemInstruction) {
|
|
49
|
+
if (!systemInstruction)
|
|
50
|
+
return undefined;
|
|
51
|
+
if (typeof systemInstruction === 'string')
|
|
52
|
+
return systemInstruction;
|
|
53
|
+
if (typeof systemInstruction === 'object' && systemInstruction !== null) {
|
|
54
|
+
const obj = systemInstruction;
|
|
55
|
+
if ('parts' in obj && Array.isArray(obj['parts'])) {
|
|
56
|
+
return obj['parts']
|
|
57
|
+
.filter((p) => typeof p === 'object' && p !== null && 'text' in p)
|
|
58
|
+
.map((p) => String(p['text']))
|
|
59
|
+
.join('\n');
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
return undefined;
|
|
63
|
+
}
|
|
64
|
+
// =============================================================================
|
|
65
|
+
// OpenAI Content Generator
|
|
66
|
+
// =============================================================================
|
|
67
|
+
export class OpenAIContentGenerator {
|
|
68
|
+
client;
|
|
69
|
+
config;
|
|
70
|
+
userTier;
|
|
71
|
+
userTierName;
|
|
72
|
+
constructor(config) {
|
|
73
|
+
this.config = config;
|
|
74
|
+
// Create client
|
|
75
|
+
const preset = PROVIDER_PRESETS[config.provider];
|
|
76
|
+
if (preset) {
|
|
77
|
+
this.client = new OpenAICompatibleClient({
|
|
78
|
+
...preset,
|
|
79
|
+
apiKey: config.apiKey,
|
|
80
|
+
defaultModel: config.model || preset.defaultModel,
|
|
81
|
+
});
|
|
82
|
+
}
|
|
83
|
+
else if (config.baseUrl) {
|
|
84
|
+
this.client = new OpenAICompatibleClient({
|
|
85
|
+
baseUrl: config.baseUrl,
|
|
86
|
+
apiKey: config.apiKey,
|
|
87
|
+
defaultModel: config.model || 'custom-model',
|
|
88
|
+
providerName: 'Custom',
|
|
89
|
+
});
|
|
90
|
+
}
|
|
91
|
+
else {
|
|
92
|
+
throw new Error(`Unknown provider: ${config.provider}`);
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
/**
|
|
96
|
+
* Convert Gemini-style Content to OpenAI messages.
|
|
97
|
+
*/
|
|
98
|
+
convertToOpenAIMessages(contents, systemInstruction) {
|
|
99
|
+
const messages = [];
|
|
100
|
+
// Add system message if present
|
|
101
|
+
if (systemInstruction) {
|
|
102
|
+
messages.push({ role: 'system', content: systemInstruction });
|
|
103
|
+
}
|
|
104
|
+
// Convert each content
|
|
105
|
+
for (const content of contents) {
|
|
106
|
+
const role = content.role === 'model' ? 'assistant' : 'user';
|
|
107
|
+
// Check for function calls
|
|
108
|
+
const functionCalls = content.parts?.filter((p) => 'functionCall' in p);
|
|
109
|
+
// Check for function responses
|
|
110
|
+
const functionResponses = content.parts?.filter((p) => 'functionResponse' in p);
|
|
111
|
+
if (functionCalls && functionCalls.length > 0) {
|
|
112
|
+
// Assistant with tool calls
|
|
113
|
+
messages.push({
|
|
114
|
+
role: 'assistant',
|
|
115
|
+
content: null,
|
|
116
|
+
tool_calls: functionCalls.map((fc, idx) => ({
|
|
117
|
+
id: `call_${idx}`,
|
|
118
|
+
type: 'function',
|
|
119
|
+
function: {
|
|
120
|
+
name: fc.functionCall.name || '',
|
|
121
|
+
arguments: JSON.stringify(fc.functionCall.args || {}),
|
|
122
|
+
},
|
|
123
|
+
})),
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
else if (functionResponses && functionResponses.length > 0) {
|
|
127
|
+
// Tool response
|
|
128
|
+
for (const fr of functionResponses) {
|
|
129
|
+
messages.push({
|
|
130
|
+
role: 'tool',
|
|
131
|
+
content: JSON.stringify(fr.functionResponse.response),
|
|
132
|
+
tool_call_id: `call_0`, // Simplified
|
|
133
|
+
});
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
else {
|
|
137
|
+
// Regular text message
|
|
138
|
+
const textParts = content.parts
|
|
139
|
+
?.filter((p) => 'text' in p)
|
|
140
|
+
.map((p) => p.text)
|
|
141
|
+
.join('');
|
|
142
|
+
if (textParts) {
|
|
143
|
+
messages.push({ role, content: textParts });
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
return messages;
|
|
148
|
+
}
|
|
149
|
+
/**
|
|
150
|
+
* Convert OpenAI response to Gemini-style GenerateContentResponse.
|
|
151
|
+
*/
|
|
152
|
+
convertToGeminiResponse(response) {
|
|
153
|
+
const choice = response.choices[0];
|
|
154
|
+
if (!choice) {
|
|
155
|
+
// Empty response - create minimal valid response
|
|
156
|
+
return {
|
|
157
|
+
text: '',
|
|
158
|
+
candidates: [],
|
|
159
|
+
modelVersion: response.model,
|
|
160
|
+
usageMetadata: response.usage
|
|
161
|
+
? {
|
|
162
|
+
promptTokenCount: response.usage.prompt_tokens,
|
|
163
|
+
candidatesTokenCount: response.usage.completion_tokens,
|
|
164
|
+
totalTokenCount: response.usage.total_tokens,
|
|
165
|
+
}
|
|
166
|
+
: undefined,
|
|
167
|
+
};
|
|
168
|
+
}
|
|
169
|
+
const parts = [];
|
|
170
|
+
// Handle tool calls
|
|
171
|
+
if (choice.message.tool_calls && choice.message.tool_calls.length > 0) {
|
|
172
|
+
for (const tc of choice.message.tool_calls) {
|
|
173
|
+
parts.push({
|
|
174
|
+
functionCall: {
|
|
175
|
+
name: tc.function.name,
|
|
176
|
+
args: JSON.parse(tc.function.arguments || '{}'),
|
|
177
|
+
},
|
|
178
|
+
});
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
// Handle text content
|
|
182
|
+
if (choice.message.content) {
|
|
183
|
+
parts.push({ text: choice.message.content });
|
|
184
|
+
}
|
|
185
|
+
const textContent = choice.message.content || '';
|
|
186
|
+
return {
|
|
187
|
+
text: textContent,
|
|
188
|
+
candidates: [
|
|
189
|
+
{
|
|
190
|
+
content: {
|
|
191
|
+
role: 'model',
|
|
192
|
+
parts,
|
|
193
|
+
},
|
|
194
|
+
finishReason: this.mapFinishReason(choice.finish_reason),
|
|
195
|
+
index: choice.index,
|
|
196
|
+
},
|
|
197
|
+
],
|
|
198
|
+
modelVersion: response.model,
|
|
199
|
+
usageMetadata: response.usage
|
|
200
|
+
? {
|
|
201
|
+
promptTokenCount: response.usage.prompt_tokens,
|
|
202
|
+
candidatesTokenCount: response.usage.completion_tokens,
|
|
203
|
+
totalTokenCount: response.usage.total_tokens,
|
|
204
|
+
}
|
|
205
|
+
: undefined,
|
|
206
|
+
};
|
|
207
|
+
}
|
|
208
|
+
mapFinishReason(reason) {
|
|
209
|
+
switch (reason) {
|
|
210
|
+
case 'stop':
|
|
211
|
+
return 'STOP';
|
|
212
|
+
case 'length':
|
|
213
|
+
return 'MAX_TOKENS';
|
|
214
|
+
case 'content_filter':
|
|
215
|
+
return 'SAFETY';
|
|
216
|
+
default:
|
|
217
|
+
return 'OTHER';
|
|
218
|
+
}
|
|
219
|
+
}
|
|
220
|
+
/**
|
|
221
|
+
* Generate content (non-streaming).
|
|
222
|
+
*/
|
|
223
|
+
async generateContent(request, _userPromptId) {
|
|
224
|
+
const contents = normalizeContents(request.contents);
|
|
225
|
+
const systemInstruction = getSystemInstruction(request.config?.systemInstruction);
|
|
226
|
+
const messages = this.convertToOpenAIMessages(contents, systemInstruction);
|
|
227
|
+
const openaiRequest = {
|
|
228
|
+
model: request.model || this.client.getDefaultModel(),
|
|
229
|
+
messages,
|
|
230
|
+
temperature: request.config?.temperature,
|
|
231
|
+
max_tokens: request.config?.maxOutputTokens,
|
|
232
|
+
};
|
|
233
|
+
// Add tools if present
|
|
234
|
+
if (request.config?.tools && request.config.tools.length > 0) {
|
|
235
|
+
openaiRequest.tools = [];
|
|
236
|
+
for (const tool of request.config.tools) {
|
|
237
|
+
if ('functionDeclarations' in tool && tool.functionDeclarations) {
|
|
238
|
+
for (const fd of tool.functionDeclarations) {
|
|
239
|
+
openaiRequest.tools.push({
|
|
240
|
+
type: 'function',
|
|
241
|
+
function: {
|
|
242
|
+
name: fd.name || '',
|
|
243
|
+
description: fd.description,
|
|
244
|
+
parameters: fd.parameters,
|
|
245
|
+
},
|
|
246
|
+
});
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
// Handle JSON mode
|
|
252
|
+
if (request.config?.responseMimeType === 'application/json') {
|
|
253
|
+
openaiRequest.response_format = { type: 'json_object' };
|
|
254
|
+
}
|
|
255
|
+
const baseUrl = PROVIDER_PRESETS[this.config.provider]?.baseUrl || this.config.baseUrl;
|
|
256
|
+
const url = `${baseUrl}/chat/completions`;
|
|
257
|
+
const headers = {
|
|
258
|
+
'Content-Type': 'application/json',
|
|
259
|
+
Authorization: `Bearer ${this.config.apiKey}`,
|
|
260
|
+
};
|
|
261
|
+
const response = await fetch(url, {
|
|
262
|
+
method: 'POST',
|
|
263
|
+
headers,
|
|
264
|
+
body: JSON.stringify(openaiRequest),
|
|
265
|
+
});
|
|
266
|
+
if (!response.ok) {
|
|
267
|
+
const errorText = await response.text();
|
|
268
|
+
throw new Error(`API error (${response.status}): ${errorText}`);
|
|
269
|
+
}
|
|
270
|
+
const data = (await response.json());
|
|
271
|
+
return this.convertToGeminiResponse(data);
|
|
272
|
+
}
|
|
273
|
+
/**
|
|
274
|
+
* Generate content (streaming).
|
|
275
|
+
*/
|
|
276
|
+
async generateContentStream(request, _userPromptId) {
|
|
277
|
+
const contents = normalizeContents(request.contents);
|
|
278
|
+
const systemInstruction = getSystemInstruction(request.config?.systemInstruction);
|
|
279
|
+
const messages = this.convertToOpenAIMessages(contents, systemInstruction);
|
|
280
|
+
const openaiRequest = {
|
|
281
|
+
model: request.model || this.client.getDefaultModel(),
|
|
282
|
+
messages,
|
|
283
|
+
temperature: request.config?.temperature,
|
|
284
|
+
max_tokens: request.config?.maxOutputTokens,
|
|
285
|
+
stream: true,
|
|
286
|
+
};
|
|
287
|
+
// Add tools if present
|
|
288
|
+
if (request.config?.tools && request.config.tools.length > 0) {
|
|
289
|
+
openaiRequest.tools = [];
|
|
290
|
+
for (const tool of request.config.tools) {
|
|
291
|
+
if ('functionDeclarations' in tool && tool.functionDeclarations) {
|
|
292
|
+
for (const fd of tool.functionDeclarations) {
|
|
293
|
+
openaiRequest.tools.push({
|
|
294
|
+
type: 'function',
|
|
295
|
+
function: {
|
|
296
|
+
name: fd.name || '',
|
|
297
|
+
description: fd.description,
|
|
298
|
+
parameters: fd.parameters,
|
|
299
|
+
},
|
|
300
|
+
});
|
|
301
|
+
}
|
|
302
|
+
}
|
|
303
|
+
}
|
|
304
|
+
}
|
|
305
|
+
const baseUrl = PROVIDER_PRESETS[this.config.provider]?.baseUrl || this.config.baseUrl;
|
|
306
|
+
const url = `${baseUrl}/chat/completions`;
|
|
307
|
+
const headers = {
|
|
308
|
+
'Content-Type': 'application/json',
|
|
309
|
+
Authorization: `Bearer ${this.config.apiKey}`,
|
|
310
|
+
};
|
|
311
|
+
const response = await fetch(url, {
|
|
312
|
+
method: 'POST',
|
|
313
|
+
headers,
|
|
314
|
+
body: JSON.stringify(openaiRequest),
|
|
315
|
+
});
|
|
316
|
+
if (!response.ok) {
|
|
317
|
+
const errorText = await response.text();
|
|
318
|
+
throw new Error(`API error (${response.status}): ${errorText}`);
|
|
319
|
+
}
|
|
320
|
+
const reader = response.body?.getReader();
|
|
321
|
+
if (!reader) {
|
|
322
|
+
throw new Error('No response body');
|
|
323
|
+
}
|
|
324
|
+
const readerRef = reader;
|
|
325
|
+
async function* streamGenerator() {
|
|
326
|
+
const decoder = new TextDecoder();
|
|
327
|
+
let buffer = '';
|
|
328
|
+
let accumulatedContent = '';
|
|
329
|
+
const accumulatedToolCalls = [];
|
|
330
|
+
while (true) {
|
|
331
|
+
const { done, value } = await readerRef.read();
|
|
332
|
+
if (done)
|
|
333
|
+
break;
|
|
334
|
+
buffer += decoder.decode(value, { stream: true });
|
|
335
|
+
const lines = buffer.split('\n');
|
|
336
|
+
buffer = lines.pop() || '';
|
|
337
|
+
for (const line of lines) {
|
|
338
|
+
if (line.startsWith('data: ')) {
|
|
339
|
+
const data = line.slice(6);
|
|
340
|
+
if (data === '[DONE]')
|
|
341
|
+
continue;
|
|
342
|
+
try {
|
|
343
|
+
const parsed = JSON.parse(data);
|
|
344
|
+
const delta = parsed.choices?.[0]?.delta;
|
|
345
|
+
if (delta?.content) {
|
|
346
|
+
accumulatedContent += delta.content;
|
|
347
|
+
}
|
|
348
|
+
if (delta?.tool_calls) {
|
|
349
|
+
for (const tc of delta.tool_calls) {
|
|
350
|
+
if (!accumulatedToolCalls[tc.index]) {
|
|
351
|
+
accumulatedToolCalls[tc.index] = {
|
|
352
|
+
id: tc.id || `call_${tc.index}`,
|
|
353
|
+
type: 'function',
|
|
354
|
+
function: { name: '', arguments: '' },
|
|
355
|
+
};
|
|
356
|
+
}
|
|
357
|
+
if (tc.function?.name) {
|
|
358
|
+
accumulatedToolCalls[tc.index].function.name = tc.function.name;
|
|
359
|
+
}
|
|
360
|
+
if (tc.function?.arguments) {
|
|
361
|
+
accumulatedToolCalls[tc.index].function.arguments +=
|
|
362
|
+
tc.function.arguments;
|
|
363
|
+
}
|
|
364
|
+
}
|
|
365
|
+
}
|
|
366
|
+
// Yield intermediate response
|
|
367
|
+
const parts = [];
|
|
368
|
+
if (accumulatedContent) {
|
|
369
|
+
parts.push({ text: accumulatedContent });
|
|
370
|
+
}
|
|
371
|
+
for (const toolCall of accumulatedToolCalls) {
|
|
372
|
+
if (toolCall) {
|
|
373
|
+
try {
|
|
374
|
+
parts.push({
|
|
375
|
+
functionCall: {
|
|
376
|
+
name: toolCall.function.name,
|
|
377
|
+
args: toolCall.function.arguments
|
|
378
|
+
? JSON.parse(toolCall.function.arguments)
|
|
379
|
+
: {},
|
|
380
|
+
},
|
|
381
|
+
});
|
|
382
|
+
}
|
|
383
|
+
catch {
|
|
384
|
+
// Arguments not complete yet
|
|
385
|
+
}
|
|
386
|
+
}
|
|
387
|
+
}
|
|
388
|
+
if (parts.length > 0) {
|
|
389
|
+
yield {
|
|
390
|
+
text: accumulatedContent,
|
|
391
|
+
candidates: [
|
|
392
|
+
{
|
|
393
|
+
content: { role: 'model', parts },
|
|
394
|
+
index: 0,
|
|
395
|
+
},
|
|
396
|
+
],
|
|
397
|
+
modelVersion: parsed.model,
|
|
398
|
+
};
|
|
399
|
+
}
|
|
400
|
+
}
|
|
401
|
+
catch {
|
|
402
|
+
// Ignore parse errors for incomplete chunks
|
|
403
|
+
}
|
|
404
|
+
}
|
|
405
|
+
}
|
|
406
|
+
}
|
|
407
|
+
}
|
|
408
|
+
return streamGenerator();
|
|
409
|
+
}
|
|
410
|
+
/**
|
|
411
|
+
* Count tokens (estimated - OpenAI-compatible APIs may not support this).
|
|
412
|
+
*/
|
|
413
|
+
async countTokens(request) {
|
|
414
|
+
// Simple estimation: ~4 chars per token
|
|
415
|
+
const contents = normalizeContents(request.contents);
|
|
416
|
+
let totalChars = 0;
|
|
417
|
+
for (const content of contents) {
|
|
418
|
+
for (const part of content.parts || []) {
|
|
419
|
+
if ('text' in part) {
|
|
420
|
+
totalChars += part.text.length;
|
|
421
|
+
}
|
|
422
|
+
}
|
|
423
|
+
}
|
|
424
|
+
const estimatedTokens = Math.ceil(totalChars / 4);
|
|
425
|
+
return {
|
|
426
|
+
totalTokens: estimatedTokens,
|
|
427
|
+
};
|
|
428
|
+
}
|
|
429
|
+
/**
|
|
430
|
+
* Embed content (not supported by most OpenAI-compatible APIs).
|
|
431
|
+
*/
|
|
432
|
+
async embedContent(_request) {
|
|
433
|
+
throw new Error('Embedding is not supported by this provider. Use Gemini for embeddings.');
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
// =============================================================================
// Factory
// =============================================================================
/**
 * Create an OpenAI-compatible content generator from a provider config.
 *
 * @param config Provider configuration (provider, apiKey, baseUrl, ...).
 * @returns A freshly constructed OpenAIContentGenerator.
 */
export function createOpenAIContentGenerator(config) {
    const generator = new OpenAIContentGenerator(config);
    return generator;
}
|
|
442
|
+
//# sourceMappingURL=openaiContentGenerator.js.map
|