longcat-ai-sdk-provider 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +80 -0
- package/dist/index.d.ts +80 -0
- package/dist/index.js +402 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +380 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +34 -0
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1FunctionToolCall, LanguageModelV1FinishReason, LanguageModelV1CallWarning, LanguageModelV1StreamPart } from '@ai-sdk/provider';

/**
 * Known LongCat model ids. The trailing `(string & {})` keeps the union open:
 * any other string is accepted while editors still autocomplete the listed ids.
 */
type LongcatChatModelId = 'longcat/chat' | 'LongCat-Flash-Chat' | 'LongCat-Flash-Thinking' | 'LongCat-Flash-Thinking-2601' | 'LongCat-Flash-Lite' | 'LongCat-Flash-Omni-2603' | (string & {});
/**
 * Audio options for Omni models. The implementation only forwards these when
 * the model id contains "omni".
 */
interface LongcatAudioConfig {
    /** Voice used for synthesized audio output. */
    voice?: string;
    /** Playback speed — units/range not documented here; confirm against the LongCat API docs. */
    speed?: number;
    /** Output volume — range not documented here; confirm against the LongCat API docs. */
    volume?: number;
    /** Sample rate of returned audio (presumably Hz — TODO confirm). */
    outputAudioSamplerate?: number;
}
/**
 * Per-model generation settings. The index signature keeps the shape open for
 * forward-compatible, provider-specific options.
 */
interface LongcatChatSettings {
    /** Sent to the API as `max_tokens`. */
    maxTokens?: number;
    /** Sampling temperature. */
    temperature?: number;
    /** Nucleus sampling parameter (sent as `topP`). */
    topP?: number;
    /** Top-k sampling parameter (sent as `topK`). */
    topK?: number;
    /** Session identifier — only forwarded for Omni models. */
    sessionId?: string;
    /** Audio configuration — only forwarded for Omni models. */
    audio?: LongcatAudioConfig;
    /** Requested output modalities (sent as `output_modalities`) — Omni models only. */
    outputModalities?: ('text' | 'audio')[];
    /** Repetition penalty for text output — Omni models only. */
    textRepetitionPenalty?: number;
    /** Repetition penalty for audio output — Omni models only. */
    audioRepetitionPenalty?: number;
    [key: string]: unknown;
}

/** Options accepted by {@link createLongcat}. */
interface LongcatProviderSettings {
    /** API key; falls back to the `LONGCAT_API_KEY` environment variable. */
    apiKey?: string;
    /** API base URL; defaults to `https://api.longcat.chat/openai/v1` (trailing slash stripped). */
    baseURL?: string;
    /** Extra headers merged after the Authorization header (so they may override it). */
    headers?: Record<string, string>;
    /** Query parameters appended to every request URL. */
    queryParams?: Record<string, string>;
    /** Custom fetch implementation (e.g. for proxies or testing). */
    fetch?: typeof fetch;
}
/** Callable provider: `longcat('id')` and `longcat.chatModel('id')` are equivalent. */
interface LongcatProvider {
    (modelId: LongcatChatModelId, settings?: LongcatChatSettings): LanguageModelV1;
    chatModel(modelId: LongcatChatModelId, settings?: LongcatChatSettings): LanguageModelV1;
}
/** Creates a LongCat provider instance. */
declare function createLongcat(options?: LongcatProviderSettings): LongcatProvider;
/** Default provider built with no options; the API key is read from the environment per request. */
declare const longcat: LongcatProvider;

/** Transport configuration shared by every model created from one provider. */
interface LongcatChatConfig {
    provider: string;
    /** Builds the full request URL for an API path such as "/chat/completions". */
    url: ({ path }: {
        path: string;
    }) => string;
    /** Computed per request; includes the Authorization header. */
    headers: () => Record<string, string>;
    fetch?: typeof fetch;
}
/** LanguageModelV1 implementation backed by the LongCat chat-completions API. */
declare class LongcatChatLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    readonly model: string;
    readonly settings: LongcatChatSettings;
    /** Always `undefined` in the implementation — structured object generation is not supported. */
    readonly defaultObjectGenerationMode: 'json' | 'tool' | 'grammar' | undefined;
    private readonly config;
    constructor(modelId: LongcatChatModelId, settings: LongcatChatSettings, config: LongcatChatConfig);
    get provider(): string;
    get modelId(): string;
    /** Non-streaming completion request. */
    doGenerate(options: LanguageModelV1CallOptions): Promise<{
        text?: string;
        toolCalls?: Array<LanguageModelV1FunctionToolCall>;
        finishReason: LanguageModelV1FinishReason;
        usage: {
            promptTokens: number;
            completionTokens: number;
        };
        rawCall: {
            rawPrompt: unknown;
            rawSettings: Record<string, unknown>;
        };
        warnings?: LanguageModelV1CallWarning[];
    }>;
    /** Streaming completion request; server-sent events are parsed into stream parts. */
    doStream(options: LanguageModelV1CallOptions): Promise<{
        stream: ReadableStream<LanguageModelV1StreamPart>;
        rawCall: {
            rawPrompt: unknown;
            rawSettings: Record<string, unknown>;
        };
        warnings?: LanguageModelV1CallWarning[];
    }>;
    private convertToOmniMessages;
    private convertToStandardMessages;
}

export { type LongcatAudioConfig, LongcatChatLanguageModel, type LongcatChatModelId, type LongcatChatSettings, type LongcatProvider, type LongcatProviderSettings, createLongcat, longcat };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1FunctionToolCall, LanguageModelV1FinishReason, LanguageModelV1CallWarning, LanguageModelV1StreamPart } from '@ai-sdk/provider';

/**
 * Known LongCat model ids. The trailing `(string & {})` keeps the union open:
 * any other string is accepted while editors still autocomplete the listed ids.
 */
type LongcatChatModelId = 'longcat/chat' | 'LongCat-Flash-Chat' | 'LongCat-Flash-Thinking' | 'LongCat-Flash-Thinking-2601' | 'LongCat-Flash-Lite' | 'LongCat-Flash-Omni-2603' | (string & {});
/**
 * Audio options for Omni models. The implementation only forwards these when
 * the model id contains "omni".
 */
interface LongcatAudioConfig {
    /** Voice used for synthesized audio output. */
    voice?: string;
    /** Playback speed — units/range not documented here; confirm against the LongCat API docs. */
    speed?: number;
    /** Output volume — range not documented here; confirm against the LongCat API docs. */
    volume?: number;
    /** Sample rate of returned audio (presumably Hz — TODO confirm). */
    outputAudioSamplerate?: number;
}
/**
 * Per-model generation settings. The index signature keeps the shape open for
 * forward-compatible, provider-specific options.
 */
interface LongcatChatSettings {
    /** Sent to the API as `max_tokens`. */
    maxTokens?: number;
    /** Sampling temperature. */
    temperature?: number;
    /** Nucleus sampling parameter (sent as `topP`). */
    topP?: number;
    /** Top-k sampling parameter (sent as `topK`). */
    topK?: number;
    /** Session identifier — only forwarded for Omni models. */
    sessionId?: string;
    /** Audio configuration — only forwarded for Omni models. */
    audio?: LongcatAudioConfig;
    /** Requested output modalities (sent as `output_modalities`) — Omni models only. */
    outputModalities?: ('text' | 'audio')[];
    /** Repetition penalty for text output — Omni models only. */
    textRepetitionPenalty?: number;
    /** Repetition penalty for audio output — Omni models only. */
    audioRepetitionPenalty?: number;
    [key: string]: unknown;
}

/** Options accepted by {@link createLongcat}. */
interface LongcatProviderSettings {
    /** API key; falls back to the `LONGCAT_API_KEY` environment variable. */
    apiKey?: string;
    /** API base URL; defaults to `https://api.longcat.chat/openai/v1` (trailing slash stripped). */
    baseURL?: string;
    /** Extra headers merged after the Authorization header (so they may override it). */
    headers?: Record<string, string>;
    /** Query parameters appended to every request URL. */
    queryParams?: Record<string, string>;
    /** Custom fetch implementation (e.g. for proxies or testing). */
    fetch?: typeof fetch;
}
/** Callable provider: `longcat('id')` and `longcat.chatModel('id')` are equivalent. */
interface LongcatProvider {
    (modelId: LongcatChatModelId, settings?: LongcatChatSettings): LanguageModelV1;
    chatModel(modelId: LongcatChatModelId, settings?: LongcatChatSettings): LanguageModelV1;
}
/** Creates a LongCat provider instance. */
declare function createLongcat(options?: LongcatProviderSettings): LongcatProvider;
/** Default provider built with no options; the API key is read from the environment per request. */
declare const longcat: LongcatProvider;

/** Transport configuration shared by every model created from one provider. */
interface LongcatChatConfig {
    provider: string;
    /** Builds the full request URL for an API path such as "/chat/completions". */
    url: ({ path }: {
        path: string;
    }) => string;
    /** Computed per request; includes the Authorization header. */
    headers: () => Record<string, string>;
    fetch?: typeof fetch;
}
/** LanguageModelV1 implementation backed by the LongCat chat-completions API. */
declare class LongcatChatLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    readonly model: string;
    readonly settings: LongcatChatSettings;
    /** Always `undefined` in the implementation — structured object generation is not supported. */
    readonly defaultObjectGenerationMode: 'json' | 'tool' | 'grammar' | undefined;
    private readonly config;
    constructor(modelId: LongcatChatModelId, settings: LongcatChatSettings, config: LongcatChatConfig);
    get provider(): string;
    get modelId(): string;
    /** Non-streaming completion request. */
    doGenerate(options: LanguageModelV1CallOptions): Promise<{
        text?: string;
        toolCalls?: Array<LanguageModelV1FunctionToolCall>;
        finishReason: LanguageModelV1FinishReason;
        usage: {
            promptTokens: number;
            completionTokens: number;
        };
        rawCall: {
            rawPrompt: unknown;
            rawSettings: Record<string, unknown>;
        };
        warnings?: LanguageModelV1CallWarning[];
    }>;
    /** Streaming completion request; server-sent events are parsed into stream parts. */
    doStream(options: LanguageModelV1CallOptions): Promise<{
        stream: ReadableStream<LanguageModelV1StreamPart>;
        rawCall: {
            rawPrompt: unknown;
            rawSettings: Record<string, unknown>;
        };
        warnings?: LanguageModelV1CallWarning[];
    }>;
    private convertToOmniMessages;
    private convertToStandardMessages;
}

export { type LongcatAudioConfig, LongcatChatLanguageModel, type LongcatChatModelId, type LongcatChatSettings, type LongcatProvider, type LongcatProviderSettings, createLongcat, longcat };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,402 @@
|
|
|
1
|
+
"use strict";
// ---- esbuild-generated CommonJS interop helpers (bundler boilerplate) ----
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy getter re-exports on `target` for every entry in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters (skipping `except`),
// preserving enumerability where a property descriptor exists.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Marks the exports object as an ES-module interop target and copies exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
// Public surface of the package: the model class, the factory, and a default instance.
var index_exports = {};
__export(index_exports, {
  LongcatChatLanguageModel: () => LongcatChatLanguageModel,
  createLongcat: () => createLongcat,
  longcat: () => longcat
});
module.exports = __toCommonJS(index_exports);

// src/longcat-provider.ts
var import_provider_utils2 = require("@ai-sdk/provider-utils");

// src/longcat-chat-language-model.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
35
|
+
// LanguageModelV1 implementation backed by the LongCat /chat/completions API.
// Model ids containing "omni" get multimodal message conversion plus extra
// session/audio request fields; all other ids use plain string messages.
var LongcatChatLanguageModel = class {
  /**
   * @param modelId  LongCat model identifier.
   * @param settings Generation settings (maxTokens, temperature, topP, topK, ...).
   * @param config   Transport config: { provider, url({path}), headers(), fetch? }.
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Structured object generation is not supported by this provider.
    this.defaultObjectGenerationMode = void 0;
    this.model = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  get modelId() {
    return this.model;
  }
  // Builds the JSON request body shared by doGenerate and doStream.
  // Uses `!= null` so explicit zero values (e.g. temperature: 0) are still
  // sent; the previous truthiness checks silently dropped them.
  _buildBody(messages, stream, isOmniModel) {
    const body = {
      model: this.model,
      messages,
      stream
    };
    if (isOmniModel) {
      // These fields are only meaningful for Omni models.
      if (this.settings.sessionId != null) {
        body.sessionId = this.settings.sessionId;
      }
      if (this.settings.audio != null) {
        body.audio = this.settings.audio;
      }
      if (this.settings.outputModalities != null) {
        body.output_modalities = this.settings.outputModalities;
      }
      if (this.settings.textRepetitionPenalty != null) {
        body.textRepetitionPenalty = this.settings.textRepetitionPenalty;
      }
      if (this.settings.audioRepetitionPenalty != null) {
        body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;
      }
    }
    if (this.settings.maxTokens != null) {
      body.max_tokens = this.settings.maxTokens;
    }
    if (this.settings.temperature != null) {
      body.temperature = this.settings.temperature;
    }
    if (this.settings.topP != null) {
      // NOTE(review): sent as camelCase `topP`/`topK`, unlike `max_tokens` —
      // confirm against the LongCat API whether snake_case is expected.
      body.topP = this.settings.topP;
    }
    if (this.settings.topK != null) {
      body.topK = this.settings.topK;
    }
    return body;
  }
  // POSTs a request body to /chat/completions with the configured fetch/headers.
  _post(body) {
    const url = this.config.url({ path: "/chat/completions" });
    return (this.config.fetch ?? fetch)(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        ...this.config.headers()
      },
      body: JSON.stringify(body)
    });
  }
  // Serializes a tool result: strings pass through, objects are JSON-encoded,
  // everything else is String()-coerced.
  _stringifyToolResult(output) {
    if (typeof output === "string") {
      return output;
    }
    if (typeof output === "object") {
      return JSON.stringify(output);
    }
    return String(output);
  }
  /**
   * Non-streaming completion. Returns the first choice's text plus usage and
   * the raw prompt/settings for observability.
   */
  async doGenerate(options) {
    const isOmniModel = this.model.toLowerCase().includes("omni");
    const messages = isOmniModel ? await this.convertToOmniMessages(options.prompt) : this.convertToStandardMessages(options.prompt);
    const body = this._buildBody(messages, false, isOmniModel);
    const response = await this._post(body);
    if (!response.ok) {
      const text = await response.text();
      throw new Error(
        `Longcat API error: ${response.status} ${response.statusText} - ${text}`
      );
    }
    const json = await response.json();
    const choice = json.choices[0];
    return {
      // Upstream "length" maps through; every other finish reason is reported as "stop".
      finishReason: choice?.finish_reason === "length" ? "length" : "stop",
      usage: {
        promptTokens: json.usage?.prompt_tokens ?? 0,
        completionTokens: json.usage?.completion_tokens ?? 0
      },
      rawCall: {
        rawPrompt: messages,
        rawSettings: this.settings
      },
      text: choice?.message?.content ?? "",
      warnings: []
    };
  }
  /**
   * Streaming completion. Parses the SSE response line-by-line and emits
   * text-delta parts followed by a final finish part.
   */
  async doStream(options) {
    const isOmniModel = this.model.toLowerCase().includes("omni");
    const messages = isOmniModel ? await this.convertToOmniMessages(options.prompt) : this.convertToStandardMessages(options.prompt);
    const body = this._buildBody(messages, true, isOmniModel);
    const response = await this._post(body);
    if (!response.ok) {
      const error = await response.json().catch(() => ({}));
      throw new Error(
        error.error?.message ?? `Longcat API error: ${response.status} ${response.statusText}`
      );
    }
    const decoder = new TextDecoder();
    const bodyStream = response.body;
    let buffer = "";
    const { readable, writable } = new TransformStream();
    (async () => {
      // BUG FIX: getWriter() locks the WritableStream, so the previous code's
      // repeated writable.getWriter() calls threw a TypeError on every write
      // after the first (and again in its catch/finally). Acquire one writer
      // up front and reuse it for the whole pump.
      const writer = writable.getWriter();
      try {
        const reader = bodyStream.getReader();
        try {
          while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            buffer += decoder.decode(value, { stream: true });
            // SSE events are newline-delimited; keep any trailing partial line.
            const lines = buffer.split("\n");
            buffer = lines.pop() ?? "";
            for (const line of lines) {
              const trimmed = line.trim();
              if (!trimmed || trimmed === "data: [DONE]") {
                continue;
              }
              if (trimmed.startsWith("data: ")) {
                const data = trimmed.slice(6);
                try {
                  const parsed = JSON.parse(data);
                  const choice = parsed.choices[0];
                  if (!choice) continue;
                  if (choice.delta.content !== void 0) {
                    await writer.write({
                      type: "text-delta",
                      textDelta: choice.delta.content ?? ""
                    });
                  }
                } catch {
                  // Skip malformed SSE payloads.
                }
              }
            }
          }
        } finally {
          reader.releaseLock();
        }
        await writer.write({
          type: "finish",
          finishReason: "stop",
          usage: { promptTokens: 0, completionTokens: 0 }
        });
        await writer.close();
      } catch (e) {
        // abort() also rejects if the stream already errored; swallow that.
        await writer.abort(e).catch(() => {});
      }
    })();
    return {
      stream: readable,
      rawCall: {
        rawPrompt: messages,
        rawSettings: this.settings
      },
      warnings: []
    };
  }
  // Converts an AI SDK prompt into the multimodal ("content parts") message
  // format used for Omni models; remote images are downloaded and inlined as
  // base64.
  async convertToOmniMessages(prompt) {
    const messages = [];
    for (const message of prompt) {
      const { role, content } = message;
      if (role === "system") {
        let text = "";
        if (typeof content === "string") {
          text = content;
        } else if (Array.isArray(content)) {
          const textParts = content.filter((c) => c.type === "text");
          text = textParts.map((c) => c.text).join("");
        }
        messages.push({
          role: "system",
          content: [{ type: "text", text }]
        });
      } else if (role === "user") {
        const userContent = content;
        // Fast path: a single text part needs no async image handling.
        if (userContent.length === 1 && userContent[0].type === "text") {
          messages.push({
            role: "user",
            content: [{ type: "text", text: userContent[0].text }]
          });
          continue;
        }
        messages.push({
          role: "user",
          content: await Promise.all(userContent.map(async (part) => {
            if (part.type === "text") {
              return { type: "text", text: part.text };
            }
            if (part.type === "image") {
              const imagePart = part;
              let base64Data;
              if (imagePart.image instanceof URL) {
                // Remote image: download, then base64-encode the raw bytes.
                const response = await (this.config.fetch ?? fetch)(imagePart.image.toString());
                const arrayBuffer = await response.arrayBuffer();
                const bytes = new Uint8Array(arrayBuffer);
                let binary = "";
                for (let i = 0; i < bytes.byteLength; i++) {
                  binary += String.fromCharCode(bytes[i]);
                }
                base64Data = btoa(binary);
              } else {
                base64Data = (0, import_provider_utils.convertUint8ArrayToBase64)(imagePart.image);
              }
              return {
                type: "input_image",
                input_image: {
                  type: "base64",
                  data: base64Data
                }
              };
            }
            // Any other part kind (e.g. file) is unsupported here.
            throw new import_provider.UnsupportedFunctionalityError({
              functionality: `user message with ${part.type} content`
            });
          }))
        });
      } else if (role === "assistant") {
        // Concatenate assistant text parts; tool-call parts are ignored.
        const assistantContent = content;
        let text = "";
        for (const part of assistantContent) {
          if (part.type === "text") {
            text += part.text;
          }
        }
        messages.push({
          role: "assistant",
          content: text
        });
      } else if (role === "tool") {
        // NOTE(review): tool results are forwarded as plain user messages —
        // the Omni message format used here has no dedicated tool role.
        for (const toolResponse of content) {
          if (toolResponse.type === "tool-result") {
            messages.push({
              role: "user",
              content: this._stringifyToolResult(toolResponse.result)
            });
          }
        }
      }
    }
    return messages;
  }
  // Flattens an AI SDK prompt into plain { role, content: string } messages
  // for non-Omni models; non-text parts (images, etc.) are dropped.
  convertToStandardMessages(prompt) {
    const messages = [];
    for (const { role, content } of prompt) {
      if (role === "system") {
        const text = Array.isArray(content) ? content.find((c) => c.type === "text")?.text ?? "" : content;
        messages.push({ role: "system", content: text });
      } else if (role === "user") {
        const text = Array.isArray(content) ? content.filter((c) => c.type === "text").map((c) => c.text).join("\n") : content;
        messages.push({ role: "user", content: text });
      } else if (role === "assistant") {
        const text = Array.isArray(content) ? content.find((c) => c.type === "text")?.text ?? "" : content;
        messages.push({ role: "assistant", content: text });
      } else if (role === "tool") {
        for (const toolResponse of content) {
          if (toolResponse.type === "tool-result") {
            messages.push({
              role: "tool",
              content: this._stringifyToolResult(toolResponse.result)
            });
          }
        }
      }
    }
    return messages;
  }
};
|
|
358
|
+
|
|
359
|
+
// src/longcat-provider.ts
|
|
360
|
+
/**
 * Creates a LongCat provider. The returned value is callable
 * (`provider(modelId, settings)`) and also exposes `provider.chatModel(...)`;
 * both construct a LongcatChatLanguageModel sharing one transport config.
 */
function createLongcat(options = {}) {
  // Normalize the base URL once; default to the public OpenAI-compatible endpoint.
  const resolvedBaseURL = (0, import_provider_utils2.withoutTrailingSlash)(
    options.baseURL ?? "https://api.longcat.chat/openai/v1"
  );
  // Computed per request so the API key is resolved lazily (env fallback).
  // Caller-supplied headers are spread last and may override Authorization.
  const resolveHeaders = () => {
    const apiKey = (0, import_provider_utils2.loadApiKey)({
      apiKey: options.apiKey,
      environmentVariableName: "LONGCAT_API_KEY",
      description: "Longcat API key"
    });
    return {
      Authorization: `Bearer ${apiKey}`,
      ...options.headers
    };
  };
  const sharedConfig = {
    provider: "longcat",
    // Join base URL and path, then attach any configured query parameters.
    url: ({ path }) => {
      const endpoint = new URL(`${resolvedBaseURL}${path}`);
      if (options.queryParams) {
        endpoint.search = new URLSearchParams(options.queryParams).toString();
      }
      return endpoint.toString();
    },
    headers: resolveHeaders,
    fetch: options.fetch
  };
  const createChatModel = (modelId, settings = {}) =>
    new LongcatChatLanguageModel(modelId, settings, sharedConfig);
  const provider = (modelId, settings) => createChatModel(modelId, settings);
  provider.chatModel = createChatModel;
  return provider;
}
|
|
395
|
+
// Default provider instance built with no options. Safe to create at module
// load: the API key is only read (from LONGCAT_API_KEY) when headers are
// computed for a request, not here.
var longcat = createLongcat();
// Annotate the CommonJS export names for ESM import in node:
// (the `0 &&` expression is dead at runtime; it exists so cjs-module-lexer
// can statically detect the named exports)
0 && (module.exports = {
  LongcatChatLanguageModel,
  createLongcat,
  longcat
});
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/longcat-provider.ts","../src/longcat-chat-language-model.ts"],"sourcesContent":["export { createLongcat, longcat } from './longcat-provider';\nexport { LongcatChatLanguageModel } from './longcat-chat-language-model';\nexport type {\n LongcatProvider,\n LongcatProviderSettings,\n} from './longcat-provider';\nexport type {\n LongcatChatModelId,\n LongcatChatSettings,\n LongcatAudioConfig,\n} from './longcat-chat-settings';\n","import { LanguageModelV1 } from '@ai-sdk/provider';\nimport {\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { LongcatChatLanguageModel } from './longcat-chat-language-model';\nimport type { LongcatChatModelId, LongcatChatSettings } from './longcat-chat-settings';\nimport type { LongcatChatConfig } from './longcat-chat-language-model';\n\nexport interface LongcatProviderSettings {\n apiKey?: string;\n baseURL?: string;\n headers?: Record<string, string>;\n queryParams?: Record<string, string>;\n fetch?: typeof fetch;\n}\n\nexport interface LongcatProvider {\n (\n modelId: LongcatChatModelId,\n settings?: LongcatChatSettings,\n ): LanguageModelV1;\n\n chatModel(\n modelId: LongcatChatModelId,\n settings?: LongcatChatSettings,\n ): LanguageModelV1;\n}\n\nexport function createLongcat(\n options: LongcatProviderSettings = {},\n): LongcatProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 
'https://api.longcat.chat/openai/v1',\n );\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'LONGCAT_API_KEY',\n description: 'Longcat API key',\n })}`,\n ...options.headers,\n });\n\n const config: LongcatChatConfig = {\n provider: 'longcat',\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n };\n\n const createChatModel = (\n modelId: LongcatChatModelId,\n settings: LongcatChatSettings = {},\n ) => {\n return new LongcatChatLanguageModel(\n modelId,\n settings,\n config,\n );\n };\n\n const provider = (\n modelId: LongcatChatModelId,\n settings?: LongcatChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.chatModel = createChatModel;\n\n return provider;\n}\n\nexport const longcat = createLongcat();\n","import {\n LanguageModelV1,\n LanguageModelV1CallOptions,\n LanguageModelV1Prompt,\n LanguageModelV1StreamPart,\n LanguageModelV1FinishReason,\n LanguageModelV1CallWarning,\n LanguageModelV1FunctionToolCall,\n LanguageModelV1Message,\n LanguageModelV1TextPart,\n LanguageModelV1ImagePart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\nimport type { LongcatChatModelId, LongcatChatSettings } from './longcat-chat-settings';\n\nexport interface LongcatChatConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: typeof fetch;\n}\n\ninterface LongcatResponseChunk {\n id: string;\n object: string;\n created: number;\n model: string;\n choices: Array<{\n index: number;\n delta: {\n role?: string;\n content?: string;\n audio?: string | null;\n type?: string;\n };\n finish_reason?: string | null;\n }>;\n usage?: {\n prompt_tokens: 
number;\n completion_tokens: number;\n total_tokens: number;\n };\n session_id?: string;\n}\n\nexport class LongcatChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly model: string;\n readonly settings: LongcatChatSettings;\n readonly defaultObjectGenerationMode: 'json' | 'tool' | 'grammar' | undefined = undefined;\n\n private readonly config: LongcatChatConfig;\n\n constructor(\n modelId: LongcatChatModelId,\n settings: LongcatChatSettings,\n config: LongcatChatConfig,\n ) {\n this.model = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get modelId(): string {\n return this.model;\n }\n\n async doGenerate(options: LanguageModelV1CallOptions): Promise<{\n text?: string;\n toolCalls?: Array<LanguageModelV1FunctionToolCall>;\n finishReason: LanguageModelV1FinishReason;\n usage: { promptTokens: number; completionTokens: number };\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const isOmniModel = this.model.toLowerCase().includes('omni');\n \n const messages = isOmniModel\n ? 
await this.convertToOmniMessages(options.prompt)\n : this.convertToStandardMessages(options.prompt);\n\n const body: Record<string, unknown> = {\n model: this.model,\n messages,\n stream: false,\n };\n\n if (isOmniModel) {\n if (this.settings.sessionId) {\n body.sessionId = this.settings.sessionId;\n }\n if (this.settings.audio) {\n body.audio = this.settings.audio;\n }\n if (this.settings.outputModalities) {\n body.output_modalities = this.settings.outputModalities;\n }\n if (this.settings.textRepetitionPenalty) {\n body.textRepetitionPenalty = this.settings.textRepetitionPenalty;\n }\n if (this.settings.audioRepetitionPenalty) {\n body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;\n }\n }\n\n if (this.settings.maxTokens) {\n body.max_tokens = this.settings.maxTokens;\n }\n if (this.settings.temperature) {\n body.temperature = this.settings.temperature;\n }\n if (this.settings.topP) {\n body.topP = this.settings.topP;\n }\n if (this.settings.topK) {\n body.topK = this.settings.topK;\n }\n\n const url = this.config.url({ path: '/chat/completions' });\n const response = await (this.config.fetch ?? fetch)(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.config.headers(),\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const text = await response.text();\n throw new Error(\n `Longcat API error: ${response.status} ${response.statusText} - ${text}`,\n );\n }\n\n const json = await response.json();\n const choice = json.choices[0];\n\n return {\n finishReason: choice?.finish_reason === 'stop'\n ? 'stop'\n : choice?.finish_reason === 'length'\n ? 'length'\n : 'stop',\n usage: {\n promptTokens: json.usage?.prompt_tokens ?? 0,\n completionTokens: json.usage?.completion_tokens ?? 0,\n },\n rawCall: {\n rawPrompt: messages,\n rawSettings: this.settings,\n },\n text: choice?.message?.content ?? 
'',\n warnings: [],\n };\n }\n\n async doStream(options: LanguageModelV1CallOptions): Promise<{\n stream: ReadableStream<LanguageModelV1StreamPart>;\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const isOmniModel = this.model.toLowerCase().includes('omni');\n \n const messages = isOmniModel\n ? await this.convertToOmniMessages(options.prompt)\n : this.convertToStandardMessages(options.prompt);\n\n const body: Record<string, unknown> = {\n model: this.model,\n messages,\n stream: true,\n };\n\n if (isOmniModel) {\n if (this.settings.sessionId) {\n body.sessionId = this.settings.sessionId;\n }\n if (this.settings.audio) {\n body.audio = this.settings.audio;\n }\n if (this.settings.outputModalities) {\n body.output_modalities = this.settings.outputModalities;\n }\n if (this.settings.textRepetitionPenalty) {\n body.textRepetitionPenalty = this.settings.textRepetitionPenalty;\n }\n if (this.settings.audioRepetitionPenalty) {\n body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;\n }\n }\n\n if (this.settings.maxTokens) {\n body.max_tokens = this.settings.maxTokens;\n }\n if (this.settings.temperature) {\n body.temperature = this.settings.temperature;\n }\n if (this.settings.topP) {\n body.topP = this.settings.topP;\n }\n if (this.settings.topK) {\n body.topK = this.settings.topK;\n }\n\n const url = this.config.url({ path: '/chat/completions' });\n const response = await (this.config.fetch ?? fetch)(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.config.headers(),\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({}));\n throw new Error(\n error.error?.message ?? 
`Longcat API error: ${response.status} ${response.statusText}`,\n );\n }\n\n const decoder = new TextDecoder();\n const bodyStream = response.body!;\n let buffer = '';\n\n const { readable, writable } = new TransformStream<LanguageModelV1StreamPart>();\n\n (async () => {\n try {\n const reader = bodyStream.getReader();\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() ?? '';\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed === 'data: [DONE]') {\n continue;\n }\n\n if (trimmed.startsWith('data: ')) {\n const data = trimmed.slice(6);\n try {\n const parsed: LongcatResponseChunk = JSON.parse(data);\n const choice = parsed.choices[0];\n if (!choice) continue;\n\n if (choice.delta.content !== undefined) {\n const part: LanguageModelV1StreamPart = {\n type: 'text-delta',\n textDelta: choice.delta.content ?? 
'',\n };\n writable.getWriter().write(part);\n }\n } catch {\n // Skip invalid JSON\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n const finishPart: LanguageModelV1StreamPart = {\n type: 'finish',\n finishReason: 'stop',\n usage: { promptTokens: 0, completionTokens: 0 },\n };\n writable.getWriter().write(finishPart);\n } catch (e) {\n writable.getWriter().abort(e as Error);\n } finally {\n writable.getWriter().close();\n }\n })();\n\n return {\n stream: readable,\n rawCall: {\n rawPrompt: messages,\n rawSettings: this.settings,\n },\n warnings: [],\n };\n }\n\n private async convertToOmniMessages(prompt: LanguageModelV1Prompt): Promise<unknown> {\n const messages: Array<{ role: string; content: unknown }> = [];\n\n for (const message of prompt) {\n const { role, content } = message;\n\n if (role === 'system') {\n let text = '';\n if (typeof content === 'string') {\n text = content;\n } else if (Array.isArray(content)) {\n const textParts = (content as Array<LanguageModelV1TextPart>).filter(c => c.type === 'text') as LanguageModelV1TextPart[];\n text = textParts.map(c => c.text).join('');\n }\n messages.push({\n role: 'system',\n content: [{ type: 'text', text }],\n });\n } else if (role === 'user') {\n const userContent = content as Array<LanguageModelV1TextPart | LanguageModelV1ImagePart>;\n if (userContent.length === 1 && userContent[0].type === 'text') {\n messages.push({\n role: 'user',\n content: [{ type: 'text', text: (userContent[0] as LanguageModelV1TextPart).text }],\n });\n continue;\n }\n\n messages.push({\n role: 'user',\n content: await Promise.all(userContent.map(async (part) => {\n if (part.type === 'text') {\n return { type: 'text', text: (part as LanguageModelV1TextPart).text };\n }\n if (part.type === 'image') {\n const imagePart = part as LanguageModelV1ImagePart;\n let base64Data: string;\n \n if (imagePart.image instanceof URL) {\n const response = await (this.config.fetch ?? 
fetch)(imagePart.image.toString());\n const arrayBuffer = await response.arrayBuffer();\n const bytes = new Uint8Array(arrayBuffer);\n let binary = '';\n for (let i = 0; i < bytes.byteLength; i++) {\n binary += String.fromCharCode(bytes[i]);\n }\n base64Data = btoa(binary);\n } else {\n base64Data = convertUint8ArrayToBase64(imagePart.image);\n }\n \n return {\n type: 'input_image',\n input_image: {\n type: 'base64',\n data: base64Data,\n },\n };\n }\n throw new UnsupportedFunctionalityError({\n functionality: `user message with ${(part as { type: string }).type} content`,\n });\n })),\n });\n } else if (role === 'assistant') {\n const assistantContent = content as Array<LanguageModelV1TextPart>;\n let text = '';\n for (const part of assistantContent) {\n if (part.type === 'text') {\n text += part.text;\n }\n }\n messages.push({\n role: 'assistant',\n content: text,\n });\n } else if (role === 'tool') {\n for (const toolResponse of content) {\n if (toolResponse.type === 'tool-result') {\n const output = toolResponse.result;\n let contentValue: string;\n\n if (typeof output === 'string') {\n contentValue = output;\n } else if (typeof output === 'object') {\n contentValue = JSON.stringify(output);\n } else {\n contentValue = String(output);\n }\n\n messages.push({\n role: 'user',\n content: contentValue,\n });\n }\n }\n }\n }\n\n return messages;\n }\n\n private convertToStandardMessages(prompt: LanguageModelV1Prompt): Array<{ role: string; content: string }> {\n const messages: Array<{ role: string; content: string }> = [];\n\n for (const { role, content } of prompt) {\n if (role === 'system') {\n const text = Array.isArray(content)\n ? (content as Array<LanguageModelV1TextPart>).find((c) => c.type === 'text')?.text ?? ''\n : content;\n messages.push({ role: 'system', content: text });\n } else if (role === 'user') {\n const text = Array.isArray(content)\n ? 
(content as Array<LanguageModelV1TextPart>)\n .filter((c) => c.type === 'text')\n .map((c) => (c as LanguageModelV1TextPart).text)\n .join('\\n')\n : content;\n messages.push({ role: 'user', content: text });\n } else if (role === 'assistant') {\n const text = Array.isArray(content)\n ? (content as Array<LanguageModelV1TextPart>).find((c) => c.type === 'text')?.text ?? ''\n : content;\n messages.push({ role: 'assistant', content: text });\n } else if (role === 'tool') {\n for (const toolResponse of content) {\n if (toolResponse.type === 'tool-result') {\n const output = toolResponse.result;\n let contentValue: string;\n\n if (typeof output === 'string') {\n contentValue = output;\n } else if (typeof output === 'object') {\n contentValue = JSON.stringify(output);\n } else {\n contentValue = String(output);\n }\n\n messages.push({\n role: 'tool',\n content: contentValue,\n });\n }\n }\n }\n }\n\n return messages;\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,IAAAA,yBAGO;;;ACJP,sBAYO;AACP,4BAEO;AAiCA,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YACE,SACA,UACA,QACA;AAXF,SAAS,uBAAuB;AAGhC,SAAS,8BAAuE;AAS9E,SAAK,QAAQ;AACb,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,UAAkB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,WAAW,SAOd;AACD,UAAM,cAAc,KAAK,MAAM,YAAY,EAAE,SAAS,MAAM;AAE5D,UAAM,WAAW,cACb,MAAM,KAAK,sBAAsB,QAAQ,MAAM,IAC/C,KAAK,0BAA0B,QAAQ,MAAM;AAEjD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,aAAa;AACf,UAAI,KAAK,SAAS,WAAW;AAC3B,aAAK,YAAY,KAAK,SAAS;AAAA,MACjC;AACA,UAAI,KAAK,SAAS,OAAO;AACvB,aAAK,QAAQ,KAAK,SAAS;AAAA,MAC7B;AACA,UAAI,KAAK,SAAS,kBAAkB;AAClC,aAAK,oBAAoB,KAAK,SAAS;AAAA,MACzC;AACA,UAAI,KAAK,SAAS,uBAAuB;AACvC,aAAK,wBAAwB,KAAK,SAAS;AAAA,MAC7C;AACA,UAAI,KAAK,SAAS,wBAAwB;AACxC,aAAK,yBAAyB,KAAK,SAAS;AAAA,MAC9C;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,WAAW;AAC3B,WAAK,aAAa,KAAK,SAAS;AAAA,IAClC;AACA,QAAI,KAAK,SAAS,aAAa;AAC7B,WAAK,cAAc,KAAK,SAAS;AAAA,IACnC;AACA,QAAI,KAAK,SA
AS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AACA,QAAI,KAAK,SAAS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AAEA,UAAM,MAAM,KAAK,OAAO,IAAI,EAAE,MAAM,oBAAoB,CAAC;AACzD,UAAM,WAAW,OAAO,KAAK,OAAO,SAAS,OAAO,KAAK;AAAA,MACvD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAG,KAAK,OAAO,QAAQ;AAAA,MACzB;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,IAAI;AAAA,QACR,sBAAsB,SAAS,MAAM,IAAI,SAAS,UAAU,MAAM,IAAI;AAAA,MACxE;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,SAAS,KAAK,QAAQ,CAAC;AAE7B,WAAO;AAAA,MACL,cAAc,QAAQ,kBAAkB,SACpC,SACA,QAAQ,kBAAkB,WAC1B,WACA;AAAA,MACJ,OAAO;AAAA,QACL,cAAc,KAAK,OAAO,iBAAiB;AAAA,QAC3C,kBAAkB,KAAK,OAAO,qBAAqB;AAAA,MACrD;AAAA,MACA,SAAS;AAAA,QACP,WAAW;AAAA,QACX,aAAa,KAAK;AAAA,MACpB;AAAA,MACA,MAAM,QAAQ,SAAS,WAAW;AAAA,MAClC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,SAIZ;AACD,UAAM,cAAc,KAAK,MAAM,YAAY,EAAE,SAAS,MAAM;AAE5D,UAAM,WAAW,cACb,MAAM,KAAK,sBAAsB,QAAQ,MAAM,IAC/C,KAAK,0BAA0B,QAAQ,MAAM;AAEjD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,aAAa;AACf,UAAI,KAAK,SAAS,WAAW;AAC3B,aAAK,YAAY,KAAK,SAAS;AAAA,MACjC;AACA,UAAI,KAAK,SAAS,OAAO;AACvB,aAAK,QAAQ,KAAK,SAAS;AAAA,MAC7B;AACA,UAAI,KAAK,SAAS,kBAAkB;AAClC,aAAK,oBAAoB,KAAK,SAAS;AAAA,MACzC;AACA,UAAI,KAAK,SAAS,uBAAuB;AACvC,aAAK,wBAAwB,KAAK,SAAS;AAAA,MAC7C;AACA,UAAI,KAAK,SAAS,wBAAwB;AACxC,aAAK,yBAAyB,KAAK,SAAS;AAAA,MAC9C;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,WAAW;AAC3B,WAAK,aAAa,KAAK,SAAS;AAAA,IAClC;AACA,QAAI,KAAK,SAAS,aAAa;AAC7B,WAAK,cAAc,KAAK,SAAS;AAAA,IACnC;AACA,QAAI,KAAK,SAAS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AACA,QAAI,KAAK,SAAS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AAEA,UAAM,MAAM,KAAK,OAAO,IAAI,EAAE,MAAM,oBAAoB,CAAC;AACzD,UAAM,WAAW,OAAO,KAAK,OAAO,SAAS,OAAO,KAAK;AAAA,MACvD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAG,KAAK,OAAO,QAAQ;AAAA,MACzB;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,YAAM,IAAI;AAAA,QACR,M
AAM,OAAO,WAAW,sBAAsB,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,MACtF;AAAA,IACF;AAEA,UAAM,UAAU,IAAI,YAAY;AAChC,UAAM,aAAa,SAAS;AAC5B,QAAI,SAAS;AAEb,UAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAA2C;AAE9E,KAAC,YAAY;AACX,UAAI;AACF,cAAM,SAAS,WAAW,UAAU;AACpC,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,gBAAI,KAAM;AACV,sBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,kBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,qBAAS,MAAM,IAAI,KAAK;AAExB,uBAAW,QAAQ,OAAO;AACxB,oBAAM,UAAU,KAAK,KAAK;AAC1B,kBAAI,CAAC,WAAW,YAAY,gBAAgB;AAC1C;AAAA,cACF;AAEA,kBAAI,QAAQ,WAAW,QAAQ,GAAG;AAChC,sBAAM,OAAO,QAAQ,MAAM,CAAC;AAC5B,oBAAI;AACF,wBAAM,SAA+B,KAAK,MAAM,IAAI;AACpD,wBAAM,SAAS,OAAO,QAAQ,CAAC;AAC/B,sBAAI,CAAC,OAAQ;AAEb,sBAAI,OAAO,MAAM,YAAY,QAAW;AACtC,0BAAM,OAAkC;AAAA,sBACtC,MAAM;AAAA,sBACN,WAAW,OAAO,MAAM,WAAW;AAAA,oBACrC;AACA,6BAAS,UAAU,EAAE,MAAM,IAAI;AAAA,kBACjC;AAAA,gBACF,QAAQ;AAAA,gBAER;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,UAAE;AACA,iBAAO,YAAY;AAAA,QACrB;AACA,cAAM,aAAwC;AAAA,UAC5C,MAAM;AAAA,UACN,cAAc;AAAA,UACd,OAAO,EAAE,cAAc,GAAG,kBAAkB,EAAE;AAAA,QAChD;AACA,iBAAS,UAAU,EAAE,MAAM,UAAU;AAAA,MACvC,SAAS,GAAG;AACV,iBAAS,UAAU,EAAE,MAAM,CAAU;AAAA,MACvC,UAAE;AACA,iBAAS,UAAU,EAAE,MAAM;AAAA,MAC7B;AAAA,IACF,GAAG;AAEH,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,WAAW;AAAA,QACX,aAAa,KAAK;AAAA,MACpB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,MAAc,sBAAsB,QAAiD;AACnF,UAAM,WAAsD,CAAC;AAE7D,eAAW,WAAW,QAAQ;AAC5B,YAAM,EAAE,MAAM,QAAQ,IAAI;AAE1B,UAAI,SAAS,UAAU;AACrB,YAAI,OAAO;AACX,YAAI,OAAO,YAAY,UAAU;AAC/B,iBAAO;AAAA,QACT,WAAW,MAAM,QAAQ,OAAO,GAAG;AACjC,gBAAM,YAAa,QAA2C,OAAO,OAAK,EAAE,SAAS,MAAM;AAC3F,iBAAO,UAAU,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,EAAE;AAAA,QAC3C;AACA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,QAClC,CAAC;AAAA,MACH,WAAW,SAAS,QAAQ;AAC1B,cAAM,cAAc;AACpB,YAAI,YAAY,WAAW,KAAK,YAAY,CAAC,EAAE,SAAS,QAAQ;AAC9D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAO,YAAY,CAAC,EAA8B,KAAK,CAAC;AAAA,UACpF,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,MAAM,QAAQ,IAAI
,YAAY,IAAI,OAAO,SAAS;AACzD,gBAAI,KAAK,SAAS,QAAQ;AACxB,qBAAO,EAAE,MAAM,QAAQ,MAAO,KAAiC,KAAK;AAAA,YACtE;AACA,gBAAI,KAAK,SAAS,SAAS;AACzB,oBAAM,YAAY;AAClB,kBAAI;AAEJ,kBAAI,UAAU,iBAAiB,KAAK;AAClC,sBAAM,WAAW,OAAO,KAAK,OAAO,SAAS,OAAO,UAAU,MAAM,SAAS,CAAC;AAC9E,sBAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,sBAAM,QAAQ,IAAI,WAAW,WAAW;AACxC,oBAAI,SAAS;AACb,yBAAS,IAAI,GAAG,IAAI,MAAM,YAAY,KAAK;AACzC,4BAAU,OAAO,aAAa,MAAM,CAAC,CAAC;AAAA,gBACxC;AACA,6BAAa,KAAK,MAAM;AAAA,cAC1B,OAAO;AACL,iCAAa,iDAA0B,UAAU,KAAK;AAAA,cACxD;AAEA,qBAAO;AAAA,gBACL,MAAM;AAAA,gBACN,aAAa;AAAA,kBACX,MAAM;AAAA,kBACN,MAAM;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AACA,kBAAM,IAAI,8CAA8B;AAAA,cACtC,eAAe,qBAAsB,KAA0B,IAAI;AAAA,YACrE,CAAC;AAAA,UACH,CAAC,CAAC;AAAA,QACJ,CAAC;AAAA,MACH,WAAW,SAAS,aAAa;AAC/B,cAAM,mBAAmB;AACzB,YAAI,OAAO;AACX,mBAAW,QAAQ,kBAAkB;AACnC,cAAI,KAAK,SAAS,QAAQ;AACxB,oBAAQ,KAAK;AAAA,UACf;AAAA,QACF;AACA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH,WAAW,SAAS,QAAQ;AAC1B,mBAAW,gBAAgB,SAAS;AAClC,cAAI,aAAa,SAAS,eAAe;AACvC,kBAAM,SAAS,aAAa;AAC5B,gBAAI;AAEJ,gBAAI,OAAO,WAAW,UAAU;AAC9B,6BAAe;AAAA,YACjB,WAAW,OAAO,WAAW,UAAU;AACrC,6BAAe,KAAK,UAAU,MAAM;AAAA,YACtC,OAAO;AACL,6BAAe,OAAO,MAAM;AAAA,YAC9B;AAEA,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,0BAA0B,QAAyE;AACzG,UAAM,WAAqD,CAAC;AAE5D,eAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,UAAI,SAAS,UAAU;AACrB,cAAM,OAAO,MAAM,QAAQ,OAAO,IAC7B,QAA2C,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM,GAAG,QAAQ,KACpF;AACJ,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,KAAK,CAAC;AAAA,MACjD,WAAW,SAAS,QAAQ;AAC1B,cAAM,OAAO,MAAM,QAAQ,OAAO,IAC7B,QACE,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAC/B,IAAI,CAAC,MAAO,EAA8B,IAAI,EAC9C,KAAK,IAAI,IACZ;AACJ,iBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,KAAK,CAAC;AAAA,MAC/C,WAAW,SAAS,aAAa;AAC/B,cAAM,OAAO,MAAM,QAAQ,OAAO,IAC7B,QAA2C,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM,GAAG,QAAQ,KACpF;AACJ,iBAAS,KAAK,EAAE,MAAM,aAAa,SAAS,KAAK,CAAC;AAAA,MACpD,WAAW,SAAS,QAAQ;AAC1B,mBAAW,gBAAgB,SAAS;AAClC,cAAI,aAAa,SAAS,eAAe;AACvC,kBAAM,SAAS,aAAa;AAC5B,gBAAI;AAEJ,gB
AAI,OAAO,WAAW,UAAU;AAC9B,6BAAe;AAAA,YACjB,WAAW,OAAO,WAAW,UAAU;AACrC,6BAAe,KAAK,UAAU,MAAM;AAAA,YACtC,OAAO;AACL,6BAAe,OAAO,MAAM;AAAA,YAC9B;AAEA,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;ADlaO,SAAS,cACd,UAAmC,CAAC,GACnB;AACjB,QAAM,cAAU;AAAA,IACd,QAAQ,WAAW;AAAA,EACrB;AAEA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,SAA4B;AAAA,IAChC,UAAU;AAAA,IACV,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAC9B;AACH,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,YAAY;AAErB,SAAO;AACT;AAEO,IAAM,UAAU,cAAc;","names":["import_provider_utils"]}
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
// src/longcat-provider.ts
|
|
2
|
+
import {
|
|
3
|
+
loadApiKey,
|
|
4
|
+
withoutTrailingSlash
|
|
5
|
+
} from "@ai-sdk/provider-utils";
|
|
6
|
+
|
|
7
|
+
// src/longcat-chat-language-model.ts
|
|
8
|
+
import {
|
|
9
|
+
UnsupportedFunctionalityError
|
|
10
|
+
} from "@ai-sdk/provider";
|
|
11
|
+
import {
|
|
12
|
+
convertUint8ArrayToBase64
|
|
13
|
+
} from "@ai-sdk/provider-utils";
|
|
14
|
+
var LongcatChatLanguageModel = class {
|
|
15
|
+
constructor(modelId, settings, config) {
|
|
16
|
+
this.specificationVersion = "v1";
|
|
17
|
+
this.defaultObjectGenerationMode = void 0;
|
|
18
|
+
this.model = modelId;
|
|
19
|
+
this.settings = settings;
|
|
20
|
+
this.config = config;
|
|
21
|
+
}
|
|
22
|
+
get provider() {
|
|
23
|
+
return this.config.provider;
|
|
24
|
+
}
|
|
25
|
+
get modelId() {
|
|
26
|
+
return this.model;
|
|
27
|
+
}
|
|
28
|
+
async doGenerate(options) {
|
|
29
|
+
const isOmniModel = this.model.toLowerCase().includes("omni");
|
|
30
|
+
const messages = isOmniModel ? await this.convertToOmniMessages(options.prompt) : this.convertToStandardMessages(options.prompt);
|
|
31
|
+
const body = {
|
|
32
|
+
model: this.model,
|
|
33
|
+
messages,
|
|
34
|
+
stream: false
|
|
35
|
+
};
|
|
36
|
+
if (isOmniModel) {
|
|
37
|
+
if (this.settings.sessionId) {
|
|
38
|
+
body.sessionId = this.settings.sessionId;
|
|
39
|
+
}
|
|
40
|
+
if (this.settings.audio) {
|
|
41
|
+
body.audio = this.settings.audio;
|
|
42
|
+
}
|
|
43
|
+
if (this.settings.outputModalities) {
|
|
44
|
+
body.output_modalities = this.settings.outputModalities;
|
|
45
|
+
}
|
|
46
|
+
if (this.settings.textRepetitionPenalty) {
|
|
47
|
+
body.textRepetitionPenalty = this.settings.textRepetitionPenalty;
|
|
48
|
+
}
|
|
49
|
+
if (this.settings.audioRepetitionPenalty) {
|
|
50
|
+
body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
if (this.settings.maxTokens) {
|
|
54
|
+
body.max_tokens = this.settings.maxTokens;
|
|
55
|
+
}
|
|
56
|
+
if (this.settings.temperature) {
|
|
57
|
+
body.temperature = this.settings.temperature;
|
|
58
|
+
}
|
|
59
|
+
if (this.settings.topP) {
|
|
60
|
+
body.topP = this.settings.topP;
|
|
61
|
+
}
|
|
62
|
+
if (this.settings.topK) {
|
|
63
|
+
body.topK = this.settings.topK;
|
|
64
|
+
}
|
|
65
|
+
const url = this.config.url({ path: "/chat/completions" });
|
|
66
|
+
const response = await (this.config.fetch ?? fetch)(url, {
|
|
67
|
+
method: "POST",
|
|
68
|
+
headers: {
|
|
69
|
+
"Content-Type": "application/json",
|
|
70
|
+
...this.config.headers()
|
|
71
|
+
},
|
|
72
|
+
body: JSON.stringify(body)
|
|
73
|
+
});
|
|
74
|
+
if (!response.ok) {
|
|
75
|
+
const text = await response.text();
|
|
76
|
+
throw new Error(
|
|
77
|
+
`Longcat API error: ${response.status} ${response.statusText} - ${text}`
|
|
78
|
+
);
|
|
79
|
+
}
|
|
80
|
+
const json = await response.json();
|
|
81
|
+
const choice = json.choices[0];
|
|
82
|
+
return {
|
|
83
|
+
finishReason: choice?.finish_reason === "stop" ? "stop" : choice?.finish_reason === "length" ? "length" : "stop",
|
|
84
|
+
usage: {
|
|
85
|
+
promptTokens: json.usage?.prompt_tokens ?? 0,
|
|
86
|
+
completionTokens: json.usage?.completion_tokens ?? 0
|
|
87
|
+
},
|
|
88
|
+
rawCall: {
|
|
89
|
+
rawPrompt: messages,
|
|
90
|
+
rawSettings: this.settings
|
|
91
|
+
},
|
|
92
|
+
text: choice?.message?.content ?? "",
|
|
93
|
+
warnings: []
|
|
94
|
+
};
|
|
95
|
+
}
|
|
96
|
+
async doStream(options) {
|
|
97
|
+
const isOmniModel = this.model.toLowerCase().includes("omni");
|
|
98
|
+
const messages = isOmniModel ? await this.convertToOmniMessages(options.prompt) : this.convertToStandardMessages(options.prompt);
|
|
99
|
+
const body = {
|
|
100
|
+
model: this.model,
|
|
101
|
+
messages,
|
|
102
|
+
stream: true
|
|
103
|
+
};
|
|
104
|
+
if (isOmniModel) {
|
|
105
|
+
if (this.settings.sessionId) {
|
|
106
|
+
body.sessionId = this.settings.sessionId;
|
|
107
|
+
}
|
|
108
|
+
if (this.settings.audio) {
|
|
109
|
+
body.audio = this.settings.audio;
|
|
110
|
+
}
|
|
111
|
+
if (this.settings.outputModalities) {
|
|
112
|
+
body.output_modalities = this.settings.outputModalities;
|
|
113
|
+
}
|
|
114
|
+
if (this.settings.textRepetitionPenalty) {
|
|
115
|
+
body.textRepetitionPenalty = this.settings.textRepetitionPenalty;
|
|
116
|
+
}
|
|
117
|
+
if (this.settings.audioRepetitionPenalty) {
|
|
118
|
+
body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
if (this.settings.maxTokens) {
|
|
122
|
+
body.max_tokens = this.settings.maxTokens;
|
|
123
|
+
}
|
|
124
|
+
if (this.settings.temperature) {
|
|
125
|
+
body.temperature = this.settings.temperature;
|
|
126
|
+
}
|
|
127
|
+
if (this.settings.topP) {
|
|
128
|
+
body.topP = this.settings.topP;
|
|
129
|
+
}
|
|
130
|
+
if (this.settings.topK) {
|
|
131
|
+
body.topK = this.settings.topK;
|
|
132
|
+
}
|
|
133
|
+
const url = this.config.url({ path: "/chat/completions" });
|
|
134
|
+
const response = await (this.config.fetch ?? fetch)(url, {
|
|
135
|
+
method: "POST",
|
|
136
|
+
headers: {
|
|
137
|
+
"Content-Type": "application/json",
|
|
138
|
+
...this.config.headers()
|
|
139
|
+
},
|
|
140
|
+
body: JSON.stringify(body)
|
|
141
|
+
});
|
|
142
|
+
if (!response.ok) {
|
|
143
|
+
const error = await response.json().catch(() => ({}));
|
|
144
|
+
throw new Error(
|
|
145
|
+
error.error?.message ?? `Longcat API error: ${response.status} ${response.statusText}`
|
|
146
|
+
);
|
|
147
|
+
}
|
|
148
|
+
const decoder = new TextDecoder();
|
|
149
|
+
const bodyStream = response.body;
|
|
150
|
+
let buffer = "";
|
|
151
|
+
const { readable, writable } = new TransformStream();
|
|
152
|
+
(async () => {
|
|
153
|
+
try {
|
|
154
|
+
const reader = bodyStream.getReader();
|
|
155
|
+
try {
|
|
156
|
+
while (true) {
|
|
157
|
+
const { done, value } = await reader.read();
|
|
158
|
+
if (done) break;
|
|
159
|
+
buffer += decoder.decode(value, { stream: true });
|
|
160
|
+
const lines = buffer.split("\n");
|
|
161
|
+
buffer = lines.pop() ?? "";
|
|
162
|
+
for (const line of lines) {
|
|
163
|
+
const trimmed = line.trim();
|
|
164
|
+
if (!trimmed || trimmed === "data: [DONE]") {
|
|
165
|
+
continue;
|
|
166
|
+
}
|
|
167
|
+
if (trimmed.startsWith("data: ")) {
|
|
168
|
+
const data = trimmed.slice(6);
|
|
169
|
+
try {
|
|
170
|
+
const parsed = JSON.parse(data);
|
|
171
|
+
const choice = parsed.choices[0];
|
|
172
|
+
if (!choice) continue;
|
|
173
|
+
if (choice.delta.content !== void 0) {
|
|
174
|
+
const part = {
|
|
175
|
+
type: "text-delta",
|
|
176
|
+
textDelta: choice.delta.content ?? ""
|
|
177
|
+
};
|
|
178
|
+
writable.getWriter().write(part);
|
|
179
|
+
}
|
|
180
|
+
} catch {
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
} finally {
|
|
186
|
+
reader.releaseLock();
|
|
187
|
+
}
|
|
188
|
+
const finishPart = {
|
|
189
|
+
type: "finish",
|
|
190
|
+
finishReason: "stop",
|
|
191
|
+
usage: { promptTokens: 0, completionTokens: 0 }
|
|
192
|
+
};
|
|
193
|
+
writable.getWriter().write(finishPart);
|
|
194
|
+
} catch (e) {
|
|
195
|
+
writable.getWriter().abort(e);
|
|
196
|
+
} finally {
|
|
197
|
+
writable.getWriter().close();
|
|
198
|
+
}
|
|
199
|
+
})();
|
|
200
|
+
return {
|
|
201
|
+
stream: readable,
|
|
202
|
+
rawCall: {
|
|
203
|
+
rawPrompt: messages,
|
|
204
|
+
rawSettings: this.settings
|
|
205
|
+
},
|
|
206
|
+
warnings: []
|
|
207
|
+
};
|
|
208
|
+
}
|
|
209
|
+
async convertToOmniMessages(prompt) {
|
|
210
|
+
const messages = [];
|
|
211
|
+
for (const message of prompt) {
|
|
212
|
+
const { role, content } = message;
|
|
213
|
+
if (role === "system") {
|
|
214
|
+
let text = "";
|
|
215
|
+
if (typeof content === "string") {
|
|
216
|
+
text = content;
|
|
217
|
+
} else if (Array.isArray(content)) {
|
|
218
|
+
const textParts = content.filter((c) => c.type === "text");
|
|
219
|
+
text = textParts.map((c) => c.text).join("");
|
|
220
|
+
}
|
|
221
|
+
messages.push({
|
|
222
|
+
role: "system",
|
|
223
|
+
content: [{ type: "text", text }]
|
|
224
|
+
});
|
|
225
|
+
} else if (role === "user") {
|
|
226
|
+
const userContent = content;
|
|
227
|
+
if (userContent.length === 1 && userContent[0].type === "text") {
|
|
228
|
+
messages.push({
|
|
229
|
+
role: "user",
|
|
230
|
+
content: [{ type: "text", text: userContent[0].text }]
|
|
231
|
+
});
|
|
232
|
+
continue;
|
|
233
|
+
}
|
|
234
|
+
messages.push({
|
|
235
|
+
role: "user",
|
|
236
|
+
content: await Promise.all(userContent.map(async (part) => {
|
|
237
|
+
if (part.type === "text") {
|
|
238
|
+
return { type: "text", text: part.text };
|
|
239
|
+
}
|
|
240
|
+
if (part.type === "image") {
|
|
241
|
+
const imagePart = part;
|
|
242
|
+
let base64Data;
|
|
243
|
+
if (imagePart.image instanceof URL) {
|
|
244
|
+
const response = await (this.config.fetch ?? fetch)(imagePart.image.toString());
|
|
245
|
+
const arrayBuffer = await response.arrayBuffer();
|
|
246
|
+
const bytes = new Uint8Array(arrayBuffer);
|
|
247
|
+
let binary = "";
|
|
248
|
+
for (let i = 0; i < bytes.byteLength; i++) {
|
|
249
|
+
binary += String.fromCharCode(bytes[i]);
|
|
250
|
+
}
|
|
251
|
+
base64Data = btoa(binary);
|
|
252
|
+
} else {
|
|
253
|
+
base64Data = convertUint8ArrayToBase64(imagePart.image);
|
|
254
|
+
}
|
|
255
|
+
return {
|
|
256
|
+
type: "input_image",
|
|
257
|
+
input_image: {
|
|
258
|
+
type: "base64",
|
|
259
|
+
data: base64Data
|
|
260
|
+
}
|
|
261
|
+
};
|
|
262
|
+
}
|
|
263
|
+
throw new UnsupportedFunctionalityError({
|
|
264
|
+
functionality: `user message with ${part.type} content`
|
|
265
|
+
});
|
|
266
|
+
}))
|
|
267
|
+
});
|
|
268
|
+
} else if (role === "assistant") {
|
|
269
|
+
const assistantContent = content;
|
|
270
|
+
let text = "";
|
|
271
|
+
for (const part of assistantContent) {
|
|
272
|
+
if (part.type === "text") {
|
|
273
|
+
text += part.text;
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
messages.push({
|
|
277
|
+
role: "assistant",
|
|
278
|
+
content: text
|
|
279
|
+
});
|
|
280
|
+
} else if (role === "tool") {
|
|
281
|
+
for (const toolResponse of content) {
|
|
282
|
+
if (toolResponse.type === "tool-result") {
|
|
283
|
+
const output = toolResponse.result;
|
|
284
|
+
let contentValue;
|
|
285
|
+
if (typeof output === "string") {
|
|
286
|
+
contentValue = output;
|
|
287
|
+
} else if (typeof output === "object") {
|
|
288
|
+
contentValue = JSON.stringify(output);
|
|
289
|
+
} else {
|
|
290
|
+
contentValue = String(output);
|
|
291
|
+
}
|
|
292
|
+
messages.push({
|
|
293
|
+
role: "user",
|
|
294
|
+
content: contentValue
|
|
295
|
+
});
|
|
296
|
+
}
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
return messages;
|
|
301
|
+
}
|
|
302
|
+
convertToStandardMessages(prompt) {
|
|
303
|
+
const messages = [];
|
|
304
|
+
for (const { role, content } of prompt) {
|
|
305
|
+
if (role === "system") {
|
|
306
|
+
const text = Array.isArray(content) ? content.find((c) => c.type === "text")?.text ?? "" : content;
|
|
307
|
+
messages.push({ role: "system", content: text });
|
|
308
|
+
} else if (role === "user") {
|
|
309
|
+
const text = Array.isArray(content) ? content.filter((c) => c.type === "text").map((c) => c.text).join("\n") : content;
|
|
310
|
+
messages.push({ role: "user", content: text });
|
|
311
|
+
} else if (role === "assistant") {
|
|
312
|
+
const text = Array.isArray(content) ? content.find((c) => c.type === "text")?.text ?? "" : content;
|
|
313
|
+
messages.push({ role: "assistant", content: text });
|
|
314
|
+
} else if (role === "tool") {
|
|
315
|
+
for (const toolResponse of content) {
|
|
316
|
+
if (toolResponse.type === "tool-result") {
|
|
317
|
+
const output = toolResponse.result;
|
|
318
|
+
let contentValue;
|
|
319
|
+
if (typeof output === "string") {
|
|
320
|
+
contentValue = output;
|
|
321
|
+
} else if (typeof output === "object") {
|
|
322
|
+
contentValue = JSON.stringify(output);
|
|
323
|
+
} else {
|
|
324
|
+
contentValue = String(output);
|
|
325
|
+
}
|
|
326
|
+
messages.push({
|
|
327
|
+
role: "tool",
|
|
328
|
+
content: contentValue
|
|
329
|
+
});
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
return messages;
|
|
335
|
+
}
|
|
336
|
+
};
|
|
337
|
+
|
|
338
|
+
// src/longcat-provider.ts
|
|
339
|
+
function createLongcat(options = {}) {
|
|
340
|
+
const baseURL = withoutTrailingSlash(
|
|
341
|
+
options.baseURL ?? "https://api.longcat.chat/openai/v1"
|
|
342
|
+
);
|
|
343
|
+
const getHeaders = () => ({
|
|
344
|
+
Authorization: `Bearer ${loadApiKey({
|
|
345
|
+
apiKey: options.apiKey,
|
|
346
|
+
environmentVariableName: "LONGCAT_API_KEY",
|
|
347
|
+
description: "Longcat API key"
|
|
348
|
+
})}`,
|
|
349
|
+
...options.headers
|
|
350
|
+
});
|
|
351
|
+
const config = {
|
|
352
|
+
provider: "longcat",
|
|
353
|
+
url: ({ path }) => {
|
|
354
|
+
const url = new URL(`${baseURL}${path}`);
|
|
355
|
+
if (options.queryParams) {
|
|
356
|
+
url.search = new URLSearchParams(options.queryParams).toString();
|
|
357
|
+
}
|
|
358
|
+
return url.toString();
|
|
359
|
+
},
|
|
360
|
+
headers: getHeaders,
|
|
361
|
+
fetch: options.fetch
|
|
362
|
+
};
|
|
363
|
+
const createChatModel = (modelId, settings = {}) => {
|
|
364
|
+
return new LongcatChatLanguageModel(
|
|
365
|
+
modelId,
|
|
366
|
+
settings,
|
|
367
|
+
config
|
|
368
|
+
);
|
|
369
|
+
};
|
|
370
|
+
const provider = (modelId, settings) => createChatModel(modelId, settings);
|
|
371
|
+
provider.chatModel = createChatModel;
|
|
372
|
+
return provider;
|
|
373
|
+
}
|
|
374
|
+
var longcat = createLongcat();
|
|
375
|
+
export {
|
|
376
|
+
LongcatChatLanguageModel,
|
|
377
|
+
createLongcat,
|
|
378
|
+
longcat
|
|
379
|
+
};
|
|
380
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/longcat-provider.ts","../src/longcat-chat-language-model.ts"],"sourcesContent":["import { LanguageModelV1 } from '@ai-sdk/provider';\nimport {\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { LongcatChatLanguageModel } from './longcat-chat-language-model';\nimport type { LongcatChatModelId, LongcatChatSettings } from './longcat-chat-settings';\nimport type { LongcatChatConfig } from './longcat-chat-language-model';\n\nexport interface LongcatProviderSettings {\n apiKey?: string;\n baseURL?: string;\n headers?: Record<string, string>;\n queryParams?: Record<string, string>;\n fetch?: typeof fetch;\n}\n\nexport interface LongcatProvider {\n (\n modelId: LongcatChatModelId,\n settings?: LongcatChatSettings,\n ): LanguageModelV1;\n\n chatModel(\n modelId: LongcatChatModelId,\n settings?: LongcatChatSettings,\n ): LanguageModelV1;\n}\n\nexport function createLongcat(\n options: LongcatProviderSettings = {},\n): LongcatProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 
'https://api.longcat.chat/openai/v1',\n );\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'LONGCAT_API_KEY',\n description: 'Longcat API key',\n })}`,\n ...options.headers,\n });\n\n const config: LongcatChatConfig = {\n provider: 'longcat',\n url: ({ path }) => {\n const url = new URL(`${baseURL}${path}`);\n if (options.queryParams) {\n url.search = new URLSearchParams(options.queryParams).toString();\n }\n return url.toString();\n },\n headers: getHeaders,\n fetch: options.fetch,\n };\n\n const createChatModel = (\n modelId: LongcatChatModelId,\n settings: LongcatChatSettings = {},\n ) => {\n return new LongcatChatLanguageModel(\n modelId,\n settings,\n config,\n );\n };\n\n const provider = (\n modelId: LongcatChatModelId,\n settings?: LongcatChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.chatModel = createChatModel;\n\n return provider;\n}\n\nexport const longcat = createLongcat();\n","import {\n LanguageModelV1,\n LanguageModelV1CallOptions,\n LanguageModelV1Prompt,\n LanguageModelV1StreamPart,\n LanguageModelV1FinishReason,\n LanguageModelV1CallWarning,\n LanguageModelV1FunctionToolCall,\n LanguageModelV1Message,\n LanguageModelV1TextPart,\n LanguageModelV1ImagePart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n convertUint8ArrayToBase64,\n} from '@ai-sdk/provider-utils';\nimport type { LongcatChatModelId, LongcatChatSettings } from './longcat-chat-settings';\n\nexport interface LongcatChatConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: typeof fetch;\n}\n\ninterface LongcatResponseChunk {\n id: string;\n object: string;\n created: number;\n model: string;\n choices: Array<{\n index: number;\n delta: {\n role?: string;\n content?: string;\n audio?: string | null;\n type?: string;\n };\n finish_reason?: string | null;\n }>;\n usage?: {\n prompt_tokens: 
number;\n completion_tokens: number;\n total_tokens: number;\n };\n session_id?: string;\n}\n\nexport class LongcatChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly model: string;\n readonly settings: LongcatChatSettings;\n readonly defaultObjectGenerationMode: 'json' | 'tool' | 'grammar' | undefined = undefined;\n\n private readonly config: LongcatChatConfig;\n\n constructor(\n modelId: LongcatChatModelId,\n settings: LongcatChatSettings,\n config: LongcatChatConfig,\n ) {\n this.model = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n get modelId(): string {\n return this.model;\n }\n\n async doGenerate(options: LanguageModelV1CallOptions): Promise<{\n text?: string;\n toolCalls?: Array<LanguageModelV1FunctionToolCall>;\n finishReason: LanguageModelV1FinishReason;\n usage: { promptTokens: number; completionTokens: number };\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const isOmniModel = this.model.toLowerCase().includes('omni');\n \n const messages = isOmniModel\n ? 
await this.convertToOmniMessages(options.prompt)\n : this.convertToStandardMessages(options.prompt);\n\n const body: Record<string, unknown> = {\n model: this.model,\n messages,\n stream: false,\n };\n\n if (isOmniModel) {\n if (this.settings.sessionId) {\n body.sessionId = this.settings.sessionId;\n }\n if (this.settings.audio) {\n body.audio = this.settings.audio;\n }\n if (this.settings.outputModalities) {\n body.output_modalities = this.settings.outputModalities;\n }\n if (this.settings.textRepetitionPenalty) {\n body.textRepetitionPenalty = this.settings.textRepetitionPenalty;\n }\n if (this.settings.audioRepetitionPenalty) {\n body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;\n }\n }\n\n if (this.settings.maxTokens) {\n body.max_tokens = this.settings.maxTokens;\n }\n if (this.settings.temperature) {\n body.temperature = this.settings.temperature;\n }\n if (this.settings.topP) {\n body.topP = this.settings.topP;\n }\n if (this.settings.topK) {\n body.topK = this.settings.topK;\n }\n\n const url = this.config.url({ path: '/chat/completions' });\n const response = await (this.config.fetch ?? fetch)(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.config.headers(),\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const text = await response.text();\n throw new Error(\n `Longcat API error: ${response.status} ${response.statusText} - ${text}`,\n );\n }\n\n const json = await response.json();\n const choice = json.choices[0];\n\n return {\n finishReason: choice?.finish_reason === 'stop'\n ? 'stop'\n : choice?.finish_reason === 'length'\n ? 'length'\n : 'stop',\n usage: {\n promptTokens: json.usage?.prompt_tokens ?? 0,\n completionTokens: json.usage?.completion_tokens ?? 0,\n },\n rawCall: {\n rawPrompt: messages,\n rawSettings: this.settings,\n },\n text: choice?.message?.content ?? 
'',\n warnings: [],\n };\n }\n\n async doStream(options: LanguageModelV1CallOptions): Promise<{\n stream: ReadableStream<LanguageModelV1StreamPart>;\n rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };\n warnings?: LanguageModelV1CallWarning[];\n }> {\n const isOmniModel = this.model.toLowerCase().includes('omni');\n \n const messages = isOmniModel\n ? await this.convertToOmniMessages(options.prompt)\n : this.convertToStandardMessages(options.prompt);\n\n const body: Record<string, unknown> = {\n model: this.model,\n messages,\n stream: true,\n };\n\n if (isOmniModel) {\n if (this.settings.sessionId) {\n body.sessionId = this.settings.sessionId;\n }\n if (this.settings.audio) {\n body.audio = this.settings.audio;\n }\n if (this.settings.outputModalities) {\n body.output_modalities = this.settings.outputModalities;\n }\n if (this.settings.textRepetitionPenalty) {\n body.textRepetitionPenalty = this.settings.textRepetitionPenalty;\n }\n if (this.settings.audioRepetitionPenalty) {\n body.audioRepetitionPenalty = this.settings.audioRepetitionPenalty;\n }\n }\n\n if (this.settings.maxTokens) {\n body.max_tokens = this.settings.maxTokens;\n }\n if (this.settings.temperature) {\n body.temperature = this.settings.temperature;\n }\n if (this.settings.topP) {\n body.topP = this.settings.topP;\n }\n if (this.settings.topK) {\n body.topK = this.settings.topK;\n }\n\n const url = this.config.url({ path: '/chat/completions' });\n const response = await (this.config.fetch ?? fetch)(url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n ...this.config.headers(),\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const error = await response.json().catch(() => ({}));\n throw new Error(\n error.error?.message ?? 
`Longcat API error: ${response.status} ${response.statusText}`,\n );\n }\n\n const decoder = new TextDecoder();\n const bodyStream = response.body!;\n let buffer = '';\n\n const { readable, writable } = new TransformStream<LanguageModelV1StreamPart>();\n\n (async () => {\n try {\n const reader = bodyStream.getReader();\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n buffer = lines.pop() ?? '';\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed === 'data: [DONE]') {\n continue;\n }\n\n if (trimmed.startsWith('data: ')) {\n const data = trimmed.slice(6);\n try {\n const parsed: LongcatResponseChunk = JSON.parse(data);\n const choice = parsed.choices[0];\n if (!choice) continue;\n\n if (choice.delta.content !== undefined) {\n const part: LanguageModelV1StreamPart = {\n type: 'text-delta',\n textDelta: choice.delta.content ?? 
'',\n };\n writable.getWriter().write(part);\n }\n } catch {\n // Skip invalid JSON\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n const finishPart: LanguageModelV1StreamPart = {\n type: 'finish',\n finishReason: 'stop',\n usage: { promptTokens: 0, completionTokens: 0 },\n };\n writable.getWriter().write(finishPart);\n } catch (e) {\n writable.getWriter().abort(e as Error);\n } finally {\n writable.getWriter().close();\n }\n })();\n\n return {\n stream: readable,\n rawCall: {\n rawPrompt: messages,\n rawSettings: this.settings,\n },\n warnings: [],\n };\n }\n\n private async convertToOmniMessages(prompt: LanguageModelV1Prompt): Promise<unknown> {\n const messages: Array<{ role: string; content: unknown }> = [];\n\n for (const message of prompt) {\n const { role, content } = message;\n\n if (role === 'system') {\n let text = '';\n if (typeof content === 'string') {\n text = content;\n } else if (Array.isArray(content)) {\n const textParts = (content as Array<LanguageModelV1TextPart>).filter(c => c.type === 'text') as LanguageModelV1TextPart[];\n text = textParts.map(c => c.text).join('');\n }\n messages.push({\n role: 'system',\n content: [{ type: 'text', text }],\n });\n } else if (role === 'user') {\n const userContent = content as Array<LanguageModelV1TextPart | LanguageModelV1ImagePart>;\n if (userContent.length === 1 && userContent[0].type === 'text') {\n messages.push({\n role: 'user',\n content: [{ type: 'text', text: (userContent[0] as LanguageModelV1TextPart).text }],\n });\n continue;\n }\n\n messages.push({\n role: 'user',\n content: await Promise.all(userContent.map(async (part) => {\n if (part.type === 'text') {\n return { type: 'text', text: (part as LanguageModelV1TextPart).text };\n }\n if (part.type === 'image') {\n const imagePart = part as LanguageModelV1ImagePart;\n let base64Data: string;\n \n if (imagePart.image instanceof URL) {\n const response = await (this.config.fetch ?? 
fetch)(imagePart.image.toString());\n const arrayBuffer = await response.arrayBuffer();\n const bytes = new Uint8Array(arrayBuffer);\n let binary = '';\n for (let i = 0; i < bytes.byteLength; i++) {\n binary += String.fromCharCode(bytes[i]);\n }\n base64Data = btoa(binary);\n } else {\n base64Data = convertUint8ArrayToBase64(imagePart.image);\n }\n \n return {\n type: 'input_image',\n input_image: {\n type: 'base64',\n data: base64Data,\n },\n };\n }\n throw new UnsupportedFunctionalityError({\n functionality: `user message with ${(part as { type: string }).type} content`,\n });\n })),\n });\n } else if (role === 'assistant') {\n const assistantContent = content as Array<LanguageModelV1TextPart>;\n let text = '';\n for (const part of assistantContent) {\n if (part.type === 'text') {\n text += part.text;\n }\n }\n messages.push({\n role: 'assistant',\n content: text,\n });\n } else if (role === 'tool') {\n for (const toolResponse of content) {\n if (toolResponse.type === 'tool-result') {\n const output = toolResponse.result;\n let contentValue: string;\n\n if (typeof output === 'string') {\n contentValue = output;\n } else if (typeof output === 'object') {\n contentValue = JSON.stringify(output);\n } else {\n contentValue = String(output);\n }\n\n messages.push({\n role: 'user',\n content: contentValue,\n });\n }\n }\n }\n }\n\n return messages;\n }\n\n private convertToStandardMessages(prompt: LanguageModelV1Prompt): Array<{ role: string; content: string }> {\n const messages: Array<{ role: string; content: string }> = [];\n\n for (const { role, content } of prompt) {\n if (role === 'system') {\n const text = Array.isArray(content)\n ? (content as Array<LanguageModelV1TextPart>).find((c) => c.type === 'text')?.text ?? ''\n : content;\n messages.push({ role: 'system', content: text });\n } else if (role === 'user') {\n const text = Array.isArray(content)\n ? 
(content as Array<LanguageModelV1TextPart>)\n .filter((c) => c.type === 'text')\n .map((c) => (c as LanguageModelV1TextPart).text)\n .join('\\n')\n : content;\n messages.push({ role: 'user', content: text });\n } else if (role === 'assistant') {\n const text = Array.isArray(content)\n ? (content as Array<LanguageModelV1TextPart>).find((c) => c.type === 'text')?.text ?? ''\n : content;\n messages.push({ role: 'assistant', content: text });\n } else if (role === 'tool') {\n for (const toolResponse of content) {\n if (toolResponse.type === 'tool-result') {\n const output = toolResponse.result;\n let contentValue: string;\n\n if (typeof output === 'string') {\n contentValue = output;\n } else if (typeof output === 'object') {\n contentValue = JSON.stringify(output);\n } else {\n contentValue = String(output);\n }\n\n messages.push({\n role: 'tool',\n content: contentValue,\n });\n }\n }\n }\n }\n\n return messages;\n }\n}\n"],"mappings":";AACA;AAAA,EACE;AAAA,EACA;AAAA,OACK;;;ACJP;AAAA,EAWE;AAAA,OACK;AACP;AAAA,EACE;AAAA,OACK;AAiCA,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YACE,SACA,UACA,QACA;AAXF,SAAS,uBAAuB;AAGhC,SAAS,8BAAuE;AAS9E,SAAK,QAAQ;AACb,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,UAAkB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,WAAW,SAOd;AACD,UAAM,cAAc,KAAK,MAAM,YAAY,EAAE,SAAS,MAAM;AAE5D,UAAM,WAAW,cACb,MAAM,KAAK,sBAAsB,QAAQ,MAAM,IAC/C,KAAK,0BAA0B,QAAQ,MAAM;AAEjD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,aAAa;AACf,UAAI,KAAK,SAAS,WAAW;AAC3B,aAAK,YAAY,KAAK,SAAS;AAAA,MACjC;AACA,UAAI,KAAK,SAAS,OAAO;AACvB,aAAK,QAAQ,KAAK,SAAS;AAAA,MAC7B;AACA,UAAI,KAAK,SAAS,kBAAkB;AAClC,aAAK,oBAAoB,KAAK,SAAS;AAAA,MACzC;AACA,UAAI,KAAK,SAAS,uBAAuB;AACvC,aAAK,wBAAwB,KAAK,SAAS;AAAA,MAC7C;AACA,UAAI,KAAK,SAAS,wBAAwB;AACxC,aAAK,yBAAyB,KAAK,SAAS;AAAA,MAC9C;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,WAAW;AAC3B,WAAK,aAAa,KAAK,SAAS;AAAA,IAClC;AACA,QAAI,KAAK,SAAS,aAAa;AAC7B,WAAK,cAAc,KAAK,SAAS;AAAA,IACnC;AACA,QAAI,KAAK,SAAS,MAAM;AA
CtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AACA,QAAI,KAAK,SAAS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AAEA,UAAM,MAAM,KAAK,OAAO,IAAI,EAAE,MAAM,oBAAoB,CAAC;AACzD,UAAM,WAAW,OAAO,KAAK,OAAO,SAAS,OAAO,KAAK;AAAA,MACvD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAG,KAAK,OAAO,QAAQ;AAAA,MACzB;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,IAAI;AAAA,QACR,sBAAsB,SAAS,MAAM,IAAI,SAAS,UAAU,MAAM,IAAI;AAAA,MACxE;AAAA,IACF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,SAAS,KAAK,QAAQ,CAAC;AAE7B,WAAO;AAAA,MACL,cAAc,QAAQ,kBAAkB,SACpC,SACA,QAAQ,kBAAkB,WAC1B,WACA;AAAA,MACJ,OAAO;AAAA,QACL,cAAc,KAAK,OAAO,iBAAiB;AAAA,QAC3C,kBAAkB,KAAK,OAAO,qBAAqB;AAAA,MACrD;AAAA,MACA,SAAS;AAAA,QACP,WAAW;AAAA,QACX,aAAa,KAAK;AAAA,MACpB;AAAA,MACA,MAAM,QAAQ,SAAS,WAAW;AAAA,MAClC,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,SAIZ;AACD,UAAM,cAAc,KAAK,MAAM,YAAY,EAAE,SAAS,MAAM;AAE5D,UAAM,WAAW,cACb,MAAM,KAAK,sBAAsB,QAAQ,MAAM,IAC/C,KAAK,0BAA0B,QAAQ,MAAM;AAEjD,UAAM,OAAgC;AAAA,MACpC,OAAO,KAAK;AAAA,MACZ;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,aAAa;AACf,UAAI,KAAK,SAAS,WAAW;AAC3B,aAAK,YAAY,KAAK,SAAS;AAAA,MACjC;AACA,UAAI,KAAK,SAAS,OAAO;AACvB,aAAK,QAAQ,KAAK,SAAS;AAAA,MAC7B;AACA,UAAI,KAAK,SAAS,kBAAkB;AAClC,aAAK,oBAAoB,KAAK,SAAS;AAAA,MACzC;AACA,UAAI,KAAK,SAAS,uBAAuB;AACvC,aAAK,wBAAwB,KAAK,SAAS;AAAA,MAC7C;AACA,UAAI,KAAK,SAAS,wBAAwB;AACxC,aAAK,yBAAyB,KAAK,SAAS;AAAA,MAC9C;AAAA,IACF;AAEA,QAAI,KAAK,SAAS,WAAW;AAC3B,WAAK,aAAa,KAAK,SAAS;AAAA,IAClC;AACA,QAAI,KAAK,SAAS,aAAa;AAC7B,WAAK,cAAc,KAAK,SAAS;AAAA,IACnC;AACA,QAAI,KAAK,SAAS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AACA,QAAI,KAAK,SAAS,MAAM;AACtB,WAAK,OAAO,KAAK,SAAS;AAAA,IAC5B;AAEA,UAAM,MAAM,KAAK,OAAO,IAAI,EAAE,MAAM,oBAAoB,CAAC;AACzD,UAAM,WAAW,OAAO,KAAK,OAAO,SAAS,OAAO,KAAK;AAAA,MACvD,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,GAAG,KAAK,OAAO,QAAQ;AAAA,MACzB;AAAA,MACA,MAAM,KAAK,UAAU,IAAI;AAAA,IAC3B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,YAAM,IAAI;AAAA,QACR,MAAM,OAAO,W
AAW,sBAAsB,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,MACtF;AAAA,IACF;AAEA,UAAM,UAAU,IAAI,YAAY;AAChC,UAAM,aAAa,SAAS;AAC5B,QAAI,SAAS;AAEb,UAAM,EAAE,UAAU,SAAS,IAAI,IAAI,gBAA2C;AAE9E,KAAC,YAAY;AACX,UAAI;AACF,cAAM,SAAS,WAAW,UAAU;AACpC,YAAI;AACF,iBAAO,MAAM;AACX,kBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,gBAAI,KAAM;AACV,sBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,kBAAM,QAAQ,OAAO,MAAM,IAAI;AAC/B,qBAAS,MAAM,IAAI,KAAK;AAExB,uBAAW,QAAQ,OAAO;AACxB,oBAAM,UAAU,KAAK,KAAK;AAC1B,kBAAI,CAAC,WAAW,YAAY,gBAAgB;AAC1C;AAAA,cACF;AAEA,kBAAI,QAAQ,WAAW,QAAQ,GAAG;AAChC,sBAAM,OAAO,QAAQ,MAAM,CAAC;AAC5B,oBAAI;AACF,wBAAM,SAA+B,KAAK,MAAM,IAAI;AACpD,wBAAM,SAAS,OAAO,QAAQ,CAAC;AAC/B,sBAAI,CAAC,OAAQ;AAEb,sBAAI,OAAO,MAAM,YAAY,QAAW;AACtC,0BAAM,OAAkC;AAAA,sBACtC,MAAM;AAAA,sBACN,WAAW,OAAO,MAAM,WAAW;AAAA,oBACrC;AACA,6BAAS,UAAU,EAAE,MAAM,IAAI;AAAA,kBACjC;AAAA,gBACF,QAAQ;AAAA,gBAER;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,UAAE;AACA,iBAAO,YAAY;AAAA,QACrB;AACA,cAAM,aAAwC;AAAA,UAC5C,MAAM;AAAA,UACN,cAAc;AAAA,UACd,OAAO,EAAE,cAAc,GAAG,kBAAkB,EAAE;AAAA,QAChD;AACA,iBAAS,UAAU,EAAE,MAAM,UAAU;AAAA,MACvC,SAAS,GAAG;AACV,iBAAS,UAAU,EAAE,MAAM,CAAU;AAAA,MACvC,UAAE;AACA,iBAAS,UAAU,EAAE,MAAM;AAAA,MAC7B;AAAA,IACF,GAAG;AAEH,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,WAAW;AAAA,QACX,aAAa,KAAK;AAAA,MACpB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,MAAc,sBAAsB,QAAiD;AACnF,UAAM,WAAsD,CAAC;AAE7D,eAAW,WAAW,QAAQ;AAC5B,YAAM,EAAE,MAAM,QAAQ,IAAI;AAE1B,UAAI,SAAS,UAAU;AACrB,YAAI,OAAO;AACX,YAAI,OAAO,YAAY,UAAU;AAC/B,iBAAO;AAAA,QACT,WAAW,MAAM,QAAQ,OAAO,GAAG;AACjC,gBAAM,YAAa,QAA2C,OAAO,OAAK,EAAE,SAAS,MAAM;AAC3F,iBAAO,UAAU,IAAI,OAAK,EAAE,IAAI,EAAE,KAAK,EAAE;AAAA,QAC3C;AACA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,CAAC,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,QAClC,CAAC;AAAA,MACH,WAAW,SAAS,QAAQ;AAC1B,cAAM,cAAc;AACpB,YAAI,YAAY,WAAW,KAAK,YAAY,CAAC,EAAE,SAAS,QAAQ;AAC9D,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAO,YAAY,CAAC,EAA8B,KAAK,CAAC;AAAA,UACpF,CAAC;AACD;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,MAAM,QAAQ,IAAI,YAAY,IAAI
,OAAO,SAAS;AACzD,gBAAI,KAAK,SAAS,QAAQ;AACxB,qBAAO,EAAE,MAAM,QAAQ,MAAO,KAAiC,KAAK;AAAA,YACtE;AACA,gBAAI,KAAK,SAAS,SAAS;AACzB,oBAAM,YAAY;AAClB,kBAAI;AAEJ,kBAAI,UAAU,iBAAiB,KAAK;AAClC,sBAAM,WAAW,OAAO,KAAK,OAAO,SAAS,OAAO,UAAU,MAAM,SAAS,CAAC;AAC9E,sBAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,sBAAM,QAAQ,IAAI,WAAW,WAAW;AACxC,oBAAI,SAAS;AACb,yBAAS,IAAI,GAAG,IAAI,MAAM,YAAY,KAAK;AACzC,4BAAU,OAAO,aAAa,MAAM,CAAC,CAAC;AAAA,gBACxC;AACA,6BAAa,KAAK,MAAM;AAAA,cAC1B,OAAO;AACL,6BAAa,0BAA0B,UAAU,KAAK;AAAA,cACxD;AAEA,qBAAO;AAAA,gBACL,MAAM;AAAA,gBACN,aAAa;AAAA,kBACX,MAAM;AAAA,kBACN,MAAM;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AACA,kBAAM,IAAI,8BAA8B;AAAA,cACtC,eAAe,qBAAsB,KAA0B,IAAI;AAAA,YACrE,CAAC;AAAA,UACH,CAAC,CAAC;AAAA,QACJ,CAAC;AAAA,MACH,WAAW,SAAS,aAAa;AAC/B,cAAM,mBAAmB;AACzB,YAAI,OAAO;AACX,mBAAW,QAAQ,kBAAkB;AACnC,cAAI,KAAK,SAAS,QAAQ;AACxB,oBAAQ,KAAK;AAAA,UACf;AAAA,QACF;AACA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,QACX,CAAC;AAAA,MACH,WAAW,SAAS,QAAQ;AAC1B,mBAAW,gBAAgB,SAAS;AAClC,cAAI,aAAa,SAAS,eAAe;AACvC,kBAAM,SAAS,aAAa;AAC5B,gBAAI;AAEJ,gBAAI,OAAO,WAAW,UAAU;AAC9B,6BAAe;AAAA,YACjB,WAAW,OAAO,WAAW,UAAU;AACrC,6BAAe,KAAK,UAAU,MAAM;AAAA,YACtC,OAAO;AACL,6BAAe,OAAO,MAAM;AAAA,YAC9B;AAEA,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,0BAA0B,QAAyE;AACzG,UAAM,WAAqD,CAAC;AAE5D,eAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,UAAI,SAAS,UAAU;AACrB,cAAM,OAAO,MAAM,QAAQ,OAAO,IAC7B,QAA2C,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM,GAAG,QAAQ,KACpF;AACJ,iBAAS,KAAK,EAAE,MAAM,UAAU,SAAS,KAAK,CAAC;AAAA,MACjD,WAAW,SAAS,QAAQ;AAC1B,cAAM,OAAO,MAAM,QAAQ,OAAO,IAC7B,QACE,OAAO,CAAC,MAAM,EAAE,SAAS,MAAM,EAC/B,IAAI,CAAC,MAAO,EAA8B,IAAI,EAC9C,KAAK,IAAI,IACZ;AACJ,iBAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,KAAK,CAAC;AAAA,MAC/C,WAAW,SAAS,aAAa;AAC/B,cAAM,OAAO,MAAM,QAAQ,OAAO,IAC7B,QAA2C,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM,GAAG,QAAQ,KACpF;AACJ,iBAAS,KAAK,EAAE,MAAM,aAAa,SAAS,KAAK,CAAC;AAAA,MACpD,WAAW,SAAS,QAAQ;AAC1B,mBAAW,gBAAgB,SAAS;AAClC,cAAI,aAAa,SAAS,eAAe;AACvC,kBAAM,SAAS,aAAa;AAC5B,gBAAI;AAEJ,gBAAI,OAAO,W
AAW,UAAU;AAC9B,6BAAe;AAAA,YACjB,WAAW,OAAO,WAAW,UAAU;AACrC,6BAAe,KAAK,UAAU,MAAM;AAAA,YACtC,OAAO;AACL,6BAAe,OAAO,MAAM;AAAA,YAC9B;AAEA,qBAAS,KAAK;AAAA,cACZ,MAAM;AAAA,cACN,SAAS;AAAA,YACX,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;ADlaO,SAAS,cACd,UAAmC,CAAC,GACnB;AACjB,QAAM,UAAU;AAAA,IACd,QAAQ,WAAW;AAAA,EACrB;AAEA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,SAA4B;AAAA,IAChC,UAAU;AAAA,IACV,KAAK,CAAC,EAAE,KAAK,MAAM;AACjB,YAAM,MAAM,IAAI,IAAI,GAAG,OAAO,GAAG,IAAI,EAAE;AACvC,UAAI,QAAQ,aAAa;AACvB,YAAI,SAAS,IAAI,gBAAgB,QAAQ,WAAW,EAAE,SAAS;AAAA,MACjE;AACA,aAAO,IAAI,SAAS;AAAA,IACtB;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAC9B;AACH,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,YAAY;AAErB,SAAO;AACT;AAEO,IAAM,UAAU,cAAc;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "longcat-ai-sdk-provider",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Vercel AI SDK provider for Longcat",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"module": "./dist/index.mjs",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
      "import": {
        "types": "./dist/index.d.mts",
        "default": "./dist/index.mjs"
      },
      "require": {
        "types": "./dist/index.d.ts",
        "default": "./dist/index.js"
      }
    }
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"scripts": {
|
|
19
|
+
"build": "tsup",
|
|
20
|
+
"typecheck": "tsc --noEmit"
|
|
21
|
+
},
|
|
22
|
+
"dependencies": {
|
|
23
|
+
"@ai-sdk/openai-compatible": "^0.0.7",
|
|
24
|
+
"@ai-sdk/provider": "^0.0.1",
|
|
25
|
+
"@ai-sdk/provider-utils": "^0.0.1"
|
|
26
|
+
},
|
|
27
|
+
"devDependencies": {
|
|
28
|
+
"tsup": "^8.0.0",
|
|
29
|
+
"typescript": "^5.0.0"
|
|
30
|
+
},
|
|
31
|
+
"peerDependencies": {
|
|
32
|
+
"ai": ">=3.0.0"
|
|
33
|
+
}
|
|
34
|
+
}
|