modelfusion 0.114.1 → 0.116.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/CHANGELOG.md +52 -0
  2. package/README.md +5 -6
  3. package/classifier/SemanticClassifier.cjs +75 -0
  4. package/classifier/SemanticClassifier.d.ts +28 -0
  5. package/classifier/SemanticClassifier.js +71 -0
  6. package/classifier/index.cjs +17 -0
  7. package/classifier/index.d.ts +1 -0
  8. package/classifier/index.js +1 -0
  9. package/index.cjs +1 -0
  10. package/index.d.ts +1 -0
  11. package/index.js +1 -0
  12. package/model-provider/index.cjs +0 -1
  13. package/model-provider/index.d.ts +0 -1
  14. package/model-provider/index.js +0 -1
  15. package/model-provider/mistral/MistralTextEmbeddingModel.d.ts +13 -13
  16. package/model-provider/ollama/OllamaChatModel.d.ts +9 -9
  17. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +12 -12
  18. package/package.json +1 -1
  19. package/model-provider/anthropic/AnthropicApiConfiguration.cjs +0 -31
  20. package/model-provider/anthropic/AnthropicApiConfiguration.d.ts +0 -10
  21. package/model-provider/anthropic/AnthropicApiConfiguration.js +0 -27
  22. package/model-provider/anthropic/AnthropicError.cjs +0 -16
  23. package/model-provider/anthropic/AnthropicError.d.ts +0 -26
  24. package/model-provider/anthropic/AnthropicError.js +0 -13
  25. package/model-provider/anthropic/AnthropicFacade.cjs +0 -24
  26. package/model-provider/anthropic/AnthropicFacade.d.ts +0 -18
  27. package/model-provider/anthropic/AnthropicFacade.js +0 -19
  28. package/model-provider/anthropic/AnthropicPromptTemplate.cjs +0 -82
  29. package/model-provider/anthropic/AnthropicPromptTemplate.d.ts +0 -17
  30. package/model-provider/anthropic/AnthropicPromptTemplate.js +0 -76
  31. package/model-provider/anthropic/AnthropicPromptTemplate.test.cjs +0 -49
  32. package/model-provider/anthropic/AnthropicPromptTemplate.test.d.ts +0 -1
  33. package/model-provider/anthropic/AnthropicPromptTemplate.test.js +0 -47
  34. package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +0 -254
  35. package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +0 -153
  36. package/model-provider/anthropic/AnthropicTextGenerationModel.js +0 -250
  37. package/model-provider/anthropic/AnthropicTextGenerationModel.test.cjs +0 -44
  38. package/model-provider/anthropic/AnthropicTextGenerationModel.test.d.ts +0 -1
  39. package/model-provider/anthropic/AnthropicTextGenerationModel.test.js +0 -42
  40. package/model-provider/anthropic/index.cjs +0 -33
  41. package/model-provider/anthropic/index.d.ts +0 -5
  42. package/model-provider/anthropic/index.js +0 -4
@@ -1,153 +0,0 @@
1
- import { z } from "zod";
2
- import { FunctionCallOptions } from "../../core/FunctionOptions.js";
3
- import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
- import { ResponseHandler } from "../../core/api/postToApi.js";
5
- import { AbstractModel } from "../../model-function/AbstractModel.js";
6
- import { Delta } from "../../model-function/Delta.js";
7
- import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
8
- import { TextGenerationModelSettings, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
9
- import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
10
- import { TextGenerationFinishReason } from "../../model-function/generate-text/TextGenerationResult.js";
11
- export declare const ANTHROPIC_TEXT_GENERATION_MODELS: {
12
- "claude-instant-1": {
13
- contextWindowSize: number;
14
- };
15
- "claude-instant-1.2": {
16
- contextWindowSize: number;
17
- };
18
- "claude-2": {
19
- contextWindowSize: number;
20
- };
21
- "claude-2.0": {
22
- contextWindowSize: number;
23
- };
24
- "claude-2.1": {
25
- contextWindowSize: number;
26
- };
27
- };
28
- export type AnthropicTextGenerationModelType = keyof typeof ANTHROPIC_TEXT_GENERATION_MODELS;
29
- export interface AnthropicTextGenerationModelSettings extends TextGenerationModelSettings {
30
- api?: ApiConfiguration;
31
- model: AnthropicTextGenerationModelType;
32
- temperature?: number;
33
- topP?: number;
34
- topK?: number;
35
- userId?: number;
36
- }
37
- /**
38
- * Create a text generation model that calls the Anthropic API.
39
- *
40
- * @see https://docs.anthropic.com/claude/reference/complete_post
41
- */
42
- export declare class AnthropicTextGenerationModel extends AbstractModel<AnthropicTextGenerationModelSettings> implements TextStreamingModel<string, AnthropicTextGenerationModelSettings> {
43
- constructor(settings: AnthropicTextGenerationModelSettings);
44
- readonly provider: "anthropic";
45
- get modelName(): "claude-instant-1" | "claude-instant-1.2" | "claude-2" | "claude-2.0" | "claude-2.1";
46
- readonly contextWindowSize: number;
47
- readonly tokenizer: undefined;
48
- readonly countPromptTokens: undefined;
49
- callAPI<RESPONSE>(prompt: string, callOptions: FunctionCallOptions, options: {
50
- responseFormat: AnthropicTextGenerationResponseFormatType<RESPONSE>;
51
- }): Promise<RESPONSE>;
52
- get settingsForEvent(): Partial<AnthropicTextGenerationModelSettings>;
53
- doGenerateTexts(prompt: string, options: FunctionCallOptions): Promise<{
54
- response: {
55
- model: string;
56
- completion: string;
57
- stop_reason: string;
58
- };
59
- textGenerationResults: {
60
- text: string;
61
- finishReason: TextGenerationFinishReason;
62
- }[];
63
- }>;
64
- restoreGeneratedTexts(rawResponse: unknown): {
65
- response: {
66
- model: string;
67
- completion: string;
68
- stop_reason: string;
69
- };
70
- textGenerationResults: {
71
- text: string;
72
- finishReason: TextGenerationFinishReason;
73
- }[];
74
- };
75
- processTextGenerationResponse(response: AnthropicTextGenerationResponse): {
76
- response: {
77
- model: string;
78
- completion: string;
79
- stop_reason: string;
80
- };
81
- textGenerationResults: {
82
- text: string;
83
- finishReason: TextGenerationFinishReason;
84
- }[];
85
- };
86
- private translateFinishReason;
87
- doStreamText(prompt: string, options: FunctionCallOptions): Promise<AsyncIterable<Delta<{
88
- model: string;
89
- completion: string;
90
- stop_reason: string | null;
91
- }>>>;
92
- extractTextDelta(delta: unknown): string;
93
- /**
94
- * Returns this model with a text prompt template.
95
- */
96
- withTextPrompt(): PromptTemplateTextStreamingModel<string, string, AnthropicTextGenerationModelSettings, this>;
97
- /**
98
- * Returns this model with an instruction prompt template.
99
- */
100
- withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, string, AnthropicTextGenerationModelSettings, this>;
101
- /**
102
- * Returns this model with a chat prompt template.
103
- */
104
- withChatPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, string, AnthropicTextGenerationModelSettings, this>;
105
- withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, AnthropicTextGenerationModelSettings, this>;
106
- withSettings(additionalSettings: Partial<AnthropicTextGenerationModelSettings>): this;
107
- }
108
- declare const anthropicTextGenerationResponseSchema: z.ZodObject<{
109
- completion: z.ZodString;
110
- stop_reason: z.ZodString;
111
- model: z.ZodString;
112
- }, "strip", z.ZodTypeAny, {
113
- model: string;
114
- completion: string;
115
- stop_reason: string;
116
- }, {
117
- model: string;
118
- completion: string;
119
- stop_reason: string;
120
- }>;
121
- export type AnthropicTextGenerationResponse = z.infer<typeof anthropicTextGenerationResponseSchema>;
122
- export type AnthropicTextGenerationResponseFormatType<T> = {
123
- stream: boolean;
124
- handler: ResponseHandler<T>;
125
- };
126
- export declare const AnthropicTextGenerationResponseFormat: {
127
- /**
128
- * Returns the response as a JSON object.
129
- */
130
- json: {
131
- stream: false;
132
- handler: ResponseHandler<{
133
- model: string;
134
- completion: string;
135
- stop_reason: string;
136
- }>;
137
- };
138
- /**
139
- * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
140
- * of the response stream.
141
- */
142
- deltaIterable: {
143
- stream: true;
144
- handler: ({ response }: {
145
- response: Response;
146
- }) => Promise<AsyncIterable<Delta<{
147
- model: string;
148
- completion: string;
149
- stop_reason: string | null;
150
- }>>>;
151
- };
152
- };
153
- export {};
@@ -1,250 +0,0 @@
1
- import { z } from "zod";
2
- import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
3
- import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
4
- import { zodSchema } from "../../core/schema/ZodSchema.js";
5
- import { parseJSON } from "../../core/schema/parseJSON.js";
6
- import { validateTypes } from "../../core/schema/validateTypes.js";
7
- import { AbstractModel } from "../../model-function/AbstractModel.js";
8
- import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
9
- import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
10
- import { AsyncQueue } from "../../util/AsyncQueue.js";
11
- import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
12
- import { AnthropicApiConfiguration } from "./AnthropicApiConfiguration.js";
13
- import { failedAnthropicCallResponseHandler } from "./AnthropicError.js";
14
- import { chat, instruction, text } from "./AnthropicPromptTemplate.js";
15
- export const ANTHROPIC_TEXT_GENERATION_MODELS = {
16
- "claude-instant-1": {
17
- contextWindowSize: 100000,
18
- },
19
- "claude-instant-1.2": {
20
- contextWindowSize: 100000,
21
- },
22
- "claude-2": {
23
- contextWindowSize: 200000,
24
- },
25
- "claude-2.0": {
26
- contextWindowSize: 100000,
27
- },
28
- "claude-2.1": {
29
- contextWindowSize: 200000,
30
- },
31
- };
32
- /**
33
- * Create a text generation model that calls the Anthropic API.
34
- *
35
- * @see https://docs.anthropic.com/claude/reference/complete_post
36
- */
37
- export class AnthropicTextGenerationModel extends AbstractModel {
38
- constructor(settings) {
39
- super({ settings });
40
- Object.defineProperty(this, "provider", {
41
- enumerable: true,
42
- configurable: true,
43
- writable: true,
44
- value: "anthropic"
45
- });
46
- Object.defineProperty(this, "contextWindowSize", {
47
- enumerable: true,
48
- configurable: true,
49
- writable: true,
50
- value: void 0
51
- });
52
- Object.defineProperty(this, "tokenizer", {
53
- enumerable: true,
54
- configurable: true,
55
- writable: true,
56
- value: undefined
57
- });
58
- Object.defineProperty(this, "countPromptTokens", {
59
- enumerable: true,
60
- configurable: true,
61
- writable: true,
62
- value: undefined
63
- });
64
- this.contextWindowSize =
65
- ANTHROPIC_TEXT_GENERATION_MODELS[this.settings.model].contextWindowSize;
66
- }
67
- get modelName() {
68
- return this.settings.model;
69
- }
70
- async callAPI(prompt, callOptions, options) {
71
- const api = this.settings.api ?? new AnthropicApiConfiguration();
72
- const responseFormat = options.responseFormat;
73
- const abortSignal = callOptions.run?.abortSignal;
74
- const userId = this.settings.userId;
75
- return callWithRetryAndThrottle({
76
- retry: api.retry,
77
- throttle: api.throttle,
78
- call: async () => postJsonToApi({
79
- url: api.assembleUrl(`/complete`),
80
- headers: api.headers({
81
- functionType: callOptions.functionType,
82
- functionId: callOptions.functionId,
83
- run: callOptions.run,
84
- callId: callOptions.callId,
85
- }),
86
- body: {
87
- model: this.settings.model,
88
- prompt,
89
- stream: responseFormat.stream,
90
- max_tokens_to_sample: this.settings.maxGenerationTokens ?? 100,
91
- temperature: this.settings.temperature,
92
- top_k: this.settings.topK,
93
- top_p: this.settings.topP,
94
- stop_sequences: this.settings.stopSequences,
95
- metadata: userId != null ? { user_id: userId } : undefined,
96
- },
97
- failedResponseHandler: failedAnthropicCallResponseHandler,
98
- successfulResponseHandler: responseFormat.handler,
99
- abortSignal,
100
- }),
101
- });
102
- }
103
- get settingsForEvent() {
104
- const eventSettingProperties = [
105
- ...textGenerationModelProperties,
106
- "temperature",
107
- "topK",
108
- "topP",
109
- "userId",
110
- ];
111
- return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
112
- }
113
- async doGenerateTexts(prompt, options) {
114
- return this.processTextGenerationResponse(await this.callAPI(prompt, options, {
115
- responseFormat: AnthropicTextGenerationResponseFormat.json,
116
- }));
117
- }
118
- restoreGeneratedTexts(rawResponse) {
119
- return this.processTextGenerationResponse(validateTypes({
120
- structure: rawResponse,
121
- schema: zodSchema(anthropicTextGenerationResponseSchema),
122
- }));
123
- }
124
- processTextGenerationResponse(response) {
125
- return {
126
- response,
127
- textGenerationResults: [
128
- {
129
- text: response.completion,
130
- finishReason: this.translateFinishReason(response.stop_reason),
131
- },
132
- ],
133
- };
134
- }
135
- translateFinishReason(finishReason) {
136
- switch (finishReason) {
137
- case "stop_sequence":
138
- return "stop";
139
- case "max_tokens":
140
- return "length";
141
- default:
142
- return "unknown";
143
- }
144
- }
145
- doStreamText(prompt, options) {
146
- return this.callAPI(prompt, options, {
147
- responseFormat: AnthropicTextGenerationResponseFormat.deltaIterable,
148
- });
149
- }
150
- extractTextDelta(delta) {
151
- const chunk = delta;
152
- return chunk.completion;
153
- }
154
- /**
155
- * Returns this model with a text prompt template.
156
- */
157
- withTextPrompt() {
158
- return this.withPromptTemplate(text());
159
- }
160
- /**
161
- * Returns this model with an instruction prompt template.
162
- */
163
- withInstructionPrompt() {
164
- return this.withPromptTemplate(instruction());
165
- }
166
- /**
167
- * Returns this model with a chat prompt template.
168
- */
169
- withChatPrompt() {
170
- return this.withPromptTemplate(chat());
171
- }
172
- withPromptTemplate(promptTemplate) {
173
- return new PromptTemplateTextStreamingModel({
174
- model: this.withSettings({
175
- stopSequences: [
176
- ...(this.settings.stopSequences ?? []),
177
- ...promptTemplate.stopSequences,
178
- ],
179
- }),
180
- promptTemplate,
181
- });
182
- }
183
- withSettings(additionalSettings) {
184
- return new AnthropicTextGenerationModel(Object.assign({}, this.settings, additionalSettings));
185
- }
186
- }
187
- const anthropicTextGenerationResponseSchema = z.object({
188
- completion: z.string(),
189
- stop_reason: z.string(),
190
- model: z.string(),
191
- });
192
- const anthropicTextStreamChunkSchema = z.object({
193
- completion: z.string(),
194
- stop_reason: z.string().nullable(),
195
- model: z.string(),
196
- });
197
- async function createAnthropicFullDeltaIterableQueue(stream) {
198
- const queue = new AsyncQueue();
199
- // process the stream asynchonously (no 'await' on purpose):
200
- parseEventSourceStream({ stream })
201
- .then(async (events) => {
202
- try {
203
- for await (const event of events) {
204
- if (event.event === "error") {
205
- queue.push({ type: "error", error: event.data });
206
- queue.close();
207
- return;
208
- }
209
- if (event.event !== "completion") {
210
- continue;
211
- }
212
- const data = event.data;
213
- const eventData = parseJSON({
214
- text: data,
215
- schema: zodSchema(anthropicTextStreamChunkSchema),
216
- });
217
- queue.push({ type: "delta", deltaValue: eventData });
218
- if (eventData.stop_reason != null) {
219
- queue.close();
220
- }
221
- }
222
- }
223
- catch (error) {
224
- queue.push({ type: "error", error });
225
- queue.close();
226
- }
227
- })
228
- .catch((error) => {
229
- queue.push({ type: "error", error });
230
- queue.close();
231
- });
232
- return queue;
233
- }
234
- export const AnthropicTextGenerationResponseFormat = {
235
- /**
236
- * Returns the response as a JSON object.
237
- */
238
- json: {
239
- stream: false,
240
- handler: createJsonResponseHandler(zodSchema(anthropicTextGenerationResponseSchema)),
241
- },
242
- /**
243
- * Returns an async iterable over the full deltas (all choices, including full current state at time of event)
244
- * of the response stream.
245
- */
246
- deltaIterable: {
247
- stream: true,
248
- handler: async ({ response }) => createAnthropicFullDeltaIterableQueue(response.body),
249
- },
250
- };
@@ -1,44 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- const streamText_js_1 = require("../../model-function/generate-text/streamText.cjs");
4
- const StreamingTestServer_js_1 = require("../../test/StreamingTestServer.cjs");
5
- const arrayFromAsync_js_1 = require("../../test/arrayFromAsync.cjs");
6
- const AnthropicApiConfiguration_js_1 = require("./AnthropicApiConfiguration.cjs");
7
- const AnthropicTextGenerationModel_js_1 = require("./AnthropicTextGenerationModel.cjs");
8
- describe("streamText", () => {
9
- const server = new StreamingTestServer_js_1.StreamingTestServer("https://api.anthropic.com/v1/complete");
10
- server.setupTestEnvironment();
11
- it("should return a text stream", async () => {
12
- server.responseChunks = [
13
- `event: completion\n` +
14
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
15
- `"completion":" Hello","stop_reason":null,"model":"claude-instant-1.2",` +
16
- `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
17
- `event: completion\n` +
18
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
19
- `"completion":", ","stop_reason":null,"model":"claude-instant-1.2",` +
20
- `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
21
- `event: completion\n` +
22
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
23
- `"completion":"world!","stop_reason":null,"model":"claude-instant-1.2",` +
24
- `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
25
- `event: ping\ndata: {"type": "ping"}\n\n`,
26
- `event: completion\n` +
27
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
28
- `"completion":"","stop_reason":"stop_sequence","model":"claude-instant-1.2",` +
29
- `"stop":"\\n\\nHuman:","log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
30
- ];
31
- const stream = await (0, streamText_js_1.streamText)(new AnthropicTextGenerationModel_js_1.AnthropicTextGenerationModel({
32
- api: new AnthropicApiConfiguration_js_1.AnthropicApiConfiguration({
33
- apiKey: "test-key",
34
- }),
35
- model: "claude-instant-1",
36
- }).withTextPrompt(), "hello");
37
- // note: space moved to last chunk bc of trimming
38
- expect(await (0, arrayFromAsync_js_1.arrayFromAsync)(stream)).toStrictEqual([
39
- "Hello",
40
- ",",
41
- " world!",
42
- ]);
43
- });
44
- });
@@ -1,42 +0,0 @@
1
- import { streamText } from "../../model-function/generate-text/streamText.js";
2
- import { StreamingTestServer } from "../../test/StreamingTestServer.js";
3
- import { arrayFromAsync } from "../../test/arrayFromAsync.js";
4
- import { AnthropicApiConfiguration } from "./AnthropicApiConfiguration.js";
5
- import { AnthropicTextGenerationModel } from "./AnthropicTextGenerationModel.js";
6
- describe("streamText", () => {
7
- const server = new StreamingTestServer("https://api.anthropic.com/v1/complete");
8
- server.setupTestEnvironment();
9
- it("should return a text stream", async () => {
10
- server.responseChunks = [
11
- `event: completion\n` +
12
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
13
- `"completion":" Hello","stop_reason":null,"model":"claude-instant-1.2",` +
14
- `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
15
- `event: completion\n` +
16
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
17
- `"completion":", ","stop_reason":null,"model":"claude-instant-1.2",` +
18
- `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
19
- `event: completion\n` +
20
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
21
- `"completion":"world!","stop_reason":null,"model":"claude-instant-1.2",` +
22
- `"stop":null,"log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
23
- `event: ping\ndata: {"type": "ping"}\n\n`,
24
- `event: completion\n` +
25
- `data: {"type":"completion","id":"compl_01Vtux5THXXB3eXhFSA5eTY6",` +
26
- `"completion":"","stop_reason":"stop_sequence","model":"claude-instant-1.2",` +
27
- `"stop":"\\n\\nHuman:","log_id":"compl_01Vtux5THXXB3eXhFSA5eTY6"}\n\n`,
28
- ];
29
- const stream = await streamText(new AnthropicTextGenerationModel({
30
- api: new AnthropicApiConfiguration({
31
- apiKey: "test-key",
32
- }),
33
- model: "claude-instant-1",
34
- }).withTextPrompt(), "hello");
35
- // note: space moved to last chunk bc of trimming
36
- expect(await arrayFromAsync(stream)).toStrictEqual([
37
- "Hello",
38
- ",",
39
- " world!",
40
- ]);
41
- });
42
- });
@@ -1,33 +0,0 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- var desc = Object.getOwnPropertyDescriptor(m, k);
5
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
- desc = { enumerable: true, get: function() { return m[k]; } };
7
- }
8
- Object.defineProperty(o, k2, desc);
9
- }) : (function(o, m, k, k2) {
10
- if (k2 === undefined) k2 = k;
11
- o[k2] = m[k];
12
- }));
13
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
- Object.defineProperty(o, "default", { enumerable: true, value: v });
15
- }) : function(o, v) {
16
- o["default"] = v;
17
- });
18
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
19
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
20
- };
21
- var __importStar = (this && this.__importStar) || function (mod) {
22
- if (mod && mod.__esModule) return mod;
23
- var result = {};
24
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
25
- __setModuleDefault(result, mod);
26
- return result;
27
- };
28
- Object.defineProperty(exports, "__esModule", { value: true });
29
- exports.AnthropicPrompt = exports.anthropic = void 0;
30
- __exportStar(require("./AnthropicApiConfiguration.cjs"), exports);
31
- exports.anthropic = __importStar(require("./AnthropicFacade.cjs"));
32
- exports.AnthropicPrompt = __importStar(require("./AnthropicPromptTemplate.cjs"));
33
- __exportStar(require("./AnthropicTextGenerationModel.cjs"), exports);
@@ -1,5 +0,0 @@
1
- export * from "./AnthropicApiConfiguration.js";
2
- export { AnthropicErrorData } from "./AnthropicError.js";
3
- export * as anthropic from "./AnthropicFacade.js";
4
- export * as AnthropicPrompt from "./AnthropicPromptTemplate.js";
5
- export * from "./AnthropicTextGenerationModel.js";
@@ -1,4 +0,0 @@
1
- export * from "./AnthropicApiConfiguration.js";
2
- export * as anthropic from "./AnthropicFacade.js";
3
- export * as AnthropicPrompt from "./AnthropicPromptTemplate.js";
4
- export * from "./AnthropicTextGenerationModel.js";