@mastra/core 0.2.0-alpha.84 → 0.2.0-alpha.87
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/action/index.d.ts +2 -2
- package/dist/agent/index.d.ts +6 -6
- package/dist/agent/index.js +1 -1
- package/dist/chunk-2ISN3AA7.js +392 -0
- package/dist/chunk-2J5OHBUG.js +24 -0
- package/dist/{chunk-Y7TKCKRI.js → chunk-5NQ3MEZM.js} +8 -8
- package/dist/{chunk-SAXFXAKK.js → chunk-73XDWPXJ.js} +41 -24
- package/dist/{chunk-3IV6WDJY.js → chunk-D66E7L7R.js} +1 -1
- package/dist/{chunk-3THCTISX.js → chunk-EVYBUFXB.js} +12 -8
- package/dist/{chunk-PRYZIZXD.js → chunk-I3MJB67Z.js} +8 -8
- package/dist/{chunk-42THOFKJ.js → chunk-RCS7AVH6.js} +1 -1
- package/dist/chunk-RLPH6TDJ.js +30 -0
- package/dist/embeddings/index.d.ts +2 -2
- package/dist/embeddings/index.js +1 -1
- package/dist/eval/index.d.ts +2 -2
- package/dist/filter/index.d.ts +6 -6
- package/dist/{index-62DyKJRU.d.ts → index-Duqv1Yom.d.ts} +340 -322
- package/dist/index.d.ts +6 -6
- package/dist/index.js +11 -11
- package/dist/integration/index.d.ts +7 -7
- package/dist/llm/index.d.ts +3 -3
- package/dist/llm/model/providers/anthropic-vertex.d.ts +31 -0
- package/dist/llm/model/providers/anthropic-vertex.js +23 -0
- package/dist/llm/model/providers/anthropic.d.ts +29 -0
- package/dist/llm/model/providers/anthropic.js +21 -0
- package/dist/llm/model/providers/azure.d.ts +48 -0
- package/dist/llm/model/providers/azure.js +50 -0
- package/dist/llm/model/providers/baseten.d.ts +33 -0
- package/dist/llm/model/providers/baseten.js +29 -0
- package/dist/llm/model/providers/bedrock.d.ts +32 -0
- package/dist/llm/model/providers/bedrock.js +24 -0
- package/dist/llm/model/providers/cerebras.d.ts +30 -0
- package/dist/llm/model/providers/cerebras.js +22 -0
- package/dist/llm/model/providers/cohere.d.ts +30 -0
- package/dist/llm/model/providers/cohere.js +22 -0
- package/dist/llm/model/providers/deepinfra.d.ts +30 -0
- package/dist/llm/model/providers/deepinfra.js +22 -0
- package/dist/llm/model/providers/deepseek.d.ts +30 -0
- package/dist/llm/model/providers/deepseek.js +22 -0
- package/dist/llm/model/providers/fireworks.d.ts +35 -0
- package/dist/llm/model/providers/fireworks.js +40 -0
- package/dist/llm/model/providers/google-vertex.d.ts +48 -0
- package/dist/llm/model/providers/google-vertex.js +22 -0
- package/dist/llm/model/providers/google.d.ts +54 -0
- package/dist/llm/model/providers/google.js +23 -0
- package/dist/llm/model/providers/grok.d.ts +32 -0
- package/dist/llm/model/providers/grok.js +22 -0
- package/dist/llm/model/providers/groq.d.ts +37 -0
- package/dist/llm/model/providers/groq.js +42 -0
- package/dist/llm/model/providers/lmstudio.d.ts +29 -0
- package/dist/llm/model/providers/lmstudio.js +22 -0
- package/dist/llm/model/providers/mistral.d.ts +30 -0
- package/dist/llm/model/providers/mistral.js +22 -0
- package/dist/llm/model/providers/mock.d.ts +30 -0
- package/dist/llm/model/providers/mock.js +83 -0
- package/dist/llm/model/providers/ollama.d.ts +31 -0
- package/dist/llm/model/providers/ollama.js +23 -0
- package/dist/llm/model/providers/openai-compat.d.ts +39 -0
- package/dist/llm/model/providers/openai-compat.js +6 -0
- package/dist/llm/model/providers/openai.d.ts +34 -0
- package/dist/llm/model/providers/openai.js +6 -0
- package/dist/llm/model/providers/openai.test.d.ts +2 -0
- package/dist/llm/model/providers/openai.test.js +220 -0
- package/dist/llm/model/providers/perplexity.d.ts +30 -0
- package/dist/llm/model/providers/perplexity.js +22 -0
- package/dist/llm/model/providers/portkey.d.ts +34 -0
- package/dist/llm/model/providers/portkey.js +22 -0
- package/dist/llm/model/providers/togetherai.d.ts +30 -0
- package/dist/llm/model/providers/togetherai.js +22 -0
- package/dist/mastra/index.d.ts +3 -3
- package/dist/memory/index.d.ts +1 -1
- package/dist/memory/index.js +1 -1
- package/dist/model-QGWIMOSx.d.ts +31 -0
- package/dist/relevance/index.d.ts +2 -2
- package/dist/relevance/index.js +2 -2
- package/dist/storage/index.d.ts +3 -3
- package/dist/storage/index.js +1 -1
- package/dist/tools/index.d.ts +3 -3
- package/dist/vector/index.js +2 -2
- package/dist/{workflow-DGktrYAL.d.ts → workflow-DQ8CtzzU.d.ts} +1 -1
- package/dist/workflows/index.d.ts +4 -4
- package/package.json +30 -14
package/dist/index.d.ts
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { M as Metric } from './metric-D2V4CR8D.js';
|
|
2
2
|
export { a as MetricResult } from './metric-D2V4CR8D.js';
|
|
3
|
-
import {
|
|
4
|
-
export {
|
|
3
|
+
import { q as ToolAction, A as Agent$1, r as AgentConfig, M as MastraStorage$1, a as MastraMemory$1, s as ToolExecutionContext, W as WorkflowOptions } from './index-Duqv1Yom.js';
|
|
4
|
+
export { az as ActionContext, aq as AiMessageType, ah as AmazonConfig, ag as AmazonModel, Q as AnthropicConfig, P as AnthropicModel, aj as AnthropicVertexConfig, ai as AnthropicVertexModel, ad as AzureConfig, ac as AzureModel, ay as BaseCondition, al as BaseStructuredOutputType, a2 as BaseTenConfig, a1 as BasetenModel, ab as CohereConfig, aa as CohereModel, z as CoreAssistantMessage, x as CoreMessage, y as CoreSystemMessage, C as CoreTool, D as CoreToolMessage, B as CoreUserMessage, a9 as CustomModelConfig, af as DeepseekConfig, ae as DeepseekModel, aI as DependencyCheckOutput, F as EmbedManyResult, E as EmbedResult, aO as ExtractSchemaFromStep, aR as ExtractSchemaType, aP as ExtractStepResult, a4 as FireworksConfig, a3 as FireworksModel, G as GenerateReturn, K as GoogleConfig, N as GoogleGenerativeAISettings, J as GoogleModel, V as GroqConfig, U as GroqModel, g as LLMInnerStreamOptions, ak as LLMProvider, h as LLMStreamObjectOptions, i as LLMStreamOptions, f as LLMTextObjectOptions, e as LLMTextOptions, a0 as LMStudioConfig, $ as LMStudioModel, au as MemoryConfig, at as MessageResponse, ar as MessageType, a6 as MistralConfig, a5 as MistralModel, b as ModelConfig, H as OpenAIConfig, O as OpenAIModel, ap as OutputType, aS as PathsToStringProps, Y as PerplexityConfig, X as PerplexityModel, aF as ResolverFunctionInput, aG as ResolverFunctionOutput, R as RetryConfig, av as SharedMemoryConfig, k as StepAction, aB as StepCondition, m as StepConfig, aA as StepDef, j as StepExecutionContext, p as StepGraph, aN as StepId, aQ as StepInputType, aw as StepNode, n as StepResult, l as StepVariableType, u as StorageColumn, w as StorageGetMessagesArg, as as StorageThreadType, S as StreamReturn, ao as StructuredOutput, an as StructuredOutputArrayItem, am as StructuredOutputType, aH as SubscriberFunctionOutput, t as TABLE_NAMES, _ as TogetherAiConfig, Z as TogetherAiModel, ax as VariableReference, aK as WorkflowActionParams, aL as WorkflowActions, aJ as WorkflowActors, aC as 
WorkflowContext, aE as WorkflowEvent, aD as WorkflowLogMessage, v as WorkflowRow, o as WorkflowRunState, aM as WorkflowState, a8 as XGrokConfig, a7 as XGrokModel } from './index-Duqv1Yom.js';
|
|
5
5
|
export { O as OtelConfig, S as SamplingStrategy, T as Telemetry } from './telemetry-oCUM52DG.js';
|
|
6
6
|
import { MastraBase as MastraBase$1 } from './base.js';
|
|
7
7
|
import { R as RegisteredLogger, a as LogLevel, T as TransportMap, L as Logger } from './index-B0-NXUYv.js';
|
|
@@ -25,8 +25,8 @@ import { MastraTTS as MastraTTS$1, TTSConfig } from './tts/index.js';
|
|
|
25
25
|
export { delay, jsonSchemaPropertiesToTSTypes, jsonSchemaToModel } from './utils.js';
|
|
26
26
|
import { MastraVector as MastraVector$1 } from './vector/index.js';
|
|
27
27
|
export { IndexStats, QueryResult } from './vector/index.js';
|
|
28
|
-
import { S as Step, W as Workflow$1 } from './workflow-
|
|
29
|
-
export { c as createStep } from './workflow-
|
|
28
|
+
import { S as Step, W as Workflow$1 } from './workflow-DQ8CtzzU.js';
|
|
29
|
+
export { c as createStep } from './workflow-DQ8CtzzU.js';
|
|
30
30
|
export { getStepResult, isErrorEvent, isTransitionEvent, isVariableReference } from './workflows/index.js';
|
|
31
31
|
export { AvailableHooks, executeHook, registerHook } from './hooks/index.js';
|
|
32
32
|
export { ArrayOperator, BaseFilterTranslator, BasicOperator, ElementOperator, FieldCondition, Filter, LogicalOperator, NumericOperator, OperatorCondition, OperatorSupport, QueryOperator, RegexOperator } from './filter/index.js';
|
|
@@ -59,8 +59,8 @@ declare abstract class MastraDeployer extends MastraDeployer$1 {
|
|
|
59
59
|
});
|
|
60
60
|
}
|
|
61
61
|
|
|
62
|
-
declare function embed(value: string,
|
|
63
|
-
declare function embedMany(value: string[],
|
|
62
|
+
declare function embed(value: string, embedding: EmbeddingOptions): Promise<ai.EmbedResult<string>>;
|
|
63
|
+
declare function embedMany(value: string[], embedding: EmbeddingOptions): Promise<ai.EmbedManyResult<string>>;
|
|
64
64
|
|
|
65
65
|
declare abstract class MastraStorage extends MastraStorage$1 {
|
|
66
66
|
constructor({ name }: {
|
package/dist/index.js
CHANGED
|
@@ -1,18 +1,18 @@
|
|
|
1
|
-
import { MastraMemory } from './chunk-
|
|
2
|
-
export { CohereRelevanceScorer, MastraAgentRelevanceScorer, createSimilarityPrompt } from './chunk-
|
|
3
|
-
import { MastraStorage, MastraStorageLibSql } from './chunk-
|
|
1
|
+
import { MastraMemory } from './chunk-I3MJB67Z.js';
|
|
2
|
+
export { CohereRelevanceScorer, MastraAgentRelevanceScorer, createSimilarityPrompt } from './chunk-D66E7L7R.js';
|
|
3
|
+
import { MastraStorage, MastraStorageLibSql } from './chunk-EVYBUFXB.js';
|
|
4
4
|
import { MastraTTS } from './chunk-SLWM3SEH.js';
|
|
5
|
-
import { MastraVector } from './chunk-
|
|
5
|
+
import { MastraVector } from './chunk-RCS7AVH6.js';
|
|
6
6
|
import { Workflow } from './chunk-OOZGW5CY.js';
|
|
7
7
|
export { Step, createStep, getStepResult, isErrorEvent, isTransitionEvent, isVariableReference } from './chunk-OOZGW5CY.js';
|
|
8
|
-
export { FilterOperators, MastraEngine, MockMastraEngine, PropertyType } from './chunk-U25DBVJZ.js';
|
|
9
8
|
export { Metric, evaluate } from './chunk-QLN26TPI.js';
|
|
9
|
+
export { FilterOperators, MastraEngine, MockMastraEngine, PropertyType } from './chunk-U25DBVJZ.js';
|
|
10
10
|
export { BaseFilterTranslator } from './chunk-RZYBOMIG.js';
|
|
11
11
|
import { Integration, OpenAPIToolset } from './chunk-42DYOLDV.js';
|
|
12
12
|
import { Tool } from './chunk-VOUPGVRD.js';
|
|
13
13
|
export { createTool } from './chunk-VOUPGVRD.js';
|
|
14
14
|
export { Mastra } from './chunk-EH2PG7Q6.js';
|
|
15
|
-
import { Agent } from './chunk-
|
|
15
|
+
import { Agent } from './chunk-73XDWPXJ.js';
|
|
16
16
|
export { AvailableHooks, executeHook, registerHook } from './chunk-HBTQNIAX.js';
|
|
17
17
|
export { Telemetry } from './chunk-SDKEPBBH.js';
|
|
18
18
|
import './chunk-6WJREZ5F.js';
|
|
@@ -23,7 +23,7 @@ import './chunk-CQYUMNLZ.js';
|
|
|
23
23
|
import { MastraBase } from './chunk-JCRGAEY6.js';
|
|
24
24
|
import { createLogger } from './chunk-TJK6TGSR.js';
|
|
25
25
|
export { LogLevel, Logger, LoggerTransport, MultiLogger, RegisteredLogger, combineLoggers, noopLogger } from './chunk-TJK6TGSR.js';
|
|
26
|
-
import { embed, embedMany } from './chunk-
|
|
26
|
+
import { embed, embedMany } from './chunk-5NQ3MEZM.js';
|
|
27
27
|
import { __name } from './chunk-AJJZUHB4.js';
|
|
28
28
|
|
|
29
29
|
// src/agent/index.warning.ts
|
|
@@ -57,14 +57,14 @@ __name(_MastraDeployer, "MastraDeployer");
|
|
|
57
57
|
var MastraDeployer2 = _MastraDeployer;
|
|
58
58
|
|
|
59
59
|
// src/embeddings/index.warning.ts
|
|
60
|
-
async function embed2(value,
|
|
60
|
+
async function embed2(value, embedding) {
|
|
61
61
|
console.warn('Please import "embed" from "@mastra/core/embeddings" instead of "@mastra/core"');
|
|
62
|
-
return embed(value,
|
|
62
|
+
return embed(value, embedding);
|
|
63
63
|
}
|
|
64
64
|
__name(embed2, "embed");
|
|
65
|
-
async function embedMany2(value,
|
|
65
|
+
async function embedMany2(value, embedding) {
|
|
66
66
|
console.warn('Please import "embedMany" from "@mastra/core/embeddings" instead of "@mastra/core"');
|
|
67
|
-
return embedMany(value,
|
|
67
|
+
return embedMany(value, embedding);
|
|
68
68
|
}
|
|
69
69
|
__name(embedMany2, "embedMany");
|
|
70
70
|
|
|
@@ -1,23 +1,23 @@
|
|
|
1
1
|
import '../telemetry-oCUM52DG.js';
|
|
2
|
-
import {
|
|
3
|
-
import {
|
|
2
|
+
import { q as ToolAction } from '../index-Duqv1Yom.js';
|
|
3
|
+
import { W as Workflow } from '../workflow-DQ8CtzzU.js';
|
|
4
4
|
import '@opentelemetry/api';
|
|
5
5
|
import '@opentelemetry/sdk-node';
|
|
6
6
|
import '@opentelemetry/sdk-trace-base';
|
|
7
|
-
import '
|
|
7
|
+
import 'ai';
|
|
8
|
+
import 'json-schema';
|
|
8
9
|
import 'zod';
|
|
9
10
|
import '../base.js';
|
|
10
11
|
import '../index-B0-NXUYv.js';
|
|
11
12
|
import 'pino';
|
|
12
13
|
import 'stream';
|
|
13
|
-
import '../engine-EwEG-4Iv.js';
|
|
14
|
-
import 'ai';
|
|
15
|
-
import 'json-schema';
|
|
16
14
|
import '../metric-D2V4CR8D.js';
|
|
17
|
-
import '../types-M16hSruO.js';
|
|
18
15
|
import 'sift';
|
|
16
|
+
import '../types-M16hSruO.js';
|
|
19
17
|
import '../vector/index.js';
|
|
18
|
+
import '../engine-EwEG-4Iv.js';
|
|
20
19
|
import '../tts/index.js';
|
|
20
|
+
import 'xstate';
|
|
21
21
|
|
|
22
22
|
declare class Integration<ToolsParams = void, ApiClient = void> {
|
|
23
23
|
name: string;
|
package/dist/llm/index.d.ts
CHANGED
|
@@ -1,18 +1,18 @@
|
|
|
1
1
|
import 'ai';
|
|
2
2
|
import 'json-schema';
|
|
3
3
|
import 'zod';
|
|
4
|
-
export { L as LLM } from '../index-
|
|
4
|
+
export { L as LLM } from '../index-Duqv1Yom.js';
|
|
5
5
|
import '../base.js';
|
|
6
6
|
import '../metric-D2V4CR8D.js';
|
|
7
|
-
import '../types-M16hSruO.js';
|
|
8
7
|
import '../telemetry-oCUM52DG.js';
|
|
9
8
|
import '@opentelemetry/api';
|
|
10
9
|
import '@opentelemetry/sdk-node';
|
|
11
10
|
import '@opentelemetry/sdk-trace-base';
|
|
12
|
-
import 'sift';
|
|
13
11
|
import '../index-B0-NXUYv.js';
|
|
14
12
|
import 'pino';
|
|
15
13
|
import 'stream';
|
|
14
|
+
import 'sift';
|
|
15
|
+
import '../types-M16hSruO.js';
|
|
16
16
|
import '../vector/index.js';
|
|
17
17
|
import '../engine-EwEG-4Iv.js';
|
|
18
18
|
import '../tts/index.js';
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
2
|
+
import 'ai';
|
|
3
|
+
import 'json-schema';
|
|
4
|
+
import 'zod';
|
|
5
|
+
import '../../../index-Duqv1Yom.js';
|
|
6
|
+
import '../../../base.js';
|
|
7
|
+
import '@opentelemetry/api';
|
|
8
|
+
import '../../../index-B0-NXUYv.js';
|
|
9
|
+
import 'pino';
|
|
10
|
+
import 'stream';
|
|
11
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
12
|
+
import '@opentelemetry/sdk-node';
|
|
13
|
+
import '@opentelemetry/sdk-trace-base';
|
|
14
|
+
import '../../../metric-D2V4CR8D.js';
|
|
15
|
+
import 'sift';
|
|
16
|
+
import '../../../types-M16hSruO.js';
|
|
17
|
+
import '../../../vector/index.js';
|
|
18
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
19
|
+
import '../../../tts/index.js';
|
|
20
|
+
|
|
21
|
+
type AnthropicVertexModel = 'claude-3-5-sonnet@20240620' | 'claude-3-opus@20240229' | 'claude-3-sonnet@20240229' | 'claude-3-haiku@20240307' | (string & {});
|
|
22
|
+
declare class AnthropicVertex extends MastraLLM {
|
|
23
|
+
constructor({ name, region, projectId, apiKey, }: {
|
|
24
|
+
name?: AnthropicVertexModel;
|
|
25
|
+
region?: string;
|
|
26
|
+
projectId?: string;
|
|
27
|
+
apiKey?: string;
|
|
28
|
+
});
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export { AnthropicVertex, type AnthropicVertexModel };
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createAnthropicVertex } from 'anthropic-vertex-ai';
|
|
7
|
+
|
|
8
|
+
var _AnthropicVertex = class _AnthropicVertex extends MastraLLM {
|
|
9
|
+
constructor({ name = "claude-3-5-sonnet@20240620", region = process.env.GOOGLE_VERTEX_REGION, projectId = process.env.GOOGLE_VERTEX_PROJECT_ID, apiKey = process.env.ANTHROPIC_API_KEY ?? "" }) {
|
|
10
|
+
const anthropicVertex = createAnthropicVertex({
|
|
11
|
+
region,
|
|
12
|
+
projectId,
|
|
13
|
+
apiKey
|
|
14
|
+
});
|
|
15
|
+
super({
|
|
16
|
+
model: anthropicVertex(name)
|
|
17
|
+
});
|
|
18
|
+
}
|
|
19
|
+
};
|
|
20
|
+
__name(_AnthropicVertex, "AnthropicVertex");
|
|
21
|
+
var AnthropicVertex = _AnthropicVertex;
|
|
22
|
+
|
|
23
|
+
export { AnthropicVertex };
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
2
|
+
import 'ai';
|
|
3
|
+
import 'json-schema';
|
|
4
|
+
import 'zod';
|
|
5
|
+
import '../../../index-Duqv1Yom.js';
|
|
6
|
+
import '../../../base.js';
|
|
7
|
+
import '@opentelemetry/api';
|
|
8
|
+
import '../../../index-B0-NXUYv.js';
|
|
9
|
+
import 'pino';
|
|
10
|
+
import 'stream';
|
|
11
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
12
|
+
import '@opentelemetry/sdk-node';
|
|
13
|
+
import '@opentelemetry/sdk-trace-base';
|
|
14
|
+
import '../../../metric-D2V4CR8D.js';
|
|
15
|
+
import 'sift';
|
|
16
|
+
import '../../../types-M16hSruO.js';
|
|
17
|
+
import '../../../vector/index.js';
|
|
18
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
19
|
+
import '../../../tts/index.js';
|
|
20
|
+
|
|
21
|
+
type AnthropicModel = 'claude-3-5-sonnet-20241022' | 'claude-3-5-sonnet-20240620' | 'claude-3-5-haiku-20241022' | 'claude-3-opus-20240229' | 'claude-3-sonnet-20240229' | 'claude-3-haiku-20240307' | (string & {});
|
|
22
|
+
declare class Anthropic extends MastraLLM {
|
|
23
|
+
constructor({ name, apiKey }: {
|
|
24
|
+
name?: AnthropicModel;
|
|
25
|
+
apiKey?: string;
|
|
26
|
+
});
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
export { Anthropic, type AnthropicModel };
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createAnthropic } from '@ai-sdk/anthropic';
|
|
7
|
+
|
|
8
|
+
var _Anthropic = class _Anthropic extends MastraLLM {
|
|
9
|
+
constructor({ name, apiKey }) {
|
|
10
|
+
const anthropicModel = createAnthropic({
|
|
11
|
+
apiKey: apiKey || process.env.ANTHROPIC_API_KEY
|
|
12
|
+
});
|
|
13
|
+
super({
|
|
14
|
+
model: anthropicModel(name || "claude-3-5-sonnet-20240620")
|
|
15
|
+
});
|
|
16
|
+
}
|
|
17
|
+
};
|
|
18
|
+
__name(_Anthropic, "Anthropic");
|
|
19
|
+
var Anthropic = _Anthropic;
|
|
20
|
+
|
|
21
|
+
export { Anthropic };
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { OpenAIChatSettings } from '@ai-sdk/openai/internal';
|
|
2
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
3
|
+
import 'ai';
|
|
4
|
+
import 'json-schema';
|
|
5
|
+
import 'zod';
|
|
6
|
+
import '../../../index-Duqv1Yom.js';
|
|
7
|
+
import '../../../base.js';
|
|
8
|
+
import '@opentelemetry/api';
|
|
9
|
+
import '../../../index-B0-NXUYv.js';
|
|
10
|
+
import 'pino';
|
|
11
|
+
import 'stream';
|
|
12
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
13
|
+
import '@opentelemetry/sdk-node';
|
|
14
|
+
import '@opentelemetry/sdk-trace-base';
|
|
15
|
+
import '../../../metric-D2V4CR8D.js';
|
|
16
|
+
import 'sift';
|
|
17
|
+
import '../../../types-M16hSruO.js';
|
|
18
|
+
import '../../../vector/index.js';
|
|
19
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
20
|
+
import '../../../tts/index.js';
|
|
21
|
+
|
|
22
|
+
type AzureModel = 'gpt-35-turbo-instruct' | (string & {});
|
|
23
|
+
declare class Azure extends MastraLLM {
|
|
24
|
+
constructor({ name, resourceName, apiKey, apiVersion, baseURL, headers, fetch, settings, }: {
|
|
25
|
+
name?: AzureModel;
|
|
26
|
+
resourceName?: string;
|
|
27
|
+
apiKey?: string;
|
|
28
|
+
apiVersion?: string;
|
|
29
|
+
baseURL?: string;
|
|
30
|
+
headers?: Record<string, string>;
|
|
31
|
+
fetch?: typeof globalThis.fetch;
|
|
32
|
+
settings?: OpenAIChatSettings;
|
|
33
|
+
});
|
|
34
|
+
}
|
|
35
|
+
declare class AzureReasoning extends MastraLLM {
|
|
36
|
+
constructor({ name, resourceName, apiKey, apiVersion, baseURL, headers, fetch, settings, }: {
|
|
37
|
+
name?: AzureModel;
|
|
38
|
+
resourceName?: string;
|
|
39
|
+
apiKey?: string;
|
|
40
|
+
apiVersion?: string;
|
|
41
|
+
baseURL?: string;
|
|
42
|
+
headers?: Record<string, string>;
|
|
43
|
+
fetch?: typeof globalThis.fetch;
|
|
44
|
+
settings?: OpenAIChatSettings;
|
|
45
|
+
});
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
export { Azure, type AzureModel, AzureReasoning };
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createAzure } from '@ai-sdk/azure';
|
|
7
|
+
import { wrapLanguageModel, extractReasoningMiddleware } from 'ai';
|
|
8
|
+
|
|
9
|
+
var _Azure = class _Azure extends MastraLLM {
|
|
10
|
+
constructor({ name = "gpt-35-turbo-instruct", resourceName = process.env.AZURE_RESOURCE_NAME || "", apiKey = process.env.AZURE_API_KEY || "", apiVersion, baseURL, headers, fetch, settings }) {
|
|
11
|
+
const azure = createAzure({
|
|
12
|
+
resourceName,
|
|
13
|
+
apiKey,
|
|
14
|
+
apiVersion,
|
|
15
|
+
baseURL,
|
|
16
|
+
headers,
|
|
17
|
+
fetch
|
|
18
|
+
});
|
|
19
|
+
super({
|
|
20
|
+
model: azure(name, settings)
|
|
21
|
+
});
|
|
22
|
+
}
|
|
23
|
+
};
|
|
24
|
+
__name(_Azure, "Azure");
|
|
25
|
+
var Azure = _Azure;
|
|
26
|
+
var _AzureReasoning = class _AzureReasoning extends MastraLLM {
|
|
27
|
+
constructor({ name = "gpt-35-turbo-instruct", resourceName = process.env.AZURE_RESOURCE_NAME || "", apiKey = process.env.AZURE_API_KEY || "", apiVersion, baseURL, headers, fetch, settings }) {
|
|
28
|
+
const azure = createAzure({
|
|
29
|
+
resourceName,
|
|
30
|
+
apiKey,
|
|
31
|
+
apiVersion,
|
|
32
|
+
baseURL,
|
|
33
|
+
headers,
|
|
34
|
+
fetch
|
|
35
|
+
});
|
|
36
|
+
const enhancedModel = wrapLanguageModel({
|
|
37
|
+
model: azure(name, settings),
|
|
38
|
+
middleware: extractReasoningMiddleware({
|
|
39
|
+
tagName: "think"
|
|
40
|
+
})
|
|
41
|
+
});
|
|
42
|
+
super({
|
|
43
|
+
model: enhancedModel
|
|
44
|
+
});
|
|
45
|
+
}
|
|
46
|
+
};
|
|
47
|
+
__name(_AzureReasoning, "AzureReasoning");
|
|
48
|
+
var AzureReasoning = _AzureReasoning;
|
|
49
|
+
|
|
50
|
+
export { Azure, AzureReasoning };
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { OpenAIChatSettings } from '@ai-sdk/openai/internal';
|
|
2
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
3
|
+
import 'ai';
|
|
4
|
+
import 'json-schema';
|
|
5
|
+
import 'zod';
|
|
6
|
+
import '../../../index-Duqv1Yom.js';
|
|
7
|
+
import '../../../base.js';
|
|
8
|
+
import '@opentelemetry/api';
|
|
9
|
+
import '../../../index-B0-NXUYv.js';
|
|
10
|
+
import 'pino';
|
|
11
|
+
import 'stream';
|
|
12
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
13
|
+
import '@opentelemetry/sdk-node';
|
|
14
|
+
import '@opentelemetry/sdk-trace-base';
|
|
15
|
+
import '../../../metric-D2V4CR8D.js';
|
|
16
|
+
import 'sift';
|
|
17
|
+
import '../../../types-M16hSruO.js';
|
|
18
|
+
import '../../../vector/index.js';
|
|
19
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
20
|
+
import '../../../tts/index.js';
|
|
21
|
+
|
|
22
|
+
type BasetenModel = 'llama-3.1-70b-instruct' | 'qwen2.5-7b-math-instruct' | 'qwen2.5-14b-instruct' | 'qwen2.5-32b-coder-instruct' | 'llama-3.1-8b-instruct' | 'llama-3.1-nemetron-70b' | 'llama-3.2-90b-vision-instruct' | 'llama-3.1-405b-instruct' | 'ultravox-v0.4' | 'llama-3.2-1b-vision-instruct' | 'llama-3-70b-instruct' | 'llama-3-8b-instruct' | 'mistral-7b-instruct' | 'qwen2.5-14b-coder-instruct' | 'qwen2.5-7b-coder-instruct' | 'qwen2.5-72b-math-instruct' | 'qwen2.5-72b-instruct' | 'qwen2.5-32b-instruct' | 'qwen2.5-7b-instruct' | 'qwen2.5-3b-instruct' | 'pixtral-12b' | 'phi-3.5-mini-instruct' | 'gemma-2-9b' | 'gemma-2-27b' | 'phi-3-mini-128k-instruct' | 'phi-3-mini-4k-instruct' | 'zephyr-7b-alpha' | 'mixtral-8x7b-instruct' | 'mixtral-8x22b' | (string & {});
|
|
23
|
+
declare class BaseTen extends MastraLLM {
|
|
24
|
+
constructor({ name, apiKey, baseURL, fetch, settings, }?: {
|
|
25
|
+
name?: BasetenModel;
|
|
26
|
+
apiKey?: string;
|
|
27
|
+
baseURL?: string;
|
|
28
|
+
fetch?: typeof globalThis.fetch;
|
|
29
|
+
settings?: OpenAIChatSettings;
|
|
30
|
+
});
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
export { BaseTen, type BasetenModel };
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { openaiCompat } from '../../../chunk-RLPH6TDJ.js';
|
|
2
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
3
|
+
import '../../../chunk-LUULSM4U.js';
|
|
4
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
5
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
6
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
7
|
+
|
|
8
|
+
// src/llm/model/providers/baseten.ts
|
|
9
|
+
var _BaseTen = class _BaseTen extends MastraLLM {
|
|
10
|
+
constructor({ name = "llama-3.1-70b-instruct", apiKey = process.env.BASETEN_API_KEY || "", baseURL = "https://bridge.baseten.co/v1/direct", fetch, settings } = {}) {
|
|
11
|
+
if (fetch) {
|
|
12
|
+
throw new Error("Custom fetch is required to use BaseTen. See https://docs.baseten.co/api-reference/openai for more information");
|
|
13
|
+
}
|
|
14
|
+
const baseten = openaiCompat({
|
|
15
|
+
baseURL,
|
|
16
|
+
modelName: name,
|
|
17
|
+
apiKey,
|
|
18
|
+
fetch,
|
|
19
|
+
settings
|
|
20
|
+
});
|
|
21
|
+
super({
|
|
22
|
+
model: baseten
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
};
|
|
26
|
+
__name(_BaseTen, "BaseTen");
|
|
27
|
+
var BaseTen = _BaseTen;
|
|
28
|
+
|
|
29
|
+
export { BaseTen };
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
2
|
+
import 'ai';
|
|
3
|
+
import 'json-schema';
|
|
4
|
+
import 'zod';
|
|
5
|
+
import '../../../index-Duqv1Yom.js';
|
|
6
|
+
import '../../../base.js';
|
|
7
|
+
import '@opentelemetry/api';
|
|
8
|
+
import '../../../index-B0-NXUYv.js';
|
|
9
|
+
import 'pino';
|
|
10
|
+
import 'stream';
|
|
11
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
12
|
+
import '@opentelemetry/sdk-node';
|
|
13
|
+
import '@opentelemetry/sdk-trace-base';
|
|
14
|
+
import '../../../metric-D2V4CR8D.js';
|
|
15
|
+
import 'sift';
|
|
16
|
+
import '../../../types-M16hSruO.js';
|
|
17
|
+
import '../../../vector/index.js';
|
|
18
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
19
|
+
import '../../../tts/index.js';
|
|
20
|
+
|
|
21
|
+
type AmazonModel = 'amazon-titan-tg1-large' | 'amazon-titan-text-express-v1' | 'anthropic-claude-3-5-sonnet-20241022-v2:0' | 'anthropic-claude-3-5-sonnet-20240620-v1:0' | 'anthropic-claude-3-5-haiku-20241022-v1:0' | 'anthropic-claude-3-opus-20240229-v1:0' | 'anthropic-claude-3-sonnet-20240229-v1:0' | 'anthropic-claude-3-haiku-20240307-v1:0' | 'anthropic-claude-v2:1' | 'cohere-command-r-v1:0' | 'cohere-command-r-plus-v1:0' | 'meta-llama2-13b-chat-v1' | 'meta-llama2-70b-chat-v1' | 'meta-llama3-8b-instruct-v1:0' | 'meta-llama3-70b-instruct-v1:0' | 'meta-llama3-1-8b-instruct-v1:0' | 'meta-llama3-1-70b-instruct-v1:0' | 'meta-llama3-1-405b-instruct-v1:0' | 'meta-llama3-2-1b-instruct-v1:0' | 'meta-llama3-2-3b-instruct-v1:0' | 'meta-llama3-2-11b-instruct-v1:0' | 'meta-llama3-2-90b-instruct-v1:0' | 'mistral-mistral-7b-instruct-v0:2' | 'mistral-mixtral-8x7b-instruct-v0:1' | 'mistral-mistral-large-2402-v1:0' | 'mistral-mistral-small-2402-v1:0' | (string & {});
|
|
22
|
+
declare class AmazonBedrock extends MastraLLM {
|
|
23
|
+
constructor({ name, region, accessKeyId, secretAccessKey, sessionToken, }: {
|
|
24
|
+
name?: AmazonModel;
|
|
25
|
+
region?: string;
|
|
26
|
+
accessKeyId?: string;
|
|
27
|
+
secretAccessKey?: string;
|
|
28
|
+
sessionToken?: string;
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export { AmazonBedrock, type AmazonModel };
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
|
|
7
|
+
|
|
8
|
+
var _AmazonBedrock = class _AmazonBedrock extends MastraLLM {
|
|
9
|
+
constructor({ name = "amazon-titan-tg1-large", region = process.env.AWS_REGION || "", accessKeyId = process.env.AWS_ACCESS_KEY_ID || "", secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY || "", sessionToken = process.env.AWS_SESSION_TOKEN || "" }) {
|
|
10
|
+
const amazon = createAmazonBedrock({
|
|
11
|
+
region,
|
|
12
|
+
accessKeyId,
|
|
13
|
+
secretAccessKey,
|
|
14
|
+
sessionToken
|
|
15
|
+
});
|
|
16
|
+
super({
|
|
17
|
+
model: amazon(name)
|
|
18
|
+
});
|
|
19
|
+
}
|
|
20
|
+
};
|
|
21
|
+
__name(_AmazonBedrock, "AmazonBedrock");
|
|
22
|
+
var AmazonBedrock = _AmazonBedrock;
|
|
23
|
+
|
|
24
|
+
export { AmazonBedrock };
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
2
|
+
import 'ai';
|
|
3
|
+
import 'json-schema';
|
|
4
|
+
import 'zod';
|
|
5
|
+
import '../../../index-Duqv1Yom.js';
|
|
6
|
+
import '../../../base.js';
|
|
7
|
+
import '@opentelemetry/api';
|
|
8
|
+
import '../../../index-B0-NXUYv.js';
|
|
9
|
+
import 'pino';
|
|
10
|
+
import 'stream';
|
|
11
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
12
|
+
import '@opentelemetry/sdk-node';
|
|
13
|
+
import '@opentelemetry/sdk-trace-base';
|
|
14
|
+
import '../../../metric-D2V4CR8D.js';
|
|
15
|
+
import 'sift';
|
|
16
|
+
import '../../../types-M16hSruO.js';
|
|
17
|
+
import '../../../vector/index.js';
|
|
18
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
19
|
+
import '../../../tts/index.js';
|
|
20
|
+
|
|
21
|
+
type CerebrasModel = 'llama3.1-8b' | 'llama3.1-70b' | 'llama3.3-70b' | (string & {});
|
|
22
|
+
declare class Cerebras extends MastraLLM {
|
|
23
|
+
constructor({ name, apiKey, baseURL, }?: {
|
|
24
|
+
name?: CerebrasModel;
|
|
25
|
+
apiKey?: string;
|
|
26
|
+
baseURL?: string;
|
|
27
|
+
});
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
export { Cerebras, type CerebrasModel };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createCerebras } from '@ai-sdk/cerebras';
|
|
7
|
+
|
|
8
|
+
var _Cerebras = class _Cerebras extends MastraLLM {
|
|
9
|
+
constructor({ name = "llama3.1-8b", apiKey = process.env.CEREBRAS_API_KEY || "", baseURL = "https://api.cerebras.ai/v1" } = {}) {
|
|
10
|
+
const cerebrasModel = createCerebras({
|
|
11
|
+
baseURL,
|
|
12
|
+
apiKey
|
|
13
|
+
});
|
|
14
|
+
super({
|
|
15
|
+
model: cerebrasModel(name)
|
|
16
|
+
});
|
|
17
|
+
}
|
|
18
|
+
};
|
|
19
|
+
__name(_Cerebras, "Cerebras");
|
|
20
|
+
var Cerebras = _Cerebras;
|
|
21
|
+
|
|
22
|
+
export { Cerebras };
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
2
|
+
import 'ai';
|
|
3
|
+
import 'json-schema';
|
|
4
|
+
import 'zod';
|
|
5
|
+
import '../../../index-Duqv1Yom.js';
|
|
6
|
+
import '../../../base.js';
|
|
7
|
+
import '@opentelemetry/api';
|
|
8
|
+
import '../../../index-B0-NXUYv.js';
|
|
9
|
+
import 'pino';
|
|
10
|
+
import 'stream';
|
|
11
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
12
|
+
import '@opentelemetry/sdk-node';
|
|
13
|
+
import '@opentelemetry/sdk-trace-base';
|
|
14
|
+
import '../../../metric-D2V4CR8D.js';
|
|
15
|
+
import 'sift';
|
|
16
|
+
import '../../../types-M16hSruO.js';
|
|
17
|
+
import '../../../vector/index.js';
|
|
18
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
19
|
+
import '../../../tts/index.js';
|
|
20
|
+
|
|
21
|
+
/** Model identifiers accepted by the Cohere provider; `(string & {})` keeps autocomplete while allowing any string. */
type CohereModel = 'command-r-plus' | (string & {});
/**
 * Mastra LLM wrapper around the Cohere chat API.
 *
 * NOTE(review): unlike the Cerebras/DeepInfra providers, the options object here is
 * declared required (no `?` on the parameter). This matches the JS implementation,
 * whose destructured parameter has no `= {}` default — confirm whether the
 * asymmetry with the sibling providers is intentional.
 */
declare class Cohere extends MastraLLM {
    constructor({ name, apiKey, baseURL, }: {
        /** Cohere model id; implementation defaults to 'command-r-plus'. */
        name?: CohereModel;
        /** API key; implementation falls back to the COHERE_API_KEY env var. */
        apiKey?: string;
        /** API endpoint; implementation defaults to 'https://api.cohere.com/v2'. */
        baseURL?: string;
    });
}

export { Cohere, type CohereModel };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createCohere } from '@ai-sdk/cohere';
|
|
7
|
+
|
|
8
|
+
// Mastra LLM provider backed by the Cohere chat API.
var _Cohere = class _Cohere extends MastraLLM {
  /**
   * @param {object} [options] - Optional; every field has a default.
   * @param {string} [options.name='command-r-plus'] - Cohere model identifier.
   * @param {string} [options.apiKey] - API key; falls back to the COHERE_API_KEY env var (empty string if unset).
   * @param {string} [options.baseURL='https://api.cohere.com/v2'] - API endpoint.
   */
  // FIX: added `= {}` on the destructured parameter so `new Cohere()` works with
  // no arguments, matching the Cerebras and DeepInfra providers. Previously the
  // bare destructuring of `undefined` threw a TypeError.
  constructor({ name = "command-r-plus", apiKey = process.env.COHERE_API_KEY || "", baseURL = "https://api.cohere.com/v2" } = {}) {
    const cohere = createCohere({
      baseURL,
      apiKey
    });
    super({
      model: cohere(name)
    });
  }
};
__name(_Cohere, "Cohere");
var Cohere = _Cohere;

export { Cohere };
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { M as MastraLLM } from '../../../model-QGWIMOSx.js';
|
|
2
|
+
import 'ai';
|
|
3
|
+
import 'json-schema';
|
|
4
|
+
import 'zod';
|
|
5
|
+
import '../../../index-Duqv1Yom.js';
|
|
6
|
+
import '../../../base.js';
|
|
7
|
+
import '@opentelemetry/api';
|
|
8
|
+
import '../../../index-B0-NXUYv.js';
|
|
9
|
+
import 'pino';
|
|
10
|
+
import 'stream';
|
|
11
|
+
import '../../../telemetry-oCUM52DG.js';
|
|
12
|
+
import '@opentelemetry/sdk-node';
|
|
13
|
+
import '@opentelemetry/sdk-trace-base';
|
|
14
|
+
import '../../../metric-D2V4CR8D.js';
|
|
15
|
+
import 'sift';
|
|
16
|
+
import '../../../types-M16hSruO.js';
|
|
17
|
+
import '../../../vector/index.js';
|
|
18
|
+
import '../../../engine-EwEG-4Iv.js';
|
|
19
|
+
import '../../../tts/index.js';
|
|
20
|
+
|
|
21
|
+
/** Model identifiers accepted by the DeepInfra provider; `(string & {})` keeps autocomplete while allowing any string. */
type DeepInfraModel = 'meta-llama/Llama-3.3-70B-Instruct-Turbo' | 'meta-llama/Llama-3.3-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-8B-Instruct' | 'meta-llama/Llama-3.2-11B-Vision-Instruct' | 'meta-llama/Llama-3.2-90B-Vision-Instruct' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'deepseek-ai/DeepSeek-V3' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct' | 'Qwen/Qwen2-7B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'Qwen/QwQ-32B-Preview' | 'google/codegemma-7b-it' | 'google/gemma-2-9b-it' | 'microsoft/WizardLM-2-8x22B' | (string & {});
/**
 * Mastra LLM wrapper around the DeepInfra API.
 * The options object is optional; the implementation supplies defaults for every field.
 */
declare class DeepInfra extends MastraLLM {
    constructor({ name, apiKey, baseURL, }?: {
        /** DeepInfra model id; implementation defaults to 'meta-llama/Meta-Llama-3.1-70B-Instruct'. */
        name?: DeepInfraModel;
        /** API key; implementation falls back to the DEEPINFRA_API_KEY env var. */
        apiKey?: string;
        /** API endpoint; implementation defaults to 'https://api.deepinfra.com/v1/openai'. */
        baseURL?: string;
    });
}

export { DeepInfra, type DeepInfraModel };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import { MastraLLM } from '../../../chunk-2ISN3AA7.js';
|
|
2
|
+
import '../../../chunk-LUULSM4U.js';
|
|
3
|
+
import '../../../chunk-JCRGAEY6.js';
|
|
4
|
+
import '../../../chunk-TJK6TGSR.js';
|
|
5
|
+
import { __name } from '../../../chunk-AJJZUHB4.js';
|
|
6
|
+
import { createDeepInfra } from '@ai-sdk/deepinfra';
|
|
7
|
+
|
|
8
|
+
// Mastra LLM provider backed by the DeepInfra OpenAI-compatible API.
var _DeepInfra = class _DeepInfra extends MastraLLM {
  /**
   * @param {object} [options]
   * @param {string} [options.name='meta-llama/Meta-Llama-3.1-70B-Instruct'] - DeepInfra model identifier.
   * @param {string} [options.apiKey] - API key; falls back to the DEEPINFRA_API_KEY env var (empty string if unset).
   * @param {string} [options.baseURL='https://api.deepinfra.com/v1/openai'] - API endpoint.
   */
  constructor({
    name = "meta-llama/Meta-Llama-3.1-70B-Instruct",
    apiKey = process.env.DEEPINFRA_API_KEY || "",
    baseURL = "https://api.deepinfra.com/v1/openai"
  } = {}) {
    const provider = createDeepInfra({ baseURL, apiKey });
    super({ model: provider(name) });
  }
};
__name(_DeepInfra, "DeepInfra");
var DeepInfra = _DeepInfra;

export { DeepInfra };
|