@mastra/core 0.1.27-alpha.21 → 0.1.27-alpha.23

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
@@ -1,4 +1,4 @@
1
- import { CoreMessage, CoreTool as CT, LanguageModelV1 } from 'ai';
1
+ import { CoreMessage, CoreTool as CT, LanguageModelV1, StreamObjectResult, StreamTextResult, GenerateObjectResult, GenerateTextResult } from 'ai';
2
2
  import { ZodSchema } from 'zod';
3
3
  import { Integration } from '../integration';
4
4
  import { Logger } from '../logger';
@@ -6,9 +6,12 @@ import { Run } from '../run/types';
6
6
  import { Telemetry } from '../telemetry';
7
7
  import { AllTools, CoreTool, ToolApi } from '../tools/types';
8
8
  import { CustomModelConfig, EmbeddingModelConfig, GoogleGenerativeAISettings, ModelConfig, StructuredOutput } from './types';
9
+ type GenerateReturn<S extends boolean, Z> = S extends true ? Z extends ZodSchema ? StreamObjectResult<any, any, any> : StreamTextResult<any> : Z extends ZodSchema ? GenerateObjectResult<any> : GenerateTextResult<any, any>;
9
10
  export declare class LLM<TTools, TIntegrations extends Integration[] | undefined = undefined, TKeys extends keyof AllTools<TTools, TIntegrations> = keyof AllTools<TTools, TIntegrations>> {
10
11
  #private;
11
- constructor();
12
+ constructor({ model }: {
13
+ model: ModelConfig;
14
+ });
12
15
  /**
13
16
  * Set the concrete tools for the agent
14
17
  * @param tools
@@ -28,7 +31,7 @@ export declare class LLM<TTools, TIntegrations extends Integration[] | undefined
28
31
  tracer: import("@opentelemetry/api").Tracer;
29
32
  isEnabled: boolean;
30
33
  } | undefined;
31
- getModelType(model: ModelConfig): string;
34
+ getModelType(): string;
32
35
  createOpenAICompatibleModel({ baseURL, apiKey, defaultModelName, modelName, fetch, }: {
33
36
  baseURL: string;
34
37
  apiKey: string;
@@ -75,36 +78,47 @@ export declare class LLM<TTools, TIntegrations extends Integration[] | undefined
75
78
  private isBaseOutputType;
76
79
  private baseOutputTypeSchema;
77
80
  private createOutputSchema;
78
- text({ model, messages, onStepFinish, maxSteps, enabledTools, runId, }: {
81
+ generate<S extends boolean = false, Z extends ZodSchema | undefined = undefined>(messages: string | CoreMessage[], { schema, stream, maxSteps, onFinish, onStepFinish, enabledTools, convertedTools, runId, }?: {
82
+ runId?: string;
83
+ stream?: S;
84
+ schema?: Z;
85
+ onFinish?: (result: string) => Promise<void> | void;
86
+ onStepFinish?: (step: string) => void;
87
+ maxSteps?: number;
79
88
  enabledTools?: Partial<Record<TKeys, boolean>>;
80
- model: ModelConfig;
89
+ convertedTools?: Record<TKeys, CoreTool>;
90
+ }): Promise<GenerateReturn<S, Z>>;
91
+ __text({ messages, onStepFinish, maxSteps, enabledTools, runId, convertedTools, }: {
92
+ enabledTools?: Partial<Record<TKeys, boolean>>;
93
+ convertedTools?: Record<TKeys, CoreTool>;
81
94
  messages: CoreMessage[];
82
95
  onStepFinish?: (step: string) => void;
83
96
  maxSteps?: number;
84
- } & Run): Promise<import("ai").GenerateTextResult<{}, never>>;
85
- textObject({ model, messages, onStepFinish, maxSteps, enabledTools, structuredOutput, runId, }: {
97
+ } & Run): Promise<GenerateTextResult<{}, never>>;
98
+ __textObject({ messages, onStepFinish, maxSteps, enabledTools, convertedTools, structuredOutput, runId, }: {
86
99
  structuredOutput: StructuredOutput | ZodSchema;
87
100
  enabledTools?: Partial<Record<TKeys, boolean>>;
88
- model: ModelConfig;
101
+ convertedTools?: Record<TKeys, CoreTool>;
89
102
  messages: CoreMessage[];
90
103
  onStepFinish?: (step: string) => void;
91
104
  maxSteps?: number;
92
- } & Run): Promise<import("ai").GenerateObjectResult<any>>;
93
- stream({ model, messages, onStepFinish, onFinish, maxSteps, enabledTools, runId, }: {
94
- model: ModelConfig;
105
+ } & Run): Promise<GenerateObjectResult<any>>;
106
+ __stream({ messages, onStepFinish, onFinish, maxSteps, enabledTools, runId, convertedTools, }: {
95
107
  enabledTools?: Partial<Record<TKeys, boolean>>;
108
+ convertedTools?: Record<TKeys, CoreTool>;
96
109
  messages: CoreMessage[];
97
110
  onStepFinish?: (step: string) => void;
98
111
  onFinish?: (result: string) => Promise<void> | void;
99
112
  maxSteps?: number;
100
- } & Run): Promise<import("ai").StreamTextResult<{}>>;
101
- streamObject({ model, messages, onStepFinish, onFinish, maxSteps, enabledTools, structuredOutput, runId, }: {
113
+ } & Run): Promise<StreamTextResult<{}>>;
114
+ __streamObject({ messages, onStepFinish, onFinish, maxSteps, enabledTools, convertedTools, structuredOutput, runId, }: {
102
115
  structuredOutput: StructuredOutput | ZodSchema;
103
- model: ModelConfig;
104
- enabledTools: Partial<Record<TKeys, boolean>>;
116
+ enabledTools?: Partial<Record<TKeys, boolean>>;
117
+ convertedTools?: Record<TKeys, CoreTool>;
105
118
  messages: CoreMessage[];
106
119
  onStepFinish?: (step: string) => void;
107
120
  onFinish?: (result: string) => Promise<void> | void;
108
121
  maxSteps?: number;
109
- } & Run): Promise<import("ai").StreamObjectResult<any, any, never>>;
122
+ } & Run): Promise<StreamObjectResult<any, any, never>>;
110
123
  }
124
+ export {};
@@ -7,7 +7,7 @@ export type OpenAIEmbeddingModelNames = 'text-embedding-3-small' | 'text-embeddi
7
7
  export type OpenAIConfig = {
8
8
  provider: 'OPEN_AI';
9
9
  name: OpenAIModel | (string & {});
10
- toolChoice: 'auto' | 'required';
10
+ toolChoice?: 'auto' | 'required';
11
11
  apiKey?: string;
12
12
  };
13
13
  export type OpenAIEmbeddingConfig = EmbeddingModelConfigBase & {
@@ -18,7 +18,7 @@ export type GoogleModel = 'gemini-1.5-pro-latest' | 'gemini-1.5-pro' | 'gemini-1
18
18
  export type GoogleConfig = {
19
19
  provider: 'GOOGLE';
20
20
  name: GoogleModel | (string & {});
21
- toolChoice: 'auto' | 'required';
21
+ toolChoice?: 'auto' | 'required';
22
22
  apiKey?: string;
23
23
  };
24
24
  export interface GoogleGenerativeAISettings {
@@ -49,7 +49,7 @@ export type AnthropicModel = 'claude-3-5-sonnet-20241022' | 'claude-3-5-sonnet-2
49
49
  export type AnthropicConfig = {
50
50
  provider: 'ANTHROPIC';
51
51
  name: AnthropicModel | (string & {});
52
- toolChoice: 'auto' | 'required';
52
+ toolChoice?: 'auto' | 'required';
53
53
  apiKey?: string;
54
54
  };
55
55
  export type GroqModel = 'llama3-groq-70b-8192-tool-use-preview' | 'llama3-groq-8b-8192-tool-use-preview' | 'gemma2-9b-it' | 'gemma-7b-it';
@@ -57,7 +57,7 @@ export type GroqConfig = {
57
57
  provider: 'GROQ';
58
58
  name: GroqModel | (string & {});
59
59
  apiKey?: string;
60
- toolChoice: 'auto' | 'required';
60
+ toolChoice?: 'auto' | 'required';
61
61
  };
62
62
  export type CohereEmbeddingModelNames = 'embed-english-v3.0' | 'embed-multilingual-v3.0' | 'embed-english-light-v3.0' | 'embed-multilingual-light-v3.0' | 'embed-english-v2.0' | 'embed-english-light-v2.0' | 'embed-multilingual-v2.0' | (string & {});
63
63
  export type CohereEmbeddingConfig = EmbeddingModelConfigBase & {
@@ -70,20 +70,20 @@ export type PerplexityConfig = {
70
70
  provider: 'PERPLEXITY';
71
71
  name: PerplexityModel | (string & {});
72
72
  apiKey?: string;
73
- toolChoice: 'auto' | 'required';
73
+ toolChoice?: 'auto' | 'required';
74
74
  };
75
75
  export type TogetherAiModel = 'codellama/CodeLlama-34b-Instruct-hf' | 'upstage/SOLAR-10.7B-Instruct-v1.0' | 'mistralai/Mixtral-8x7B-v0.1' | 'WhereIsAI/UAE-Large-V1' | 'black-forest-labs/FLUX.1-depth' | 'togethercomputer/m2-bert-80M-32k-retrieval' | 'black-forest-labs/FLUX.1-canny' | 'black-forest-labs/FLUX.1-dev' | 'black-forest-labs/FLUX.1-redux' | 'BAAI/bge-large-en-v1.5' | 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo' | 'togethercomputer/Llama-3-8b-chat-hf-int4' | 'stabilityai/stable-diffusion-xl-base-1.0' | 'Gryphe/MythoMax-L2-13b' | 'meta-llama/Meta-Llama-3-8B' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'deepseek-ai/deepseek-llm-67b-chat' | 'togethercomputer/m2-bert-80M-8k-retrieval' | 'llava-hf/llava-v1.6-mistral-7b-hf' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'databricks/dbrx-instruct' | 'meta-llama/Meta-Llama-3.1-405B-Instruct-Lite-Pro' | 'meta-llama/Meta-Llama-3-8B-Instruct-Lite' | 'scb10x/scb10x-llama3-typhoon-v1-5-8b-instruct' | 'microsoft/WizardLM-2-8x22B' | 'BAAI/bge-base-en-v1.5' | 'togethercomputer/m2-bert-80M-2k-retrieval' | 'google/gemma-2b-it' | 'meta-llama/Llama-2-70b-hf' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'meta-llama/LlamaGuard-2-8b' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-v0.1' | 'black-forest-labs/FLUX.1-pro' | 'black-forest-labs/FLUX.1-schnell' | 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'Meta-Llama/Llama-Guard-7b' | 'meta-llama/Meta-Llama-3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3-70B-Instruct-Lite' | 'google/gemma-2-27b-it' | 'meta-llama/Llama-3-8b-chat-hf' | 'Qwen/Qwen2.5-72B-Instruct-Turbo' | 'Salesforce/Llama-Rank-V1' | 'meta-llama/Llama-Guard-3-11B-Vision-Turbo' | 'google/gemma-2-9b-it' | 'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo' | 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO' | 'meta-llama/Llama-3-70b-chat-hf' | 'Qwen/Qwen2.5-7B-Instruct-Turbo' | 'scb10x/scb10x-llama3-typhoon-v1-5x-4f316' | 
'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'Gryphe/MythoMax-L2-13b-Lite' | 'black-forest-labs/FLUX.1-schnell-Free' | 'meta-llama/Llama-2-7b-chat-hf' | 'meta-llama/Meta-Llama-Guard-3-8B' | 'togethercomputer/Llama-3-8b-chat-hf-int8' | 'meta-llama/Llama-Vision-Free' | 'Qwen/Qwen2-72B-Instruct' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'black-forest-labs/FLUX.1.1-pro' | 'meta-llama/Llama-3.2-3B-Instruct-Turbo' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'Nexusflow/NexusRaven-V2-13B' | 'bert-base-uncased' | 'WizardLM/WizardLM-13B-V1.2' | 'google/gemma-7b' | 'togethercomputer/Koala-7B' | 'zero-one-ai/Yi-34B' | 'togethercomputer/StripedHyena-Hessian-7B' | 'teknium/OpenHermes-2-Mistral-7B' | 'Qwen/Qwen2-7B-Instruct' | 'togethercomputer/guanaco-65b' | 'togethercomputer/llama-2-7b' | 'hazyresearch/M2-BERT-2k-Retrieval-Encoder-V1' | 'huggyllama/llama-7b' | 'Undi95/ReMM-SLERP-L2-13B' | 'NousResearch/Nous-Capybara-7B-V1p9' | 'lmsys/vicuna-7b-v1.3' | 'Undi95/Toppy-M-7B' | 'Qwen/Qwen2-72B' | 'NousResearch/Nous-Hermes-Llama2-70b' | 'WizardLM/WizardLM-70B-V1.0' | 'huggyllama/llama-65b' | 'lmsys/vicuna-13b-v1.5-16k' | 'openchat/openchat-3.5-1210' | 'Qwen/Qwen1.5-0.5B' | 'Qwen/Qwen1.5-4B' | 'Qwen/Qwen1.5-7B' | 'snorkelai/Snorkel-Mistral-PairRM-DPO' | 'Qwen/Qwen1.5-14B-Chat' | 'Qwen/Qwen1.5-1.8B-Chat' | 'Snowflake/snowflake-arctic-instruct' | 'togethercomputer/llama-2-13b' | 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT' | 'deepseek-ai/deepseek-coder-33b-instruct' | 'togethercomputer/CodeLlama-7b-Instruct' | 'NousResearch/Nous-Hermes-Llama2-13b' | 'lmsys/vicuna-13b-v1.5' | 'togethercomputer/guanaco-13b' | 'togethercomputer/CodeLlama-34b-Instruct' | 'togethercomputer/llama-2-70b' | 'codellama/CodeLlama-13b-Instruct-hf' | 'Qwen/Qwen2-7B' | 'Qwen/Qwen2-1.5B' | 'togethercomputer/CodeLlama-13b-Instruct' | 'meta-llama/Llama-2-13b-hf' | 'togethercomputer/llama-2-13b-chat' | 'huggyllama/llama-30b' | 'NousResearch/Nous-Hermes-2-Mistral-7B-DPO' | 
'togethercomputer/alpaca-7b' | 'google/gemma-7b-it' | 'allenai/OLMo-7B' | 'togethercomputer/guanaco-33b' | 'togethercomputer/llama-2-7b-chat' | 'togethercomputer/SOLAR-10.7B-Instruct-v1.0-int4' | 'togethercomputer/guanaco-7b' | 'Open-Orca/Mistral-7B-OpenOrca' | 'Qwen/Qwen1.5-32B' | 'EleutherAI/llemma_7b' | 'NousResearch/Nous-Hermes-llama-2-7b' | 'Qwen/Qwen1.5-32B-Chat' | 'meta-llama/Meta-Llama-3-70B' | 'meta-llama/Llama-3-8b-hf' | 'sentence-transformers/msmarco-bert-base-dot-v5' | 'zero-one-ai/Yi-6B' | 'meta-llama/Meta-Llama-3-8B-Instruct' | 'teknium/OpenHermes-2p5-Mistral-7B' | 'Qwen/Qwen1.5-4B-Chat' | 'wavymulder/Analog-Diffusion' | 'runwayml/stable-diffusion-v1-5' | 'prompthero/openjourney' | 'meta-llama/Llama-2-7b-hf' | 'SG161222/Realistic_Vision_V3.0_VAE' | 'Qwen/Qwen1.5-0.5B-Chat' | 'codellama/CodeLlama-7b-Instruct-hf' | 'google/gemma-2b' | 'mistralai/Mixtral-8x22B' | 'meta-llama/Llama-2-70b-chat-hf' | 'zero-one-ai/Yi-34B-Chat' | 'google/gemma-2-9b' | 'meta-llama/Meta-Llama-3-70B-Instruct' | 'togethercomputer/LLaMA-2-7B-32K' | 'codellama/CodeLlama-70b-Instruct-hf' | 'NousResearch/Hermes-2-Theta-Llama-3-70B' | 'test/test11' | 'stabilityai/stable-diffusion-2-1' | 'microsoft/phi-2' | 'Qwen/Qwen1.5-7B-Chat' | 'cognitivecomputations/dolphin-2.5-mixtral-8x7b' | 'togethercomputer/evo-1-131k-base' | 'togethercomputer/evo-1-8k-base' | 'togethercomputer/llama-2-70b-chat' | 'Qwen/Qwen1.5-14B' | 'carson/ml318br' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Reference' | 'meta-llama/Meta-Llama-3.1-8B-Reference' | 'gradientai/Llama-3-70B-Instruct-Gradient-1048k' | 'meta-llama/Meta-Llama-3.1-70B-Reference' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Reference' | 'meta-llama/Llama-3-70b-hf' | 'Qwen/Qwen2-1.5B-Instruct' | 'NousResearch/Nous-Hermes-13b' | 'HuggingFaceH4/zephyr-7b-beta' | 'Austism/chronos-hermes-13b' | 'Qwen/Qwen1.5-1.8B' | 'Qwen/Qwen1.5-72B' | 'lmsys/vicuna-13b-v1.3' | 'huggyllama/llama-13b' | 'garage-bAInd/Platypus2-70B-instruct' | 'allenai/OLMo-7B-Instruct' | 
'togethercomputer/Koala-13B' | 'lmsys/vicuna-7b-v1.5';
76
76
  export type TogetherAiConfig = {
77
77
  provider: 'TOGETHER_AI';
78
78
  name: TogetherAiModel | (string & {});
79
79
  apiKey?: string;
80
- toolChoice: 'auto' | 'required';
80
+ toolChoice?: 'auto' | 'required';
81
81
  };
82
82
  export type LMStudioModel = 'qwen2-7b-instruct-4bit' | 'qwen2-math-1.5b' | 'qwen2-0.5b' | 'aya-23-8b' | 'mistral-7b-v0.3' | 'stablecode' | 'cohere-command-r-v01-4bit' | 'command-r' | 'starcoder2-7b' | 'deepseek-math-7b' | 'qwen2.5-coder-14b' | 'qwen2.5-coder-32b' | 'qwen2.5-coder-3b' | 'llama-3.2-3b-instruct-4bit' | 'llama-3.2-1b' | 'llama-3.2-3b' | 'qwen2.5-coder-7b' | 'qwen2.5-14b' | 'yi-coder-9b' | 'hermes-3-llama-3.1-8b' | 'internlm-2.5-20b' | 'llava-v1.5' | 'llama-3.1-8b-instruct-4bit' | 'meta-llama-3.1-8b' | 'mistral-nemo-2407' | 'mistral-nemo-instruct-2407-4bit' | 'gemma-2-2b' | 'mathstral-7b' | 'gemma-2-9b' | 'deepseek-coder-v2-lite-instruct-4bit' | 'smollm-360m-v0.2' | 'phi-3-mini-4k-instruct-4bit' | 'gemma-2-27b' | 'codestral-22b' | 'phi-3.1-mini-128k' | 'deepseek-coder-v2-lite';
83
83
  export type LMStudioConfig = {
84
84
  provider: 'LM_STUDIO';
85
85
  name: LMStudioModel | (string & {});
86
- toolChoice: 'auto' | 'required';
86
+ toolChoice?: 'auto' | 'required';
87
87
  baseURL: string;
88
88
  };
89
89
  export type BasetenModel = 'llama-3.1-70b-instruct' | 'qwen2.5-7b-math-instruct' | 'qwen2.5-14b-instruct' | 'qwen2.5-32b-coder-instruct' | 'llama-3.1-8b-instruct' | 'llama-3.1-nemetron-70b' | 'llama-3.2-90b-vision-instruct' | 'llama-3.1-405b-instruct' | 'ultravox-v0.4' | 'llama-3.2-1b-vision-instruct' | 'llama-3-70b-instruct' | 'llama-3-8b-instruct' | 'mistral-7b-instruct' | 'qwen2.5-14b-coder-instruct' | 'qwen2.5-7b-coder-instruct' | 'qwen2.5-72b-math-instruct' | 'qwen2.5-72b-instruct' | 'qwen2.5-32b-instruct' | 'qwen2.5-7b-instruct' | 'qwen2.5-3b-instruct' | 'pixtral-12b' | 'phi-3.5-mini-instruct' | 'gemma-2-9b' | 'gemma-2-27b' | 'phi-3-mini-128k-instruct' | 'phi-3-mini-4k-instruct' | 'zephyr-7b-alpha' | 'mixtral-8x7b-instruct' | 'mixtral-8x22b';
@@ -91,7 +91,7 @@ export type BaseTenConfig = {
91
91
  provider: 'BASETEN';
92
92
  name: BasetenModel | (string & {});
93
93
  apiKey?: string;
94
- toolChoice: 'auto' | 'required';
94
+ toolChoice?: 'auto' | 'required';
95
95
  fetch?: typeof globalThis.fetch;
96
96
  };
97
97
  export type FireworksModel = 'llama-3.1-405b-instruct' | 'llama-3.1-70b-instruct' | 'llama-3.1-8b-instruct' | 'llama-3.2-3b-instruct';
@@ -99,55 +99,55 @@ export type FireworksConfig = {
99
99
  provider: 'FIREWORKS';
100
100
  name: FireworksModel | (string & {});
101
101
  apiKey?: string;
102
- toolChoice: 'auto' | 'required';
102
+ toolChoice?: 'auto' | 'required';
103
103
  };
104
104
  export type MistralModel = 'pixtral-large-latest' | 'mistral-large-latest' | 'mistral-small-latest' | 'ministral-3b-latest' | 'ministral-8b-latest' | 'pixtral-12b-2409';
105
105
  export type MistralConfig = {
106
106
  provider: 'MISTRAL';
107
107
  name: MistralModel | (string & {});
108
108
  apiKey?: string;
109
- toolChoice: 'auto' | 'required';
109
+ toolChoice?: 'auto' | 'required';
110
110
  };
111
111
  export type XGrokModel = 'grok-beta' | 'grok-vision-beta';
112
112
  export type XGrokConfig = {
113
113
  provider: 'X_GROK';
114
114
  name: XGrokModel | (string & {});
115
- toolChoice: 'auto' | 'required';
115
+ toolChoice?: 'auto' | 'required';
116
116
  apiKey?: string;
117
117
  };
118
118
  export type CustomModelConfig = {
119
119
  model: LanguageModelV1 | (() => Promise<LanguageModelV1>);
120
120
  provider: string;
121
121
  apiKey?: string;
122
- toolChoice: 'auto' | 'required';
122
+ toolChoice?: 'auto' | 'required';
123
123
  };
124
124
  export type CohereModel = 'command-r-plus';
125
125
  export type CohereConfig = {
126
126
  provider: 'COHERE';
127
127
  name: CohereModel | (string & {});
128
128
  apiKey?: string;
129
- toolChoice: 'auto' | 'required';
129
+ toolChoice?: 'auto' | 'required';
130
130
  };
131
131
  export type AzureModel = 'gpt-35-turbo-instruct';
132
132
  export type AzureConfig = {
133
133
  provider: 'AZURE';
134
134
  name: AzureModel & (string | {});
135
135
  apiKey?: string;
136
- toolChoice: 'auto' | 'required';
136
+ toolChoice?: 'auto' | 'required';
137
137
  };
138
138
  export type AmazonModel = 'amazon-titan-tg1-large' | 'amazon-titan-text-express-v1' | 'anthropic-claude-3-5-sonnet-20241022-v2:0' | 'anthropic-claude-3-5-sonnet-20240620-v1:0' | 'anthropic-claude-3-5-haiku-20241022-v1:0' | 'anthropic-claude-3-opus-20240229-v1:0' | 'anthropic-claude-3-sonnet-20240229-v1:0' | 'anthropic-claude-3-haiku-20240307-v1:0' | 'anthropic-claude-v2:1' | 'cohere-command-r-v1:0' | 'cohere-command-r-plus-v1:0' | 'meta-llama2-13b-chat-v1' | 'meta-llama2-70b-chat-v1' | 'meta-llama3-8b-instruct-v1:0' | 'meta-llama3-70b-instruct-v1:0' | 'meta-llama3-1-8b-instruct-v1:0' | 'meta-llama3-1-70b-instruct-v1:0' | 'meta-llama3-1-405b-instruct-v1:0' | 'meta-llama3-2-1b-instruct-v1:0' | 'meta-llama3-2-3b-instruct-v1:0' | 'meta-llama3-2-11b-instruct-v1:0' | 'meta-llama3-2-90b-instruct-v1:0' | 'mistral-mistral-7b-instruct-v0:2' | 'mistral-mixtral-8x7b-instruct-v0:1' | 'mistral-mistral-large-2402-v1:0' | 'mistral-mistral-small-2402-v1:0';
139
139
  export type AmazonConfig = {
140
140
  provider: 'AMAZON';
141
141
  name: AmazonModel | (string & {});
142
142
  apiKey?: string;
143
- toolChoice: 'auto' | 'required';
143
+ toolChoice?: 'auto' | 'required';
144
144
  };
145
145
  export type AnthropicVertexModel = 'claude-3-5-sonnet@20240620' | 'claude-3-opus@20240229' | 'claude-3-sonnet@20240229' | 'claude-3-haiku@20240307';
146
146
  export type AnthropicVertexConfig = {
147
147
  provider: 'ANTHROPIC_VERTEX';
148
148
  name: AnthropicVertexModel | (string & {});
149
149
  apiKey?: string;
150
- toolChoice: 'auto' | 'required';
150
+ toolChoice?: 'auto' | 'required';
151
151
  };
152
152
  type BuiltInModelConfig = OpenAIConfig | AnthropicConfig | GroqConfig | GoogleConfig | PerplexityConfig | TogetherAiConfig | LMStudioConfig | BaseTenConfig | FireworksConfig | MistralConfig | XGrokConfig | CohereConfig | AzureConfig | AmazonConfig | AnthropicVertexConfig;
153
153
  export type ModelConfig = BuiltInModelConfig | CustomModelConfig;
@@ -3,7 +3,9 @@ import { Agent } from '../agent';
3
3
  import { MastraEngine } from '../engine';
4
4
  import { Integration } from '../integration';
5
5
  import { LLM } from '../llm';
6
+ import { ModelConfig } from '../llm/types';
6
7
  import { BaseLogger } from '../logger';
8
+ import { MastraMemory } from '../memory';
7
9
  import { Run } from '../run/types';
8
10
  import { syncApi } from '../sync/types';
9
11
  import { Telemetry, OtelConfig } from '../telemetry';
@@ -15,13 +17,14 @@ export declare class Mastra<TIntegrations extends Integration[], MastraTools ext
15
17
  private vectors?;
16
18
  private tools;
17
19
  private agents;
18
- llm: LLM<MastraTools, TIntegrations, keyof AllTools<MastraTools, TIntegrations>>;
19
20
  private integrations;
20
21
  private logger;
21
22
  private syncs;
22
23
  private telemetry?;
24
+ memory?: MastraMemory;
23
25
  constructor(config: {
24
26
  tools?: MastraTools;
27
+ memory?: MastraMemory;
25
28
  syncs?: TSyncs;
26
29
  agents?: Agent<MastraTools, TIntegrations>[];
27
30
  integrations?: TIntegrations;
@@ -30,12 +33,12 @@ export declare class Mastra<TIntegrations extends Integration[], MastraTools ext
30
33
  logger?: TLogger;
31
34
  telemetry?: OtelConfig;
32
35
  });
36
+ LLM(modelConfig: ModelConfig): LLM<MastraTools, TIntegrations, keyof (MastraTools extends Record<string, ToolApi<any, any>> ? MastraTools : {}) | keyof (TIntegrations extends Integration[] ? (import("..").IntegrationTools<StripUndefined<TIntegrations>[number]> extends infer T ? T extends import("..").IntegrationTools<StripUndefined<TIntegrations>[number]> ? T extends any ? (k: T) => void : never : never : never) extends (k: infer I) => void ? I : never : {})>;
33
37
  sync<K extends keyof TSyncs>(key: K, params: TSyncs[K]['schema']['_input'], runId?: Run['runId']): Promise<StripUndefined<TSyncs[K]['outputShema']>['_input']>;
34
38
  getAgent(name: string): Agent<MastraTools, TIntegrations, keyof (MastraTools extends Record<string, ToolApi<any, any>> ? MastraTools : {}) | keyof (TIntegrations extends Integration[] ? (import("..").IntegrationTools<StripUndefined<TIntegrations>[number]> extends infer T ? T extends import("..").IntegrationTools<StripUndefined<TIntegrations>[number]> ? T extends any ? (k: T) => void : never : never : never) extends (k: infer I) => void ? I : never : {})>;
35
39
  getIntegration<I extends TIntegrations[number]['name']>(name: I): Extract<TIntegrations[number], {
36
40
  name: I;
37
41
  }>;
38
- getLLM(): LLM<MastraTools, TIntegrations, keyof (MastraTools extends Record<string, ToolApi<any, any>> ? MastraTools : {}) | keyof (TIntegrations extends Integration[] ? (import("..").IntegrationTools<StripUndefined<TIntegrations>[number]> extends infer T ? T extends import("..").IntegrationTools<StripUndefined<TIntegrations>[number]> ? T extends any ? (k: T) => void : never : never : never) extends (k: infer I) => void ? I : never : {})>;
39
42
  getTool<T extends keyof MastraTools>(name: T): MastraTools[T] & {
40
43
  execute: <IN extends MastraTools[T]["schema"], OUT extends StripUndefined<MastraTools[T]["outputSchema"]>>(params: z.infer<IN>, runId?: Run["runId"]) => Promise<z.infer<OUT>>;
41
44
  };
@@ -1,13 +1,19 @@
1
+ import { AssistantContent, ToolContent, ToolResultPart, UserContent, Message as AiMessage } from 'ai';
1
2
  export type MessageType = {
2
3
  id: string;
3
- content: string;
4
- role: 'user' | 'assistant';
4
+ content: UserContent | AssistantContent | ToolContent;
5
+ role: 'user' | 'assistant' | 'tool';
5
6
  createdAt: Date;
6
7
  threadId: string;
8
+ toolCallIds?: string[];
9
+ toolCallArgs?: Record<string, unknown>[];
10
+ toolNames?: string[];
11
+ type: 'text' | 'tool-call' | 'tool-result';
7
12
  };
8
13
  export type ThreadType = {
9
14
  id: string;
10
15
  title?: string;
16
+ resourceid: string;
11
17
  createdAt: Date;
12
18
  updatedAt: Date;
13
19
  metadata?: Record<string, unknown>;
@@ -22,43 +28,111 @@ export declare abstract class MastraMemory {
22
28
  * @param threadId - The unique identifier of the thread
23
29
  * @returns Promise resolving to the thread or null if not found
24
30
  */
25
- abstract getThreadById(threadId: string): Promise<ThreadType | null>;
31
+ abstract getThreadById({ threadId }: {
32
+ threadId: string;
33
+ }): Promise<ThreadType | null>;
34
+ abstract getThreadsByResourceId({ resourceid }: {
35
+ resourceid: string;
36
+ }): Promise<ThreadType[]>;
26
37
  /**
27
38
  * Saves or updates a thread
28
39
  * @param thread - The thread data to save
29
40
  * @returns Promise resolving to the saved thread
30
41
  */
31
- abstract saveThread(thread: ThreadType): Promise<ThreadType>;
42
+ abstract saveThread({ thread }: {
43
+ thread: ThreadType;
44
+ }): Promise<ThreadType>;
32
45
  /**
33
46
  * Saves messages to a thread
34
47
  * @param messages - Array of messages to save
35
48
  * @returns Promise resolving to the saved messages
36
49
  */
37
- abstract saveMessages(messages: MessageType[]): Promise<MessageType[]>;
50
+ abstract saveMessages({ messages }: {
51
+ messages: MessageType[];
52
+ }): Promise<MessageType[]>;
38
53
  /**
39
54
  * Retrieves all messages for a specific thread
40
55
  * @param threadId - The unique identifier of the thread
56
+ * @returns Promise resolving to array of messages and uiMessages
57
+ */
58
+ abstract getMessages({ threadId, }: {
59
+ threadId: string;
60
+ }): Promise<{
61
+ messages: MessageType[];
62
+ uiMessages: AiMessage[];
63
+ }>;
64
+ /**
65
+ * Retrieves all messages for a specific thread within a context window
66
+ * @param threadId - The unique identifier of the thread
67
+ * @param startDate - Optional start date to filter the context window
68
+ * @param endDate - Optional end date to filter the context window
41
69
  * @returns Promise resolving to an array of messages
42
70
  */
43
- abstract getMessages(threadId: string): Promise<MessageType[]>;
71
+ abstract getContextWindow({ threadId, startDate, endDate, }: {
72
+ threadId: string;
73
+ startDate?: Date;
74
+ endDate?: Date;
75
+ }): Promise<MessageType[]>;
76
+ /**
77
+ * Retrieves cached tool result for a specific arg in a thread
78
+ * @param threadId - The unique identifier of the thread
79
+ * @param toolArgs - The tool arguments to retrieve the cached result for
80
+ * @param toolName - The name of the tool that was called
81
+ * @returns Promise resolving to the cached tool result or null if not found
82
+ */
83
+ abstract getCachedToolResult({ threadId, toolArgs, toolName, }: {
84
+ threadId: string;
85
+ toolArgs: Record<string, unknown>;
86
+ toolName: string;
87
+ }): Promise<ToolResultPart['result'] | null>;
88
+ /**
89
+ * Checks if an un-expired tool call arg exists in a thread
90
+ * @param hashedToolCallArgs - The hashed tool call information (args, threadId, toolName) to check for
91
+ * @returns Promise resolving to true if the un-expired tool call arg exists, false otherwise
92
+ */
93
+ abstract checkIfValidArgExists({ hashedToolCallArgs }: {
94
+ hashedToolCallArgs: string;
95
+ }): Promise<boolean>;
44
96
  /**
45
97
  * Helper method to create a new thread
46
98
  * @param title - Optional title for the thread
47
99
  * @param metadata - Optional metadata for the thread
48
100
  * @returns Promise resolving to the created thread
49
101
  */
50
- createThread(title?: string, metadata?: Record<string, unknown>): Promise<ThreadType>;
102
+ createThread({ threadId, resourceid, title, metadata, }: {
103
+ resourceid: string;
104
+ threadId?: string;
105
+ title?: string;
106
+ metadata?: Record<string, unknown>;
107
+ }): Promise<ThreadType>;
108
+ /**
109
+ * Helper method to delete a thread
110
+ * @param threadId - the id of the thread to delete
111
+ */
112
+ abstract deleteThread(threadId: string): Promise<void>;
51
113
  /**
52
114
  * Helper method to add a single message to a thread
53
115
  * @param threadId - The thread to add the message to
54
116
  * @param content - The message content
55
117
  * @param role - The role of the message sender
118
+ * @param type - The type of the message
119
+ * @param toolNames - Optional array of tool names that were called
120
+ * @param toolCallArgs - Optional array of tool call arguments
121
+ * @param toolCallIds - Optional array of tool call ids
56
122
  * @returns Promise resolving to the saved message
57
123
  */
58
- addMessage(threadId: string, content: string, role: 'user' | 'assistant'): Promise<MessageType>;
124
+ addMessage({ threadId, content, role, type, toolNames, toolCallArgs, toolCallIds, }: {
125
+ threadId: string;
126
+ content: UserContent | AssistantContent;
127
+ role: 'user' | 'assistant';
128
+ type: 'text' | 'tool-call' | 'tool-result';
129
+ toolNames?: string[];
130
+ toolCallArgs?: Record<string, unknown>[];
131
+ toolCallIds?: string[];
132
+ }): Promise<MessageType>;
59
133
  /**
60
134
  * Generates a unique identifier
61
135
  * @returns A unique string ID
62
136
  */
63
- protected generateId(): string;
137
+ generateId(): string;
64
138
  }
@@ -3,6 +3,7 @@ import { Agent } from '../agent';
3
3
  import { MastraEngine } from '../engine';
4
4
  import { Integration } from '../integration';
5
5
  import { LLM } from '../llm';
6
+ import { ModelConfig } from '../llm/types';
6
7
  import { Run } from '../run/types';
7
8
  import { AllTools, ToolApi } from '../tools/types';
8
9
  import { MastraVector } from '../vector';
@@ -25,7 +26,7 @@ export interface syncApi<IN extends Record<string, unknown>, OUT extends Record<
25
26
  engine: MastraEngine;
26
27
  agents: Map<string, Agent<Integration[], any>>;
27
28
  vectors: Record<string, MastraVector> | undefined;
28
- llm: LLM<AllTools<any, Integration[]>, Integration[], any>;
29
+ llm: (model: ModelConfig) => LLM<AllTools<any, Integration[]>, Integration[], any>;
29
30
  integrationsRegistry: <I extends Integration[]>() => SyncIntegrationRegistry<I>;
30
31
  toolsRegistry: <T extends Record<string, ToolApi<any, any>>>() => SyncToolRegistry<T>;
31
32
  }) => Promise<OUT>;
@@ -3,6 +3,7 @@ import { Agent } from '../agent';
3
3
  import { MastraEngine } from '../engine';
4
4
  import { Integration } from '../integration';
5
5
  import { LLM } from '../llm';
6
+ import { ModelConfig } from '../llm/types';
6
7
  import { StripUndefined } from '../mastra/types';
7
8
  import { Run } from '../run/types';
8
9
  export type CoreTool = {
@@ -15,14 +16,6 @@ interface ToolIntegrations<I extends Integration[]> {
15
16
  name: N;
16
17
  }>;
17
18
  }
18
- export interface IntegrationApiExcutorParams<T extends Record<string, unknown>> {
19
- data: T;
20
- runId?: Run['runId'];
21
- integrationsRegistry: <I extends Integration[]>() => ToolIntegrations<I>;
22
- llm: LLM<any>;
23
- engine?: MastraEngine | undefined;
24
- agents: Map<string, Agent<any>>;
25
- }
26
19
  export type ToolApi<IN extends Record<string, unknown> = Record<string, unknown>, OUT extends Record<string, unknown> = Record<string, unknown>> = {
27
20
  schema: ZodSchema<IN>;
28
21
  label: string;
@@ -30,9 +23,18 @@ export type ToolApi<IN extends Record<string, unknown> = Record<string, unknown>
30
23
  documentation?: string;
31
24
  outputSchema?: ZodSchema<OUT>;
32
25
  executor: (params: IntegrationApiExcutorParams<IN>) => Promise<OUT>;
26
+ enableCache?: boolean;
33
27
  };
34
28
  export type IntegrationTools<T extends Integration> = T['tools'];
35
29
  type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never;
36
30
  export type MergeIntegrationTools<T extends Integration[]> = UnionToIntersection<IntegrationTools<T[number]>>;
37
31
  export type AllTools<TTools, TIntegrations extends Integration[] | undefined = undefined> = (TTools extends Record<string, ToolApi<any, any>> ? TTools : {}) & (TIntegrations extends Integration[] ? MergeIntegrationTools<StripUndefined<TIntegrations>> : {});
32
+ export interface IntegrationApiExcutorParams<T extends Record<string, unknown>> {
33
+ data: T;
34
+ runId?: Run['runId'];
35
+ integrationsRegistry: <I extends Integration[]>() => ToolIntegrations<I>;
36
+ llm: (model: ModelConfig) => LLM<AllTools<any, Integration[]>, Integration[], any>;
37
+ engine?: MastraEngine | undefined;
38
+ agents: Map<string, Agent<any>>;
39
+ }
38
40
  export {};
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mastra/core",
3
- "version": "0.1.27-alpha.21",
3
+ "version": "0.1.27-alpha.23",
4
4
  "license": "MIT",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",