@mastra/core 0.4.1 → 0.4.2-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.js +1 -1
- package/dist/{base-BVXgbD4Q.d.ts → base-HowPMmsb.d.ts} +24 -19
- package/dist/{chunk-ICUX73VZ.js → chunk-T5B2DATI.js} +114 -27
- package/dist/{chunk-QW25LZSR.js → chunk-VGVST36A.js} +1 -1
- package/dist/eval/index.d.ts +1 -1
- package/dist/index.d.ts +4 -4
- package/dist/index.js +2 -2
- package/dist/integration/index.d.ts +2 -2
- package/dist/llm/index.d.ts +1 -1
- package/dist/mastra/index.d.ts +2 -2
- package/dist/memory/index.d.ts +1 -1
- package/dist/relevance/index.js +1 -1
- package/dist/storage/index.d.ts +2 -2
- package/dist/telemetry/index.d.ts +1 -1
- package/dist/tools/index.d.ts +2 -2
- package/dist/{workflow-B_sRFHFT.d.ts → workflow-CPz2D0Wz.d.ts} +1 -1
- package/dist/workflows/index.d.ts +3 -3
- package/package.json +1 -1
package/dist/agent/index.d.ts
CHANGED
package/dist/agent/index.js
CHANGED
@@ -1 +1 @@
-export { Agent } from '../chunk-ICUX73VZ.js';
+export { Agent } from '../chunk-T5B2DATI.js';
package/dist/{base-BVXgbD4Q.d.ts → base-HowPMmsb.d.ts}
RENAMED
@@ -475,6 +475,7 @@ interface AgentGenerateOptions<Z extends ZodSchema | JSONSchema7 | undefined = u
 output?: OutputType | Z;
 temperature?: number;
 toolChoice?: 'auto' | 'required';
+experimental_output?: Z;
 }
 interface AgentStreamOptions<Z extends ZodSchema | JSONSchema7 | undefined = undefined> {
 toolsets?: ToolsetsInput;
@@ -489,6 +490,7 @@ interface AgentStreamOptions<Z extends ZodSchema | JSONSchema7 | undefined = und
 output?: OutputType | Z;
 temperature?: number;
 toolChoice?: 'auto' | 'required';
+experimental_output?: Z;
 }

 type LanguageModel = LanguageModelV1;
@@ -520,18 +522,8 @@ type StructuredOutput = {
 };
 type GenerateReturn<Z extends ZodSchema | JSONSchema7 | undefined = undefined> = Z extends undefined ? GenerateTextResult<any, any> : GenerateObjectResult<any>;
 type StreamReturn<Z extends ZodSchema | JSONSchema7 | undefined = undefined> = Z extends undefined ? StreamTextResult<any, any> : StreamObjectResult<any, any, any>;
-type OutputType = 'text' | StructuredOutput;
-type
-runId?: string;
-onFinish?: (result: string) => Promise<void> | void;
-onStepFinish?: (step: string) => void;
-maxSteps?: number;
-tools?: ToolsInput;
-convertedTools?: Record<string, CoreTool>;
-output?: OutputType | Z;
-temperature?: number;
-};
-type LLMTextOptions = {
+type OutputType = 'text' | StructuredOutput | ZodSchema | JSONSchema7 | undefined;
+type LLMTextOptions<Z extends ZodSchema | JSONSchema7 | undefined = undefined> = {
 tools?: ToolsInput;
 convertedTools?: Record<string, CoreTool>;
 messages: CoreMessage[];
@@ -539,11 +531,23 @@ type LLMTextOptions = {
 toolChoice?: 'auto' | 'required';
 maxSteps?: number;
 temperature?: number;
+experimental_output?: Z;
 } & Run;
 type LLMTextObjectOptions<T> = LLMTextOptions & {
 structuredOutput: JSONSchema7 | z.ZodType<T> | StructuredOutput;
 };
-type
+type LLMStreamOptions<Z extends ZodSchema | JSONSchema7 | undefined = undefined> = {
+runId?: string;
+onFinish?: (result: string) => Promise<void> | void;
+onStepFinish?: (step: string) => void;
+maxSteps?: number;
+tools?: ToolsInput;
+convertedTools?: Record<string, CoreTool>;
+output?: OutputType | Z;
+temperature?: number;
+experimental_output?: Z;
+};
+type LLMInnerStreamOptions<Z extends ZodSchema | JSONSchema7 | undefined = undefined> = {
 tools?: ToolsInput;
 convertedTools?: Record<string, CoreTool>;
 messages: CoreMessage[];
@@ -552,6 +556,7 @@ type LLMInnerStreamOptions = {
 maxSteps?: number;
 temperature?: number;
 toolChoice?: 'auto' | 'required';
+experimental_output?: Z;
 } & Run;
 type LLMStreamObjectOptions<T> = LLMInnerStreamOptions & {
 structuredOutput: JSONSchema7 | z.ZodType<T> | StructuredOutput;
@@ -568,10 +573,10 @@ declare class MastraLLMBase extends MastraBase {
 getModel(): ai.LanguageModelV1;
 convertToMessages(messages: string | string[] | CoreMessage$1[]): CoreMessage$1[];
 __registerPrimitives(p: MastraPrimitives): void;
-__text(input: LLMTextOptions): Promise<GenerateTextResult<any, any>>;
+__text<Z extends ZodSchema | JSONSchema7 | undefined>(input: LLMTextOptions<Z>): Promise<GenerateTextResult<any, any>>;
 __textObject<T>(input: LLMTextObjectOptions<T>): Promise<GenerateObjectResult<T>>;
 generate<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(messages: string | string[] | CoreMessage$1[], options?: LLMStreamOptions<Z>): Promise<GenerateReturn<Z>>;
-__stream(input: LLMInnerStreamOptions): Promise<StreamTextResult<any, any>>;
+__stream<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(input: LLMInnerStreamOptions<Z>): Promise<StreamTextResult<any, any>>;
 __streamObject<T>(input: LLMStreamObjectOptions<T>): Promise<StreamObjectResult<DeepPartial<T>, T, never>>;
 stream<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(messages: string | string[] | CoreMessage$1[], options?: LLMStreamOptions<Z>): Promise<StreamReturn<Z>>;
 }
@@ -612,7 +617,7 @@ declare class Agent<TTools extends Record<string, ToolAction<any, any, any, any>
 runId?: string;
 }): Promise<{
 threadId: string;
-messages:
+messages: NonNullable<CoreMessage$1 | null>[];
 }>;
 saveResponse({ result, threadId, resourceId, runId, memoryConfig, }: {
 runId: string;
@@ -659,8 +664,8 @@ declare class Agent<TTools extends Record<string, ToolAction<any, any, any, any>
 outputText: string;
 }) => Promise<void>;
 };
-generate<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(messages: string | string[] | CoreMessage$1[], { context, threadId: threadIdInFn, memoryOptions, resourceId, maxSteps, onStepFinish, runId, toolsets, output, temperature, toolChoice, }?: AgentGenerateOptions<Z>): Promise<GenerateReturn<Z>>;
-stream<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(messages: string | string[] | CoreMessage$1[], { context, threadId: threadIdInFn, memoryOptions, resourceId, maxSteps, onFinish, onStepFinish, runId, toolsets, output, temperature, toolChoice, }?: AgentStreamOptions<Z>): Promise<StreamReturn<Z>>;
+generate<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(messages: string | string[] | CoreMessage$1[], { context, threadId: threadIdInFn, memoryOptions, resourceId, maxSteps, onStepFinish, runId, toolsets, output, temperature, toolChoice, experimental_output, }?: AgentGenerateOptions<Z>): Promise<GenerateReturn<Z>>;
+stream<Z extends ZodSchema | JSONSchema7 | undefined = undefined>(messages: string | string[] | CoreMessage$1[], { context, threadId: threadIdInFn, memoryOptions, resourceId, maxSteps, onFinish, onStepFinish, runId, toolsets, output, temperature, toolChoice, experimental_output, }?: AgentStreamOptions<Z>): Promise<StreamReturn<Z>>;
 /**
 * Convert text to speech using the configured voice provider
 * @param input Text or text stream to convert to speech
@@ -891,4 +896,4 @@ declare abstract class MastraStorage extends MastraBase {
 __getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
 }

-export { type WorkflowContext as $, Agent as A, type BaseStructuredOutputType as B, type CoreMessage as C, type LLMInnerStreamOptions as D, type EvalRow as E, type LLMStreamObjectOptions as F, type GenerateReturn as G, type MessageType as H, type IAction as I, type StorageThreadType as J, type MessageResponse as K, type LanguageModel as L, MastraStorage as M, type MemoryConfig as N, type OutputType as O, type SharedMemoryConfig as P, type CoreTool as Q, type RetryConfig as R, type StepExecutionContext as S, type ToolAction as T, type StepNode as U, type VariableReference as V, type WorkflowOptions as W, type BaseCondition as X, type ActionContext as Y, type StepDef as Z, type StepCondition as _, MastraMemory as a, type WorkflowLogMessage as a0, type WorkflowEvent as a1, type ResolverFunctionInput as a2, type ResolverFunctionOutput as a3, type SubscriberFunctionOutput as a4, type DependencyCheckOutput as a5, type WorkflowActors as a6, type WorkflowActionParams as a7, type WorkflowActions as a8, type WorkflowState as a9, type StepId as aa, type ExtractSchemaFromStep as ab, type ExtractStepResult as ac, type StepInputType as ad, type ExtractSchemaType as ae, type PathsToStringProps as af, type TABLE_NAMES as ag, TABLE_WORKFLOW_SNAPSHOT as ah, TABLE_EVALS as ai, TABLE_MESSAGES as aj, TABLE_THREADS as ak, TABLE_TRACES as al, type StepAction as b, type MastraPrimitives as c, type StepVariableType as d, type StepConfig as e, type StepResult as f, type WorkflowRunState as g, type StepGraph as h, type AgentConfig as i, type ToolExecutionContext as j, type StorageColumn as k, type WorkflowRow as l, type StorageGetMessagesArg as m, type CoreSystemMessage as n, type CoreAssistantMessage as o, type CoreUserMessage as p, type CoreToolMessage as q, type EmbedResult as r, type EmbedManyResult as s, type StructuredOutputType as t, type StructuredOutputArrayItem as u, type StructuredOutput as v, type StreamReturn as w, type
+export { type WorkflowContext as $, Agent as A, type BaseStructuredOutputType as B, type CoreMessage as C, type LLMInnerStreamOptions as D, type EvalRow as E, type LLMStreamObjectOptions as F, type GenerateReturn as G, type MessageType as H, type IAction as I, type StorageThreadType as J, type MessageResponse as K, type LanguageModel as L, MastraStorage as M, type MemoryConfig as N, type OutputType as O, type SharedMemoryConfig as P, type CoreTool as Q, type RetryConfig as R, type StepExecutionContext as S, type ToolAction as T, type StepNode as U, type VariableReference as V, type WorkflowOptions as W, type BaseCondition as X, type ActionContext as Y, type StepDef as Z, type StepCondition as _, MastraMemory as a, type WorkflowLogMessage as a0, type WorkflowEvent as a1, type ResolverFunctionInput as a2, type ResolverFunctionOutput as a3, type SubscriberFunctionOutput as a4, type DependencyCheckOutput as a5, type WorkflowActors as a6, type WorkflowActionParams as a7, type WorkflowActions as a8, type WorkflowState as a9, type StepId as aa, type ExtractSchemaFromStep as ab, type ExtractStepResult as ac, type StepInputType as ad, type ExtractSchemaType as ae, type PathsToStringProps as af, type TABLE_NAMES as ag, TABLE_WORKFLOW_SNAPSHOT as ah, TABLE_EVALS as ai, TABLE_MESSAGES as aj, TABLE_THREADS as ak, TABLE_TRACES as al, type StepAction as b, type MastraPrimitives as c, type StepVariableType as d, type StepConfig as e, type StepResult as f, type WorkflowRunState as g, type StepGraph as h, type AgentConfig as i, type ToolExecutionContext as j, type StorageColumn as k, type WorkflowRow as l, type StorageGetMessagesArg as m, type CoreSystemMessage as n, type CoreAssistantMessage as o, type CoreUserMessage as p, type CoreToolMessage as q, type EmbedResult as r, type EmbedManyResult as s, type StructuredOutputType as t, type StructuredOutputArrayItem as u, type StructuredOutput as v, type StreamReturn as w, type LLMTextOptions as x, type LLMTextObjectOptions as y, type LLMStreamOptions as z };
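In plain terms, the declaration changes above add an optional experimental_output to AgentGenerateOptions, AgentStreamOptions, and the LLM option types, and widen OutputType so either a Zod schema or a JSON Schema object can be supplied. A minimal, hypothetical call-site sketch in TypeScript (the agent instance and the schemas below are placeholders, not part of this diff):

import { z } from 'zod';

// `agent` stands in for any configured Mastra Agent instance.
async function demo(agent: any) {
  // A Zod schema is accepted...
  const zodSchema = z.object({ title: z.string(), tags: z.array(z.string()) });
  await agent.generate('Summarize the release notes', { experimental_output: zodSchema });

  // ...and so is a plain JSON Schema object, on stream() as well as generate().
  const jsonSchema7 = {
    type: 'object',
    properties: { title: { type: 'string' } },
    required: ['title'],
  } as const;
  await agent.stream('Summarize the release notes', { experimental_output: jsonSchema7 });
}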
package/dist/{chunk-ICUX73VZ.js → chunk-T5B2DATI.js}
RENAMED
@@ -6,7 +6,7 @@ import { executeHook } from './chunk-BB4KXGBU.js';
 import { __decoratorStart, __decorateElement, __runInitializers } from './chunk-C6A6W6XS.js';
 import { randomUUID } from 'crypto';
 import { z } from 'zod';
-import { generateText,
+import { jsonSchema, generateText, Output, generateObject, streamText, streamObject } from 'ai';

 // src/llm/model/base.ts
 var MastraLLMBase = class extends MastraBase {
@@ -181,7 +181,8 @@ var MastraLLM = class extends MastraLLMBase {
 convertedTools,
 temperature,
 toolChoice = "auto",
-onStepFinish
+onStepFinish,
+experimental_output
 }) {
 const model = this.#model;
 this.logger.debug(`[LLM] - Generating text`, {
@@ -213,15 +214,34 @@ var MastraLLM = class extends MastraLLMBase {
 runId
 });
 if (props?.response?.headers?.["x-ratelimit-remaining-tokens"] && parseInt(props?.response?.headers?.["x-ratelimit-remaining-tokens"], 10) < 2e3) {
-this.logger.warn("Rate limit approaching, waiting 10 seconds"
+this.logger.warn("Rate limit approaching, waiting 10 seconds", {
+runId
+});
 await delay(10 * 1e3);
 }
 }
 };
+let schema;
+if (experimental_output) {
+this.logger.debug("[LLM] - Using experimental output", {
+runId
+});
+if (typeof experimental_output.parse === "function") {
+schema = experimental_output;
+if (schema instanceof z.ZodArray) {
+schema = schema._def.type;
+}
+} else {
+schema = jsonSchema(experimental_output);
+}
+}
 return await generateText({
 messages,
 ...argsForExecute,
-experimental_telemetry: this.experimental_telemetry
+experimental_telemetry: this.experimental_telemetry,
+experimental_output: schema ? Output.object({
+schema
+}) : void 0
 });
 }
 async __textObject({
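The new `let schema` block above normalizes experimental_output before it reaches the AI SDK: a value with a .parse function is treated as a Zod schema (Zod arrays are unwrapped to their element type), anything else is wrapped with jsonSchema(), and the result is passed to generateText as Output.object({ schema }). A standalone TypeScript sketch of that logic, using an illustrative helper name that the package does not actually export:

import { jsonSchema, Output } from 'ai';
import { z, ZodSchema } from 'zod';

// Illustrative helper mirroring the normalization performed in __text/__stream above.
function toExperimentalOutput(experimental_output?: ZodSchema | Record<string, unknown>) {
  if (!experimental_output) return undefined;
  let schema: any = experimental_output;
  if (typeof schema.parse === 'function') {
    // Zod schema: unwrap arrays so Output.object receives the element schema.
    if (schema instanceof z.ZodArray) {
      schema = schema._def.type;
    }
  } else {
    // Plain JSON Schema object: wrap it with the AI SDK's jsonSchema() helper.
    schema = jsonSchema(schema);
  }
  return Output.object({ schema });
}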
@@ -297,7 +317,8 @@ var MastraLLM = class extends MastraLLMBase {
 convertedTools,
 runId,
 temperature,
-toolChoice = "auto"
+toolChoice = "auto",
+experimental_output
 }) {
 const model = this.#model;
 this.logger.debug(`[LLM] - Streaming text`, {
@@ -347,10 +368,27 @@ var MastraLLM = class extends MastraLLMBase {
 });
 }
 };
+let schema;
+if (experimental_output) {
+this.logger.debug("[LLM] - Using experimental output", {
+runId
+});
+if (typeof experimental_output.parse === "function") {
+schema = experimental_output;
+if (schema instanceof z.ZodArray) {
+schema = schema._def.type;
+}
+} else {
+schema = jsonSchema(experimental_output);
+}
+}
 return await streamText({
 messages,
 ...argsForExecute,
-experimental_telemetry: this.experimental_telemetry
+experimental_telemetry: this.experimental_telemetry,
+experimental_output: schema ? Output.object({
+schema
+}) : void 0
 });
 }
 async __streamObject({
@@ -712,24 +750,10 @@ var Agent = class extends (_a = MastraBase) {
 }) : null;
 return {
 threadId: thread.id,
-messages: [{
+messages: [memorySystemMessage ? {
 role: "system",
-content:
-
-Analyze this message to determine if the user is referring to a previous conversation with the LLM.
-Specifically, identify if the user wants to reference specific information from that chat or if they want the LLM to use the previous chat messages as context for the current conversation.
-Extract any date ranges mentioned in the user message that could help identify the previous chat.
-Return dates in ISO format.
-If no specific dates are mentioned but time periods are (like "last week" or "past month"), calculate the appropriate date range.
-For the end date, return the date 1 day after the end of the time period.
-Today's date is ${(/* @__PURE__ */new Date()).toISOString()} and the time is ${(/* @__PURE__ */new Date()).toLocaleTimeString("en-US", {
-hour: "numeric",
-minute: "numeric",
-hour12: true
-})} ${memorySystemMessage ? `
-
-${memorySystemMessage}` : ""}`
-}, ...this.sanitizeResponseMessages(memoryMessages), ...newMessages]
+content: memorySystemMessage
+} : null, ...this.sanitizeResponseMessages(memoryMessages), ...newMessages].filter(message => Boolean(message))
 };
 }
 return {
@@ -994,7 +1018,7 @@ ${memorySystemMessage}` : ""}`
 }
 const systemMessage = {
 role: "system",
-content: `${this.instructions}
+content: `${this.instructions}.`
 };
 let coreMessages = messages;
 let threadIdToUse = threadId;
@@ -1118,7 +1142,8 @@ ${memorySystemMessage}` : ""}`
 toolsets,
 output = "text",
 temperature,
-toolChoice = "auto"
+toolChoice = "auto",
+experimental_output
 } = {}) {
 let messagesToUse = [];
 if (typeof messages === `string`) {
@@ -1155,6 +1180,30 @@ ${memorySystemMessage}` : ""}`
 messageObjects,
 convertedTools
 } = await before();
+if (output === "text" && experimental_output) {
+const result2 = await this.llm.__text({
+messages: messageObjects,
+tools: this.tools,
+convertedTools,
+onStepFinish,
+maxSteps,
+runId: runIdToUse,
+temperature,
+toolChoice,
+experimental_output
+});
+const outputText2 = result2.text;
+await after({
+result: result2,
+threadId,
+memoryConfig: memoryOptions,
+outputText: outputText2,
+runId: runIdToUse
+});
+const newResult = result2;
+newResult.object = result2.experimental_output;
+return newResult;
+}
 if (output === "text") {
 const result2 = await this.llm.__text({
 messages: messageObjects,
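This new branch runs only when output is left at its default of "text" and experimental_output is set: it forwards the schema to __text and then copies result.experimental_output onto result.object, so the parsed value sits next to the usual text result. A hedged TypeScript usage sketch (the agent instance and schema are placeholders, not part of this diff):

import { z } from 'zod';

const recipeSchema = z.object({ name: z.string(), ingredients: z.array(z.string()) });

// `agent` stands in for any configured Mastra Agent instance.
async function generateRecipe(agent: any) {
  const result = await agent.generate('Suggest a quick pasta recipe', {
    experimental_output: recipeSchema,
  });
  console.log(result.text);   // the normal text result, tool calls still allowed
  console.log(result.object); // the structured output copied from result.experimental_output
}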
@@ -1209,7 +1258,8 @@ ${memorySystemMessage}` : ""}`
 toolsets,
 output = "text",
 temperature,
-toolChoice = "auto"
+toolChoice = "auto",
+experimental_output
 } = {}) {
 const runIdToUse = runId || randomUUID();
 let messagesToUse = [];
@@ -1246,7 +1296,44 @@ ${memorySystemMessage}` : ""}`
 messageObjects,
 convertedTools
 } = await before();
-if (output === "text") {
+if (output === "text" && experimental_output) {
+this.logger.debug(`Starting agent ${this.name} llm stream call`, {
+runId
+});
+const streamResult = await this.llm.__stream({
+messages: messageObjects,
+temperature,
+tools: this.tools,
+convertedTools,
+onStepFinish,
+onFinish: async result => {
+try {
+const res = JSON.parse(result) || {};
+const outputText = res.text;
+await after({
+result: res,
+threadId,
+memoryConfig: memoryOptions,
+outputText,
+runId: runIdToUse
+});
+} catch (e) {
+this.logger.error("Error saving memory on finish", {
+error: e,
+runId
+});
+}
+onFinish?.(result);
+},
+maxSteps,
+runId: runIdToUse,
+toolChoice,
+experimental_output
+});
+const newStreamResult = streamResult;
+newStreamResult.partialObjectStream = streamResult.experimental_partialOutputStream;
+return newStreamResult;
+} else if (output === "text") {
 this.logger.debug(`Starting agent ${this.name} llm stream call`, {
 runId
 });
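The streaming branch mirrors the generate path: when output is "text" and experimental_output is set, it calls __stream and aliases the AI SDK's experimental_partialOutputStream as partialObjectStream on the returned result. A hedged TypeScript usage sketch (agent and schema are placeholders, not part of this diff):

import { z } from 'zod';

const profileSchema = z.object({ name: z.string(), city: z.string() });

// `agent` stands in for any configured Mastra Agent instance.
async function streamProfile(agent: any) {
  const stream = await agent.stream('Extract the profile from this bio: ...', {
    experimental_output: profileSchema,
  });
  // Partial structured values can be read as they arrive via the aliased stream.
  for await (const partial of stream.partialObjectStream) {
    console.log(partial); // progressively filled { name?, city? } objects
  }
}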
package/dist/eval/index.d.ts
CHANGED
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { a as Metric } from './types-m9RryK9a.js';
 export { M as MetricResult, T as TestInfo } from './types-m9RryK9a.js';
-import { T as ToolAction, A as Agent$1, i as AgentConfig, M as MastraStorage$1, a as MastraMemory$1, j as ToolExecutionContext, W as WorkflowOptions } from './base-BVXgbD4Q.js';
-export { Y as ActionContext, X as BaseCondition, B as BaseStructuredOutputType, o as CoreAssistantMessage, C as CoreMessage, n as CoreSystemMessage, Q as CoreTool, q as CoreToolMessage, p as CoreUserMessage, a5 as DependencyCheckOutput, s as EmbedManyResult, r as EmbedResult, E as EvalRow, ab as ExtractSchemaFromStep, ae as ExtractSchemaType, ac as ExtractStepResult, G as GenerateReturn, D as LLMInnerStreamOptions, F as LLMStreamObjectOptions,
+import { T as ToolAction, A as Agent$1, i as AgentConfig, M as MastraStorage$1, a as MastraMemory$1, j as ToolExecutionContext, W as WorkflowOptions } from './base-HowPMmsb.js';
+export { Y as ActionContext, X as BaseCondition, B as BaseStructuredOutputType, o as CoreAssistantMessage, C as CoreMessage, n as CoreSystemMessage, Q as CoreTool, q as CoreToolMessage, p as CoreUserMessage, a5 as DependencyCheckOutput, s as EmbedManyResult, r as EmbedResult, E as EvalRow, ab as ExtractSchemaFromStep, ae as ExtractSchemaType, ac as ExtractStepResult, G as GenerateReturn, D as LLMInnerStreamOptions, F as LLMStreamObjectOptions, z as LLMStreamOptions, y as LLMTextObjectOptions, x as LLMTextOptions, L as LanguageModel, N as MemoryConfig, K as MessageResponse, H as MessageType, O as OutputType, af as PathsToStringProps, a2 as ResolverFunctionInput, a3 as ResolverFunctionOutput, R as RetryConfig, P as SharedMemoryConfig, b as StepAction, _ as StepCondition, e as StepConfig, Z as StepDef, S as StepExecutionContext, h as StepGraph, aa as StepId, ad as StepInputType, U as StepNode, f as StepResult, d as StepVariableType, k as StorageColumn, m as StorageGetMessagesArg, J as StorageThreadType, w as StreamReturn, v as StructuredOutput, u as StructuredOutputArrayItem, t as StructuredOutputType, a4 as SubscriberFunctionOutput, V as VariableReference, a7 as WorkflowActionParams, a8 as WorkflowActions, a6 as WorkflowActors, $ as WorkflowContext, a1 as WorkflowEvent, a0 as WorkflowLogMessage, l as WorkflowRow, g as WorkflowRunState, a9 as WorkflowState } from './base-HowPMmsb.js';
 import { M as MastraBase$1 } from './base-eWkcLLSb.js';
 export { O as OtelConfig, S as SamplingStrategy, T as Telemetry } from './base-eWkcLLSb.js';
 import { R as RegisteredLogger, a as LogLevel, T as TransportMap, L as Logger } from './index-C5uPdbs4.js';
@@ -20,8 +20,8 @@ import { MastraTTS as MastraTTS$1, TTSConfig } from './tts/index.js';
 export { TagMaskOptions, deepMerge, delay, jsonSchemaPropertiesToTSTypes, jsonSchemaToModel, maskStreamTags } from './utils.js';
 import { MastraVector as MastraVector$1 } from './vector/index.js';
 export { IndexStats, QueryResult, defaultEmbedder } from './vector/index.js';
-import { S as Step, W as Workflow$1 } from './workflow-B_sRFHFT.js';
-export { c as createStep } from './workflow-B_sRFHFT.js';
+import { S as Step, W as Workflow$1 } from './workflow-CPz2D0Wz.js';
+export { c as createStep } from './workflow-CPz2D0Wz.js';
 export { getStepResult, isErrorEvent, isTransitionEvent, isVariableReference } from './workflows/index.js';
 export { AvailableHooks, executeHook, registerHook } from './hooks/index.js';
 export { ArrayOperator, BaseFilterTranslator, BasicOperator, ElementOperator, FieldCondition, Filter, LogicalOperator, NumericOperator, OperatorCondition, OperatorSupport, QueryOperator, RegexOperator } from './filter/index.js';
package/dist/index.js
CHANGED
@@ -6,13 +6,13 @@ import { Tool } from './chunk-ZINPRHAN.js';
 export { createTool } from './chunk-ZINPRHAN.js';
 export { Mastra } from './chunk-HPIB5X7E.js';
 import { MastraMemory } from './chunk-XD7K4XPP.js';
-export { CohereRelevanceScorer, MastraAgentRelevanceScorer, createSimilarityPrompt } from './chunk-QW25LZSR.js';
+export { CohereRelevanceScorer, MastraAgentRelevanceScorer, createSimilarityPrompt } from './chunk-VGVST36A.js';
 import { MastraStorage, DefaultStorage } from './chunk-JIV6PDIN.js';
 export { DefaultStorage } from './chunk-JIV6PDIN.js';
 export { DefaultVectorDB, DefaultVectorDB as LibSQLVector } from './chunk-LKOVXFLE.js';
 import { MastraVector } from './chunk-7NPRQT5A.js';
 export { defaultEmbedder } from './chunk-7NPRQT5A.js';
-import { Agent } from './chunk-ICUX73VZ.js';
+import { Agent } from './chunk-T5B2DATI.js';
 export { InstrumentClass, OTLPTraceExporter as OTLPStorageExporter, Telemetry, hasActiveTelemetry, withSpan } from './chunk-4ZRHVG25.js';
 export { deepMerge, delay, jsonSchemaPropertiesToTSTypes, jsonSchemaToModel, maskStreamTags } from './chunk-WIBGG4X6.js';
 import { MastraDeployer } from './chunk-Z735LDV7.js';
package/dist/integration/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import { W as Workflow } from '../workflow-B_sRFHFT.js';
+import { W as Workflow } from '../workflow-CPz2D0Wz.js';
 import '../base-eWkcLLSb.js';
-import { T as ToolAction } from '../base-BVXgbD4Q.js';
+import { T as ToolAction } from '../base-HowPMmsb.js';
 import 'xstate';
 import 'zod';
 import '@opentelemetry/api';
package/dist/llm/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import 'ai';
 import 'json-schema';
 import 'zod';
-export { B as BaseStructuredOutputType, o as CoreAssistantMessage, C as CoreMessage, n as CoreSystemMessage, q as CoreToolMessage, p as CoreUserMessage, s as EmbedManyResult, r as EmbedResult, G as GenerateReturn, D as LLMInnerStreamOptions, F as LLMStreamObjectOptions,
+export { B as BaseStructuredOutputType, o as CoreAssistantMessage, C as CoreMessage, n as CoreSystemMessage, q as CoreToolMessage, p as CoreUserMessage, s as EmbedManyResult, r as EmbedResult, G as GenerateReturn, D as LLMInnerStreamOptions, F as LLMStreamObjectOptions, z as LLMStreamOptions, y as LLMTextObjectOptions, x as LLMTextOptions, L as LanguageModel, O as OutputType, w as StreamReturn, v as StructuredOutput, u as StructuredOutputArrayItem, t as StructuredOutputType } from '../base-HowPMmsb.js';
 import '../index-C5uPdbs4.js';
 import '../base-eWkcLLSb.js';
 import '@opentelemetry/api';
package/dist/mastra/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import { A as Agent, M as MastraStorage, a as MastraMemory } from '../base-BVXgbD4Q.js';
+import { A as Agent, M as MastraStorage, a as MastraMemory } from '../base-HowPMmsb.js';
 import { L as Logger, B as BaseLogMessage } from '../index-C5uPdbs4.js';
-import { W as Workflow } from '../workflow-B_sRFHFT.js';
+import { W as Workflow } from '../workflow-CPz2D0Wz.js';
 import { MastraVector } from '../vector/index.js';
 import { O as OtelConfig, T as Telemetry } from '../base-eWkcLLSb.js';
 import { MastraTTS } from '../tts/index.js';
package/dist/memory/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export { a as MastraMemory, N as MemoryConfig, K as MessageResponse, H as MessageType, P as SharedMemoryConfig, J as StorageThreadType } from '../base-BVXgbD4Q.js';
+export { a as MastraMemory, N as MemoryConfig, K as MessageResponse, H as MessageType, P as SharedMemoryConfig, J as StorageThreadType } from '../base-HowPMmsb.js';
 export { Message as AiMessageType } from 'ai';
 import '../base-eWkcLLSb.js';
 import '@opentelemetry/api';
package/dist/relevance/index.js
CHANGED
@@ -1 +1 @@
-export { CohereRelevanceScorer, MastraAgentRelevanceScorer, createSimilarityPrompt } from '../chunk-QW25LZSR.js';
+export { CohereRelevanceScorer, MastraAgentRelevanceScorer, createSimilarityPrompt } from '../chunk-VGVST36A.js';
package/dist/storage/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { M as MastraStorage, ag as TABLE_NAMES, k as StorageColumn, J as StorageThreadType, H as MessageType, m as StorageGetMessagesArg, E as EvalRow } from '../base-BVXgbD4Q.js';
-export { ai as TABLE_EVALS, aj as TABLE_MESSAGES, ak as TABLE_THREADS, al as TABLE_TRACES, ah as TABLE_WORKFLOW_SNAPSHOT, l as WorkflowRow } from '../base-BVXgbD4Q.js';
+import { M as MastraStorage, ag as TABLE_NAMES, k as StorageColumn, J as StorageThreadType, H as MessageType, m as StorageGetMessagesArg, E as EvalRow } from '../base-HowPMmsb.js';
+export { ai as TABLE_EVALS, aj as TABLE_MESSAGES, ak as TABLE_THREADS, al as TABLE_TRACES, ah as TABLE_WORKFLOW_SNAPSHOT, l as WorkflowRow } from '../base-HowPMmsb.js';
 import '../base-eWkcLLSb.js';
 export { LibSQLVector as DefaultVectorDB, LibSQLVector } from '../vector/libsql/index.js';
 import 'ai';
package/dist/telemetry/index.d.ts
CHANGED
@@ -3,7 +3,7 @@ import { SpanKind } from '@opentelemetry/api';
 import { ExportResult } from '@opentelemetry/core';
 import { SpanExporter, ReadableSpan } from '@opentelemetry/sdk-trace-base';
 import { L as Logger } from '../index-C5uPdbs4.js';
-import { M as MastraStorage } from '../base-BVXgbD4Q.js';
+import { M as MastraStorage } from '../base-HowPMmsb.js';
 import 'pino';
 import 'stream';
 import 'ai';
package/dist/tools/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { z } from 'zod';
-import { j as ToolExecutionContext, T as ToolAction, c as MastraPrimitives } from '../base-BVXgbD4Q.js';
-export { Q as CoreTool } from '../base-BVXgbD4Q.js';
+import { j as ToolExecutionContext, T as ToolAction, c as MastraPrimitives } from '../base-HowPMmsb.js';
+export { Q as CoreTool } from '../base-HowPMmsb.js';
 import 'ai';
 import '../base-eWkcLLSb.js';
 import '@opentelemetry/api';
package/dist/{workflow-B_sRFHFT.d.ts → workflow-CPz2D0Wz.d.ts}
RENAMED
@@ -1,6 +1,6 @@
 import { Snapshot } from 'xstate';
 import { z } from 'zod';
-import { S as StepExecutionContext, b as StepAction, R as RetryConfig, c as MastraPrimitives, W as WorkflowOptions, I as IAction, d as StepVariableType, e as StepConfig, f as StepResult, g as WorkflowRunState, h as StepGraph } from './base-BVXgbD4Q.js';
+import { S as StepExecutionContext, b as StepAction, R as RetryConfig, c as MastraPrimitives, W as WorkflowOptions, I as IAction, d as StepVariableType, e as StepConfig, f as StepResult, g as WorkflowRunState, h as StepGraph } from './base-HowPMmsb.js';
 import { M as MastraBase } from './base-eWkcLLSb.js';

 declare class Step<TStepId extends string = any, TSchemaIn extends z.ZodSchema | undefined = undefined, TSchemaOut extends z.ZodSchema | undefined = undefined, TContext extends StepExecutionContext<TSchemaIn> = StepExecutionContext<TSchemaIn>> implements StepAction<TStepId, TSchemaIn, TSchemaOut, TContext> {
package/dist/workflows/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
-export { S as Step, W as Workflow, c as createStep } from '../workflow-B_sRFHFT.js';
-import { V as VariableReference, f as StepResult } from '../base-BVXgbD4Q.js';
-export { Y as ActionContext, X as BaseCondition, a5 as DependencyCheckOutput, ab as ExtractSchemaFromStep, ae as ExtractSchemaType, ac as ExtractStepResult, af as PathsToStringProps, a2 as ResolverFunctionInput, a3 as ResolverFunctionOutput, R as RetryConfig, b as StepAction, _ as StepCondition, e as StepConfig, Z as StepDef, S as StepExecutionContext, h as StepGraph, aa as StepId, ad as StepInputType, U as StepNode, d as StepVariableType, a4 as SubscriberFunctionOutput, a7 as WorkflowActionParams, a8 as WorkflowActions, a6 as WorkflowActors, $ as WorkflowContext, a1 as WorkflowEvent, a0 as WorkflowLogMessage, W as WorkflowOptions, g as WorkflowRunState, a9 as WorkflowState } from '../base-BVXgbD4Q.js';
+export { S as Step, W as Workflow, c as createStep } from '../workflow-CPz2D0Wz.js';
+import { V as VariableReference, f as StepResult } from '../base-HowPMmsb.js';
+export { Y as ActionContext, X as BaseCondition, a5 as DependencyCheckOutput, ab as ExtractSchemaFromStep, ae as ExtractSchemaType, ac as ExtractStepResult, af as PathsToStringProps, a2 as ResolverFunctionInput, a3 as ResolverFunctionOutput, R as RetryConfig, b as StepAction, _ as StepCondition, e as StepConfig, Z as StepDef, S as StepExecutionContext, h as StepGraph, aa as StepId, ad as StepInputType, U as StepNode, d as StepVariableType, a4 as SubscriberFunctionOutput, a7 as WorkflowActionParams, a8 as WorkflowActions, a6 as WorkflowActors, $ as WorkflowContext, a1 as WorkflowEvent, a0 as WorkflowLogMessage, W as WorkflowOptions, g as WorkflowRunState, a9 as WorkflowState } from '../base-HowPMmsb.js';
 import 'xstate';
 import 'zod';
 import '../base-eWkcLLSb.js';