@mastra/rag 0.1.19-alpha.2 → 0.1.19-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,40 @@
+ import { createOpenAI } from '@ai-sdk/openai';
+ import type { MastraLanguageModel } from '@mastra/core/agent';
+ import type {
+   KeywordExtractPrompt,
+   QuestionExtractPrompt,
+   SummaryPrompt,
+   TitleExtractorPrompt,
+   TitleCombinePrompt,
+ } from 'llamaindex';
+
+ export type KeywordExtractArgs = {
+   llm?: MastraLanguageModel;
+   keywords?: number;
+   promptTemplate?: KeywordExtractPrompt['template'];
+ };
+
+ export type QuestionAnswerExtractArgs = {
+   llm?: MastraLanguageModel;
+   questions?: number;
+   promptTemplate?: QuestionExtractPrompt['template'];
+   embeddingOnly?: boolean;
+ };
+
+ export type SummaryExtractArgs = {
+   llm?: MastraLanguageModel;
+   summaries?: string[];
+   promptTemplate?: SummaryPrompt['template'];
+ };
+
+ export type TitleExtractorsArgs = {
+   llm?: MastraLanguageModel;
+   nodes?: number;
+   nodeTemplate?: TitleExtractorPrompt['template'];
+   combineTemplate?: TitleCombinePrompt['template'];
+ };
+
+ export const STRIP_REGEX = /(\r\n|\n|\r)/gm;
+
+ const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });
+ export const baseLLM: MastraLanguageModel = openai('gpt-4o');
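The hunk above adds a new extractors module: each extractor's options take an optional llm typed as MastraLanguageModel (an AI SDK language model), and a default baseLLM built from gpt-4o is exported. As a rough illustration only, not part of the diff, and assuming these types are consumed from the ./extractors path shown here, extractor options could be assembled like this:

import { createOpenAI } from '@ai-sdk/openai';
import { baseLLM } from './extractors';
import type { KeywordExtractArgs, TitleExtractorsArgs } from './extractors';

// Use the exported default model (gpt-4o) for keyword extraction options.
const keywordArgs: KeywordExtractArgs = {
  llm: baseLLM,
  keywords: 5,
};

// Or supply any AI SDK model where a MastraLanguageModel is expected;
// 'gpt-4o-mini' is an illustrative choice, not something the diff prescribes.
const customModel = createOpenAI({ apiKey: process.env.OPENAI_API_KEY })('gpt-4o-mini');
const titleArgs: TitleExtractorsArgs = {
  llm: customModel,
  nodes: 5,
};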
@@ -1,12 +1,10 @@
  import type { TiktokenEncoding, TiktokenModel } from 'js-tiktoken';
  import type {
-   LLM,
-   TitleCombinePrompt,
-   TitleExtractorPrompt,
-   SummaryPrompt,
-   QuestionExtractPrompt,
-   KeywordExtractPrompt,
- } from 'llamaindex';
+   TitleExtractorsArgs,
+   SummaryExtractArgs,
+   QuestionAnswerExtractArgs,
+   KeywordExtractArgs,
+ } from './extractors';

  export enum Language {
    CPP = 'cpp',
@@ -69,32 +67,6 @@ export type ChunkOptions = {
    stripHeaders?: boolean;
  };

- export type TitleExtractorsArgs = {
-   llm?: LLM;
-   nodes?: number;
-   nodeTemplate?: TitleExtractorPrompt['template'];
-   combineTemplate?: TitleCombinePrompt['template'];
- };
-
- export type SummaryExtractArgs = {
-   llm?: LLM;
-   summaries?: string[];
-   promptTemplate?: SummaryPrompt['template'];
- };
-
- export type QuestionAnswerExtractArgs = {
-   llm?: LLM;
-   questions?: number;
-   promptTemplate?: QuestionExtractPrompt['template'];
-   embeddingOnly?: boolean;
- };
-
- export type KeywordExtractArgs = {
-   llm?: LLM;
-   keywords?: number;
-   promptTemplate?: KeywordExtractPrompt['template'];
- };
-
  export type ChunkStrategy = 'recursive' | 'character' | 'token' | 'markdown' | 'html' | 'json' | 'latex';

  export interface ChunkParams extends ChunkOptions {
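The two hunks above remove the old argument type definitions, which typed llm as llamaindex's LLM, and instead import the new ones from ./extractors. The practical change for code that builds these option objects (sketched below; not shown in the diff, and the model and summary values are illustrative) is that llm now takes an AI SDK model rather than a llamaindex LLM instance:

import { createOpenAI } from '@ai-sdk/openai';
import type { SummaryExtractArgs } from './extractors';

// 0.1.19-alpha.2: llm?: LLM                  (a llamaindex LLM instance)
// 0.1.19-alpha.4: llm?: MastraLanguageModel  (an AI SDK language model)
const summaryArgs: SummaryExtractArgs = {
  llm: createOpenAI({ apiKey: process.env.OPENAI_API_KEY })('gpt-4o-mini'),
  // 'self' mirrors llamaindex's summary extractor options; treat it as an example value.
  summaries: ['self'],
};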