@eventcatalog/core 2.37.4 → 2.38.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analytics/analytics.cjs +1 -1
- package/dist/analytics/analytics.js +2 -2
- package/dist/analytics/log-build.cjs +1 -1
- package/dist/analytics/log-build.js +3 -3
- package/dist/{chunk-3K3YK3GK.js → chunk-A6PSUFM3.js} +1 -1
- package/dist/{chunk-GJX7CKPB.js → chunk-C45PRE4R.js} +1 -1
- package/dist/{chunk-NRW4AZMT.js → chunk-CM25PJCS.js} +1 -1
- package/dist/constants.cjs +1 -1
- package/dist/constants.js +1 -1
- package/dist/eventcatalog.cjs +1 -1
- package/dist/eventcatalog.config.d.cts +6 -0
- package/dist/eventcatalog.config.d.ts +6 -0
- package/dist/eventcatalog.js +3 -3
- package/eventcatalog/src/components/MDX/NodeGraph/NodeGraph.astro +30 -1
- package/eventcatalog/src/components/MDX/NodeGraph/NodeGraph.tsx +32 -2
- package/eventcatalog/src/enterprise/eventcatalog-chat/components/windows/ChatWindow.server.tsx +5 -2
- package/eventcatalog/src/enterprise/eventcatalog-chat/providers/ai-provider.ts +58 -0
- package/eventcatalog/src/enterprise/eventcatalog-chat/providers/anthropic.ts +28 -0
- package/eventcatalog/src/enterprise/eventcatalog-chat/providers/google.ts +41 -0
- package/eventcatalog/src/enterprise/eventcatalog-chat/providers/index.ts +26 -0
- package/eventcatalog/src/enterprise/eventcatalog-chat/providers/openai.ts +61 -0
- package/eventcatalog/src/enterprise/eventcatalog-chat/utils/ai.ts +19 -61
- package/eventcatalog/src/utils/protocols.tsx +1 -1
- package/package.json +3 -1
package/dist/analytics/log-build.js CHANGED
@@ -1,8 +1,8 @@
 import {
   log_build_default
-} from "../chunk-GJX7CKPB.js";
-import "../chunk-3K3YK3GK.js";
-import "../chunk-NRW4AZMT.js";
+} from "../chunk-C45PRE4R.js";
+import "../chunk-A6PSUFM3.js";
+import "../chunk-CM25PJCS.js";
 import "../chunk-E7TXTI7G.js";
 export {
   log_build_default as default
package/dist/constants.cjs CHANGED
package/dist/constants.js CHANGED
package/dist/eventcatalog.cjs CHANGED
package/dist/eventcatalog.config.d.cts CHANGED
@@ -75,9 +75,15 @@ interface Config {
   };
   chat?: {
     enabled: boolean;
+    provider?: 'openai' | 'anthropic' | 'google';
     model?: string;
     max_tokens?: number;
     similarityResults?: number;
+    temperature?: number;
+    topP?: number;
+    topK?: number;
+    frequencyPenalty?: number;
+    presencePenalty?: number;
   };
   customDocs?: {
     sidebar?: (ManualSideBarConfig | AutoGeneratedSideBarConfig)[];
package/dist/eventcatalog.config.d.ts CHANGED
Identical hunk to eventcatalog.config.d.cts above: the same six optional chat fields (provider, temperature, topP, topK, frequencyPenalty, presencePenalty) are added to the Config interface.
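For orientation, a sketch of how these new chat options might be set in a catalog's eventcatalog.config.js (values are illustrative; only enabled is required by the type above, temperature defaults to 0.2 in the new AIProvider base class, and the other sampling options are only passed through when set):

export default {
  // ...existing catalog config...
  chat: {
    enabled: true,
    provider: 'anthropic', // 'openai' | 'anthropic' | 'google'
    model: 'claude-3-7-sonnet-20250219',
    max_tokens: 4096,
    similarityResults: 50,
    temperature: 0.2,
    topP: 1,
    topK: 40,
    frequencyPenalty: 0,
    presencePenalty: 0,
  },
};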
package/dist/eventcatalog.js CHANGED
@@ -6,15 +6,15 @@ import {
 } from "./chunk-DCLTVJDP.js";
 import {
   log_build_default
-} from "./chunk-GJX7CKPB.js";
-import "./chunk-3K3YK3GK.js";
+} from "./chunk-C45PRE4R.js";
+import "./chunk-A6PSUFM3.js";
 import {
   catalogToAstro,
   checkAndConvertMdToMdx
 } from "./chunk-SLEMYHTU.js";
 import {
   VERSION
-} from "./chunk-NRW4AZMT.js";
+} from "./chunk-CM25PJCS.js";
 import {
   isBackstagePluginEnabled,
   isEventCatalogScaleEnabled,
package/eventcatalog/src/components/MDX/NodeGraph/NodeGraph.astro CHANGED
@@ -11,7 +11,9 @@ import {
   getNodesAndEdgesForDomainContextMap,
 } from '@utils/node-graphs/domains-node-graph';
 import { getNodesAndEdges as getNodesAndEdgesForFlows } from '@utils/node-graphs/flows-node-graph';
-
+import { buildUrl } from '@utils/url-builder';
+import { getVersionFromCollection } from '@utils/collections/versions';
+import { pageDataLoader } from '@utils/page-loaders/page-data-loader';
 interface Props {
   id: string;
   collection: string;
@@ -40,6 +42,8 @@ const getNodesAndEdgesFunctions = {
   flows: getNodesAndEdgesForFlows,
 };
 
+let links: { label: string; url: string }[] = [];
+
 if (collection in getNodesAndEdgesFunctions) {
   const { nodes: fetchedNodes, edges: fetchedEdges } = await getNodesAndEdgesFunctions[
     collection as keyof typeof getNodesAndEdgesFunctions
@@ -51,6 +55,30 @@ if (collection in getNodesAndEdgesFunctions) {
 
   nodes = fetchedNodes;
   edges = fetchedEdges;
+
+  if (mode === 'full') {
+    // Try and get the list of versions for the rendered item
+    try {
+      const allItems = await pageDataLoader[collection as keyof typeof pageDataLoader]();
+      const versions = getVersionFromCollection(allItems, id, version);
+
+      const item = versions[0];
+      const listOfVersions = item.data.versions || [];
+
+      // Order by version
+      listOfVersions.sort((a, b) => b.localeCompare(a));
+
+      if (listOfVersions.length > 1) {
+        links = listOfVersions.map((version) => ({
+          label: `${item.data.name} v${version}`,
+          url: buildUrl(`/visualiser/${collection}/${id}/${version}`),
+          selected: version === version,
+        }));
+      }
+    } catch (error) {
+      links = [];
+    }
+  }
 }
 
 if (collection === 'domain-context-map') {
@@ -71,6 +99,7 @@ if (collection === 'domain-context-map') {
       linkTo={linkTo}
       client:only="react"
       linksToVisualiser={linksToVisualiser}
+      links={links}
     />
   </div>
 
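For context, when mode is 'full' and the rendered item has more than one version, the links array built above ends up shaped roughly like this (assuming a service OrderService in the services collection with versions 1.0.0 and 0.5.0, and ignoring any base path buildUrl may prepend; note the map callback shadows the outer version variable, so selected is always true as written):

[
  { label: 'OrderService v1.0.0', url: '/visualiser/services/OrderService/1.0.0', selected: true },
  { label: 'OrderService v0.5.0', url: '/visualiser/services/OrderService/0.5.0', selected: true },
]

NodeGraph.tsx (next file) consumes only label and url to render the version dropdown.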
package/eventcatalog/src/components/MDX/NodeGraph/NodeGraph.tsx CHANGED
@@ -14,7 +14,7 @@ import {
   useReactFlow,
 } from '@xyflow/react';
 import '@xyflow/react/dist/style.css';
-
+import { HistoryIcon } from 'lucide-react';
 // Nodes and edges
 import ServiceNode from './Nodes/Service';
 import FlowNode from './Nodes/Flow';
@@ -46,6 +46,7 @@ interface Props {
   linkTo: 'docs' | 'visualiser';
   includeKey?: boolean;
   linksToVisualiser?: boolean;
+  links?: { label: string; url: string }[];
 }
 
 const getVisualiserUrlForCollection = (collectionItem: CollectionEntry<CollectionTypes>) => {
@@ -60,6 +61,7 @@ const NodeGraphBuilder = ({
   linkTo = 'docs',
   includeKey = true,
   linksToVisualiser = false,
+  links = [],
 }: Props) => {
   const nodeTypes = useMemo(
     () => ({
@@ -317,8 +319,33 @@ const NodeGraphBuilder = ({
             {title}
           </span>
         )}
-        <div className="flex justify-end ">
+        <div className="flex justify-end space-x-2">
           <DownloadButton filename={title} addPadding={false} />
+          {/* // Dropdown for links */}
+          {links.length > 0 && (
+            <div className="relative flex items-center -mt-1">
+              <span className="absolute left-2 pointer-events-none flex items-center h-full">
+                <HistoryIcon className="h-4 w-4 text-gray-600" />
+              </span>
+              <select
+                value={links.find((link) => window.location.href.includes(link.url))?.url || links[0].url}
+                onChange={(e) => navigate(e.target.value)}
+                className="appearance-none pl-7 pr-6 py-0 text-[14px] bg-white rounded-md border border-gray-200 hover:bg-gray-100/50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-purple-500"
+                style={{ minWidth: 120, height: '26px' }}
+              >
+                {links.map((link) => (
+                  <option key={link.url} value={link.url}>
+                    {link.label}
+                  </option>
+                ))}
+              </select>
+              <span className="absolute right-2 pointer-events-none">
+                <svg className="w-4 h-4 text-gray-400" fill="none" stroke="currentColor" strokeWidth="2" viewBox="0 0 24 24">
+                  <path strokeLinecap="round" strokeLinejoin="round" d="M19 9l-7 7-7-7" />
+                </svg>
+              </span>
+            </div>
+          )}
         </div>
       </div>
     </Panel>
@@ -409,6 +436,7 @@ interface NodeGraphProps {
   includeKey?: boolean;
   footerLabel?: string;
   linksToVisualiser?: boolean;
+  links?: { label: string; url: string }[];
 }
 
 const NodeGraph = ({
@@ -422,6 +450,7 @@ const NodeGraph = ({
   includeKey = true,
   footerLabel,
   linksToVisualiser = false,
+  links = [],
 }: NodeGraphProps) => {
   const [elem, setElem] = useState(null);
   const [showFooter, setShowFooter] = useState(true);
@@ -452,6 +481,7 @@ const NodeGraph = ({
           linkTo={linkTo}
           includeKey={includeKey}
           linksToVisualiser={linksToVisualiser}
+          links={links}
         />
 
         {showFooter && (
package/eventcatalog/src/enterprise/eventcatalog-chat/components/windows/ChatWindow.server.tsx CHANGED
@@ -20,6 +20,7 @@ interface Resource {
 
 interface ChatWindowProps {
   model?: string;
+  provider?: string;
   embeddingModel?: string;
   max_tokens?: number;
   similarityResults?: number;
@@ -30,6 +31,7 @@ interface ChatWindowProps {
 const ChatWindow = ({
   model = 'o4-mini',
   embeddingModel = 'text-embedding-3-large',
+  provider = 'openai',
   max_tokens = 4096,
   similarityResults = 50,
   resources: mentionInputResources = [],
@@ -492,8 +494,9 @@ const ChatWindow = ({
       </div>
       <div className="max-w-[900px] mx-auto flex justify-between">
         {/* show what model is loaded */}
-        <p className="text-
-
+        <p className="text-[10px] text-gray-400 mt-2">
+          Provider: {provider.charAt(0).toUpperCase() + provider.slice(1)} | Model: {model}
+          {/* Embedding Model: {embeddingModel} */}
         </p>
         <p className="text-xs text-gray-500 mt-2">EventCatalog Chat can make mistakes. Check important info.</p>
       </div>
package/eventcatalog/src/enterprise/eventcatalog-chat/providers/ai-provider.ts ADDED
@@ -0,0 +1,58 @@
+import { streamText, type CoreMessage, type LanguageModel, type Message } from 'ai';
+
+// base class for AI providers
+export interface AIProviderOptions {
+  modelId: string;
+  model?: LanguageModel;
+  temperature?: number;
+  topP?: number | undefined;
+  topK?: number | undefined;
+  frequencyPenalty?: number | undefined;
+  presencePenalty?: number | undefined;
+}
+
+export class AIProvider {
+  private model?: LanguageModel;
+  private modelId: string;
+  private temperature: number;
+  private topP: number | undefined;
+  private topK: number | undefined;
+  private frequencyPenalty: number | undefined;
+  private presencePenalty: number | undefined;
+  public models: string[];
+
+  constructor({ modelId, model, temperature, topP, topK, frequencyPenalty, presencePenalty }: AIProviderOptions) {
+    this.modelId = modelId;
+    this.temperature = temperature ?? 0.2;
+    this.topP = topP;
+    this.topK = topK;
+    this.frequencyPenalty = frequencyPenalty;
+    this.presencePenalty = presencePenalty;
+    this.models = [];
+
+    if (model) {
+      this.model = model;
+    }
+  }
+
+  async validateModel(model: string): Promise<{ isValidModel: boolean; listOfModels: string[] }> {
+    const isValidModel = this.models.includes(model);
+    return { isValidModel, listOfModels: this.models };
+  }
+
+  async streamText(messages: Array<CoreMessage> | Array<Omit<Message, 'id'>>) {
+    if (!this.model) {
+      throw new Error('Model not set');
+    }
+
+    return await streamText({
+      model: this.model,
+      messages: messages,
+      temperature: this.temperature,
+      ...(this.topP && { topP: this.topP }),
+      ...(this.topK && { topK: this.topK }),
+      ...(this.frequencyPenalty && { frequencyPenalty: this.frequencyPenalty }),
+      ...(this.presencePenalty && { presencePenalty: this.presencePenalty }),
+    });
+  }
+}
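A minimal sketch of how this base class is meant to be driven (not part of the package; the OpenAI model here is purely illustrative, and model validation is handled by the subclasses below, which populate models):

import { openai } from '@ai-sdk/openai';
import { AIProvider } from './ai-provider';

// Wrap any Vercel AI SDK LanguageModel; sampling options are optional.
const provider = new AIProvider({ modelId: 'gpt-4o-mini', model: openai('gpt-4o-mini'), temperature: 0.2 });

// streamText() proxies to the 'ai' package and returns its streaming result.
const result = await provider.streamText([{ role: 'user', content: 'Which services publish OrderCreated?' }]);
for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}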
package/eventcatalog/src/enterprise/eventcatalog-chat/providers/anthropic.ts ADDED
@@ -0,0 +1,28 @@
+import { anthropic } from '@ai-sdk/anthropic';
+import { AIProvider, type AIProviderOptions } from './ai-provider';
+
+const AVAILABLE_GOOGLE_MODELS = [
+  'claude-3-7-sonnet-20250219',
+  'claude-3-5-sonnet-latest',
+  'claude-3-5-sonnet-20241022',
+  'claude-3-5-sonnet-20240620',
+  'claude-3-5-haiku-latest',
+  'claude-3-5-haiku-20241022',
+  'claude-3-opus-latest',
+  'claude-3-opus-20240229',
+  'claude-3-sonnet-20240229',
+  'claude-3-haiku-20240307',
+] as const;
+
+export class AnthropicProvider extends AIProvider {
+  public models: string[] = [...AVAILABLE_GOOGLE_MODELS];
+
+  constructor(options: AIProviderOptions) {
+    const languageModel = anthropic(options.modelId || 'claude-3-7-sonnet-20250219');
+
+    super({
+      ...options,
+      model: languageModel,
+    });
+  }
+}
package/eventcatalog/src/enterprise/eventcatalog-chat/providers/google.ts ADDED
@@ -0,0 +1,41 @@
+import { google } from '@ai-sdk/google';
+import { AIProvider, type AIProviderOptions } from './ai-provider';
+
+const AVAILABLE_GOOGLE_MODELS = [
+  'gemini-1.5-flash',
+  'gemini-1.5-flash-latest',
+  'gemini-1.5-flash-001',
+  'gemini-1.5-flash-002',
+  'gemini-1.5-flash-8b',
+  'gemini-1.5-flash-8b-latest',
+  'gemini-1.5-flash-8b-001',
+  'gemini-1.5-pro',
+  'gemini-1.5-pro-latest',
+  'gemini-1.5-pro-001',
+  'gemini-1.5-pro-002',
+  'gemini-2.0-flash',
+  'gemini-2.0-flash-001',
+  'gemini-2.0-flash-live-001',
+  'gemini-2.0-flash-lite',
+  'gemini-2.0-pro-exp-02-05',
+  'gemini-2.0-flash-thinking-exp-01-21',
+  'gemini-2.0-flash-exp',
+  'gemini-2.5-pro-exp-03-25',
+  'gemini-2.5-pro-preview-05-06',
+  'gemini-2.5-flash-preview-04-17',
+  'gemini-exp-1206',
+  'gemma-3-27b-it',
+  'learnlm-1.5-pro-experimental',
+] as const;
+
+export class GoogleProvider extends AIProvider {
+  public models: string[] = [...AVAILABLE_GOOGLE_MODELS];
+
+  constructor(options: AIProviderOptions) {
+    const languageModel = google(options.modelId || 'gemini-1.5-flash');
+    super({
+      ...options,
+      model: languageModel,
+    });
+  }
+}
package/eventcatalog/src/enterprise/eventcatalog-chat/providers/index.ts ADDED
@@ -0,0 +1,26 @@
+import { OpenAIProvider } from './openai';
+import { GoogleProvider } from './google';
+import { AnthropicProvider } from './anthropic';
+import type { AIProviderOptions } from './ai-provider';
+
+export function getProvider(provider: string, options: AIProviderOptions) {
+  switch (provider) {
+    case 'openai':
+      return new OpenAIProvider({
+        ...options,
+        modelId: options.modelId,
+      });
+    case 'google':
+      return new GoogleProvider({
+        ...options,
+        modelId: options.modelId,
+      });
+    case 'anthropic':
+      return new AnthropicProvider({
+        ...options,
+        modelId: options.modelId,
+      });
+    default:
+      throw new Error(`Provider ${provider} not supported`);
+  }
+}
package/eventcatalog/src/enterprise/eventcatalog-chat/providers/openai.ts ADDED
@@ -0,0 +1,61 @@
+import { openai } from '@ai-sdk/openai';
+import { AIProvider, type AIProviderOptions } from './ai-provider';
+
+const AVAILABLE_OPENAI_MODELS = [
+  'o1',
+  'o1-2024-12-17',
+  'o1-mini',
+  'o1-mini-2024-09-12',
+  'o1-preview',
+  'o1-preview-2024-09-12',
+  'o3-mini',
+  'o3-mini-2025-01-31',
+  'o3',
+  'o3-2025-04-16',
+  'o4-mini',
+  'o4-mini-2025-04-16',
+  'gpt-4.1',
+  'gpt-4.1-2025-04-14',
+  'gpt-4.1-mini',
+  'gpt-4.1-mini-2025-04-14',
+  'gpt-4.1-nano',
+  'gpt-4.1-nano-2025-04-14',
+  'gpt-4o',
+  'gpt-4o-2024-05-13',
+  'gpt-4o-2024-08-06',
+  'gpt-4o-2024-11-20',
+  'gpt-4o-audio-preview',
+  'gpt-4o-audio-preview-2024-10-01',
+  'gpt-4o-audio-preview-2024-12-17',
+  'gpt-4o-search-preview',
+  'gpt-4o-search-preview-2025-03-11',
+  'gpt-4o-mini-search-preview',
+  'gpt-4o-mini-search-preview-2025-03-11',
+  'gpt-4o-mini',
+  'gpt-4o-mini-2024-07-18',
+  'gpt-4-turbo',
+  'gpt-4-turbo-2024-04-09',
+  'gpt-4-turbo-preview',
+  'gpt-4-0125-preview',
+  'gpt-4-1106-preview',
+  'gpt-4',
+  'gpt-4-0613',
+  'gpt-4.5-preview',
+  'gpt-4.5-preview-2025-02-27',
+  'gpt-3.5-turbo-0125',
+  'gpt-3.5-turbo',
+  'gpt-3.5-turbo-1106',
+  'chatgpt-4o-latest',
+] as const;
+
+export class OpenAIProvider extends AIProvider {
+  public models: string[] = [...AVAILABLE_OPENAI_MODELS];
+
+  constructor(options: AIProviderOptions) {
+    const languageModel = openai(options.modelId || 'o4-mini');
+    super({
+      ...options,
+      model: languageModel,
+    });
+  }
+}
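The three providers above all follow the same shape, so adding another is mostly boilerplate. A hypothetical sketch (not part of this release; @ai-sdk/mistral and the model ids below are assumptions for illustration only):

import { mistral } from '@ai-sdk/mistral';
import { AIProvider, type AIProviderOptions } from './ai-provider';

const AVAILABLE_MISTRAL_MODELS = ['mistral-large-latest', 'mistral-small-latest'] as const;

export class MistralProvider extends AIProvider {
  public models: string[] = [...AVAILABLE_MISTRAL_MODELS];

  constructor(options: AIProviderOptions) {
    // Wrap the SDK model and hand everything else to the base class.
    super({ ...options, model: mistral(options.modelId || 'mistral-large-latest') });
  }
}

A matching case 'mistral' branch in getProvider (providers/index.ts) and the extra value in the Config chat.provider union would complete the wiring.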
package/eventcatalog/src/enterprise/eventcatalog-chat/utils/ai.ts CHANGED
@@ -1,62 +1,15 @@
-import {
-import { openai } from '@ai-sdk/openai';
+import { generateText, type CoreMessage, type Message } from 'ai';
 import { EventCatalogVectorStore, type Resource } from '@enterprise/eventcatalog-chat/EventCatalogVectorStore';
 import fs from 'fs';
 import path from 'path';
 import config from '@config';
+import { anthropic } from '@ai-sdk/anthropic';
+import { getProvider } from '@enterprise/eventcatalog-chat/providers';
 
 const AI_EMBEDDINGS_PATH = path.join(process.env.PROJECT_DIR || process.cwd(), 'public/ai');
-
 const documents = JSON.parse(fs.readFileSync(path.join(AI_EMBEDDINGS_PATH, 'documents.json'), 'utf8'));
 const embeddings = JSON.parse(fs.readFileSync(path.join(AI_EMBEDDINGS_PATH, 'embeddings.json'), 'utf8'));
 
-const OpenAIChatModelIds = [
-  'o1',
-  'o1-2024-12-17',
-  'o1-mini',
-  'o1-mini-2024-09-12',
-  'o1-preview',
-  'o1-preview-2024-09-12',
-  'o3-mini',
-  'o3-mini-2025-01-31',
-  'o3',
-  'o3-2025-04-16',
-  'o4-mini',
-  'o4-mini-2025-04-16',
-  'gpt-4.1',
-  'gpt-4.1-2025-04-14',
-  'gpt-4.1-mini',
-  'gpt-4.1-mini-2025-04-14',
-  'gpt-4.1-nano',
-  'gpt-4.1-nano-2025-04-14',
-  'gpt-4o',
-  'gpt-4o-2024-05-13',
-  'gpt-4o-2024-08-06',
-  'gpt-4o-2024-11-20',
-  'gpt-4o-audio-preview',
-  'gpt-4o-audio-preview-2024-10-01',
-  'gpt-4o-audio-preview-2024-12-17',
-  'gpt-4o-search-preview',
-  'gpt-4o-search-preview-2025-03-11',
-  'gpt-4o-mini-search-preview',
-  'gpt-4o-mini-search-preview-2025-03-11',
-  'gpt-4o-mini',
-  'gpt-4o-mini-2024-07-18',
-  'gpt-4-turbo',
-  'gpt-4-turbo-2024-04-09',
-  'gpt-4-turbo-preview',
-  'gpt-4-0125-preview',
-  'gpt-4-1106-preview',
-  'gpt-4',
-  'gpt-4-0613',
-  'gpt-4.5-preview',
-  'gpt-4.5-preview-2025-02-27',
-  'gpt-3.5-turbo-0125',
-  'gpt-3.5-turbo',
-  'gpt-3.5-turbo-1106',
-  'chatgpt-4o-latest',
-];
-
 export const getResources = async (question: string) => {
   const vectorStore = await EventCatalogVectorStore.create(documents, embeddings);
   const resources = await vectorStore.getEventCatalogResources(question);
@@ -134,18 +87,23 @@ ${resourceStrings.join('\n')}
   },
 ] as CoreMessage[];
 
-const 
+const modelId = config?.chat?.model;
+
+// setup the model and provider
+const aiProvider = getProvider(config?.chat?.provider || 'openai', {
+  modelId,
+  temperature: config?.chat?.temperature,
+  topP: config?.chat?.topP,
+  topK: config?.chat?.topK,
+  frequencyPenalty: config?.chat?.frequencyPenalty,
+  presencePenalty: config?.chat?.presencePenalty,
+});
+
+const { isValidModel, listOfModels } = await aiProvider.validateModel(modelId);
 
-if (!
-throw new Error(`Invalid model: ${
+if (!isValidModel) {
+  throw new Error(`Invalid model: "${modelId}", please use a valid model from the following list: ${listOfModels.join(', ')}`);
 }
 
-return await streamText(
-  model: openai(model),
-  messages: messages,
-  temperature: 0.2,
-  topP: 1,
-  frequencyPenalty: 0,
-  presencePenalty: 0,
-});
+return await aiProvider.streamText(messages);
 }
package/eventcatalog/src/utils/protocols.tsx CHANGED
@@ -10,6 +10,6 @@ const protocolIcons = Object.keys(ProtocolIcons).reduce(
 );
 
 export const getIconForProtocol = (icon: string) => {
-  const Icon = protocolIcons[icon
+  const Icon = protocolIcons[icon?.replace('-', '').toLowerCase()];
   return Icon ? (props: any) => <span {...props} dangerouslySetInnerHTML={{ __html: Icon }} /> : null;
 };
package/package.json CHANGED
@@ -6,7 +6,7 @@
     "url": "https://github.com/event-catalog/eventcatalog.git"
   },
   "type": "module",
-  "version": "2.37.4",
+  "version": "2.38.1",
   "publishConfig": {
     "access": "public"
   },
@@ -21,6 +21,8 @@
     "default-files-for-collections/"
   ],
   "dependencies": {
+    "@ai-sdk/anthropic": "^1.2.11",
+    "@ai-sdk/google": "^1.2.17",
     "@ai-sdk/openai": "^1.3.16",
     "@astrojs/markdown-remark": "^6.3.1",
     "@astrojs/mdx": "^4.2.4",