illuma-agents 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +25 -21
- package/dist/cjs/agents/AgentContext.cjs +222 -0
- package/dist/cjs/agents/AgentContext.cjs.map +1 -0
- package/dist/cjs/common/enum.cjs +5 -4
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/events.cjs +7 -5
- package/dist/cjs/events.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +328 -207
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/graphs/MultiAgentGraph.cjs +507 -0
- package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
- package/dist/cjs/llm/google/index.cjs.map +1 -1
- package/dist/cjs/llm/ollama/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/index.cjs +35 -0
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/utils/index.cjs +3 -1
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -1
- package/dist/cjs/llm/openrouter/index.cjs.map +1 -1
- package/dist/cjs/llm/providers.cjs +0 -2
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/cjs/llm/vertexai/index.cjs.map +1 -1
- package/dist/cjs/main.cjs +12 -1
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/messages/cache.cjs +123 -0
- package/dist/cjs/messages/cache.cjs.map +1 -0
- package/dist/cjs/messages/content.cjs +53 -0
- package/dist/cjs/messages/content.cjs.map +1 -0
- package/dist/cjs/messages/format.cjs +17 -29
- package/dist/cjs/messages/format.cjs.map +1 -1
- package/dist/cjs/run.cjs +119 -74
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +77 -73
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/cjs/tools/Calculator.cjs +45 -0
- package/dist/cjs/tools/Calculator.cjs.map +1 -0
- package/dist/cjs/tools/CodeExecutor.cjs +22 -22
- package/dist/cjs/tools/CodeExecutor.cjs.map +1 -1
- package/dist/cjs/tools/ToolNode.cjs +5 -3
- package/dist/cjs/tools/ToolNode.cjs.map +1 -1
- package/dist/cjs/tools/handlers.cjs +20 -20
- package/dist/cjs/tools/handlers.cjs.map +1 -1
- package/dist/cjs/utils/events.cjs +31 -0
- package/dist/cjs/utils/events.cjs.map +1 -0
- package/dist/cjs/utils/handlers.cjs +70 -0
- package/dist/cjs/utils/handlers.cjs.map +1 -0
- package/dist/cjs/utils/tokens.cjs +54 -7
- package/dist/cjs/utils/tokens.cjs.map +1 -1
- package/dist/esm/agents/AgentContext.mjs +220 -0
- package/dist/esm/agents/AgentContext.mjs.map +1 -0
- package/dist/esm/common/enum.mjs +5 -4
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/events.mjs +7 -5
- package/dist/esm/events.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +330 -209
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/graphs/MultiAgentGraph.mjs +505 -0
- package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -0
- package/dist/esm/llm/anthropic/index.mjs.map +1 -1
- package/dist/esm/llm/google/index.mjs.map +1 -1
- package/dist/esm/llm/ollama/index.mjs.map +1 -1
- package/dist/esm/llm/openai/index.mjs +35 -0
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/llm/openai/utils/index.mjs +3 -1
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -1
- package/dist/esm/llm/openrouter/index.mjs.map +1 -1
- package/dist/esm/llm/providers.mjs +0 -2
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/dist/esm/llm/vertexai/index.mjs.map +1 -1
- package/dist/esm/main.mjs +7 -2
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/messages/cache.mjs +120 -0
- package/dist/esm/messages/cache.mjs.map +1 -0
- package/dist/esm/messages/content.mjs +51 -0
- package/dist/esm/messages/content.mjs.map +1 -0
- package/dist/esm/messages/format.mjs +18 -29
- package/dist/esm/messages/format.mjs.map +1 -1
- package/dist/esm/run.mjs +119 -74
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +77 -73
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/esm/tools/Calculator.mjs +24 -0
- package/dist/esm/tools/Calculator.mjs.map +1 -0
- package/dist/esm/tools/CodeExecutor.mjs +22 -22
- package/dist/esm/tools/CodeExecutor.mjs.map +1 -1
- package/dist/esm/tools/ToolNode.mjs +5 -3
- package/dist/esm/tools/ToolNode.mjs.map +1 -1
- package/dist/esm/tools/handlers.mjs +20 -20
- package/dist/esm/tools/handlers.mjs.map +1 -1
- package/dist/esm/utils/events.mjs +29 -0
- package/dist/esm/utils/events.mjs.map +1 -0
- package/dist/esm/utils/handlers.mjs +68 -0
- package/dist/esm/utils/handlers.mjs.map +1 -0
- package/dist/esm/utils/tokens.mjs +54 -8
- package/dist/esm/utils/tokens.mjs.map +1 -1
- package/dist/types/agents/AgentContext.d.ts +94 -0
- package/dist/types/common/enum.d.ts +7 -5
- package/dist/types/events.d.ts +3 -3
- package/dist/types/graphs/Graph.d.ts +60 -66
- package/dist/types/graphs/MultiAgentGraph.d.ts +47 -0
- package/dist/types/graphs/index.d.ts +1 -0
- package/dist/types/index.d.ts +1 -0
- package/dist/types/llm/openai/index.d.ts +10 -0
- package/dist/types/messages/cache.d.ts +20 -0
- package/dist/types/messages/content.d.ts +7 -0
- package/dist/types/messages/format.d.ts +1 -7
- package/dist/types/messages/index.d.ts +2 -0
- package/dist/types/messages/reducer.d.ts +9 -0
- package/dist/types/run.d.ts +16 -10
- package/dist/types/stream.d.ts +4 -3
- package/dist/types/tools/Calculator.d.ts +8 -0
- package/dist/types/tools/ToolNode.d.ts +1 -1
- package/dist/types/tools/handlers.d.ts +9 -7
- package/dist/types/tools/search/tool.d.ts +4 -4
- package/dist/types/types/graph.d.ts +124 -11
- package/dist/types/types/llm.d.ts +13 -9
- package/dist/types/types/messages.d.ts +4 -0
- package/dist/types/types/run.d.ts +46 -8
- package/dist/types/types/stream.d.ts +3 -2
- package/dist/types/utils/events.d.ts +6 -0
- package/dist/types/utils/handlers.d.ts +34 -0
- package/dist/types/utils/index.d.ts +1 -0
- package/dist/types/utils/tokens.d.ts +24 -0
- package/package.json +162 -145
- package/src/agents/AgentContext.ts +323 -0
- package/src/common/enum.ts +177 -176
- package/src/events.ts +197 -191
- package/src/graphs/Graph.ts +1058 -846
- package/src/graphs/MultiAgentGraph.ts +598 -0
- package/src/graphs/index.ts +2 -1
- package/src/index.ts +25 -24
- package/src/llm/anthropic/index.ts +413 -413
- package/src/llm/google/index.ts +222 -222
- package/src/llm/google/utils/zod_to_genai_parameters.ts +86 -88
- package/src/llm/ollama/index.ts +92 -92
- package/src/llm/openai/index.ts +894 -853
- package/src/llm/openai/utils/index.ts +920 -918
- package/src/llm/openrouter/index.ts +60 -60
- package/src/llm/providers.ts +55 -57
- package/src/llm/vertexai/index.ts +360 -360
- package/src/messages/cache.test.ts +461 -0
- package/src/messages/cache.ts +151 -0
- package/src/messages/content.test.ts +362 -0
- package/src/messages/content.ts +63 -0
- package/src/messages/format.ts +611 -625
- package/src/messages/formatAgentMessages.test.ts +1144 -917
- package/src/messages/index.ts +6 -4
- package/src/messages/reducer.ts +80 -0
- package/src/run.ts +447 -381
- package/src/scripts/abort.ts +157 -138
- package/src/scripts/ant_web_search.ts +158 -158
- package/src/scripts/cli.ts +172 -167
- package/src/scripts/cli2.ts +133 -125
- package/src/scripts/cli3.ts +184 -178
- package/src/scripts/cli4.ts +191 -184
- package/src/scripts/cli5.ts +191 -184
- package/src/scripts/code_exec.ts +213 -214
- package/src/scripts/code_exec_simple.ts +147 -129
- package/src/scripts/content.ts +138 -120
- package/src/scripts/handoff-test.ts +135 -0
- package/src/scripts/multi-agent-chain.ts +278 -0
- package/src/scripts/multi-agent-conditional.ts +220 -0
- package/src/scripts/multi-agent-document-review-chain.ts +197 -0
- package/src/scripts/multi-agent-hybrid-flow.ts +310 -0
- package/src/scripts/multi-agent-parallel.ts +343 -0
- package/src/scripts/multi-agent-sequence.ts +212 -0
- package/src/scripts/multi-agent-supervisor.ts +364 -0
- package/src/scripts/multi-agent-test.ts +186 -0
- package/src/scripts/search.ts +146 -150
- package/src/scripts/simple.ts +225 -225
- package/src/scripts/stream.ts +140 -122
- package/src/scripts/test-custom-prompt-key.ts +145 -0
- package/src/scripts/test-handoff-input.ts +170 -0
- package/src/scripts/test-multi-agent-list-handoff.ts +261 -0
- package/src/scripts/test-tools-before-handoff.ts +222 -0
- package/src/scripts/tools.ts +153 -155
- package/src/specs/agent-handoffs.test.ts +889 -0
- package/src/specs/anthropic.simple.test.ts +320 -317
- package/src/specs/azure.simple.test.ts +325 -316
- package/src/specs/openai.simple.test.ts +311 -316
- package/src/specs/openrouter.simple.test.ts +107 -0
- package/src/specs/prune.test.ts +758 -763
- package/src/specs/reasoning.test.ts +201 -165
- package/src/specs/thinking-prune.test.ts +769 -703
- package/src/specs/token-memoization.test.ts +39 -0
- package/src/stream.ts +664 -651
- package/src/tools/Calculator.test.ts +278 -0
- package/src/tools/Calculator.ts +25 -0
- package/src/tools/CodeExecutor.ts +220 -220
- package/src/tools/ToolNode.ts +170 -170
- package/src/tools/handlers.ts +341 -336
- package/src/types/graph.ts +372 -185
- package/src/types/llm.ts +141 -140
- package/src/types/messages.ts +4 -0
- package/src/types/run.ts +128 -89
- package/src/types/stream.ts +401 -400
- package/src/utils/events.ts +32 -0
- package/src/utils/handlers.ts +107 -0
- package/src/utils/index.ts +6 -5
- package/src/utils/llmConfig.ts +183 -183
- package/src/utils/tokens.ts +129 -70
- package/dist/types/scripts/abort.d.ts +0 -1
- package/dist/types/scripts/ant_web_search.d.ts +0 -1
- package/dist/types/scripts/args.d.ts +0 -7
- package/dist/types/scripts/caching.d.ts +0 -1
- package/dist/types/scripts/cli.d.ts +0 -1
- package/dist/types/scripts/cli2.d.ts +0 -1
- package/dist/types/scripts/cli3.d.ts +0 -1
- package/dist/types/scripts/cli4.d.ts +0 -1
- package/dist/types/scripts/cli5.d.ts +0 -1
- package/dist/types/scripts/code_exec.d.ts +0 -1
- package/dist/types/scripts/code_exec_files.d.ts +0 -1
- package/dist/types/scripts/code_exec_simple.d.ts +0 -1
- package/dist/types/scripts/content.d.ts +0 -1
- package/dist/types/scripts/empty_input.d.ts +0 -1
- package/dist/types/scripts/image.d.ts +0 -1
- package/dist/types/scripts/memory.d.ts +0 -1
- package/dist/types/scripts/search.d.ts +0 -1
- package/dist/types/scripts/simple.d.ts +0 -1
- package/dist/types/scripts/stream.d.ts +0 -1
- package/dist/types/scripts/thinking.d.ts +0 -1
- package/dist/types/scripts/tools.d.ts +0 -1
- package/dist/types/specs/spec.utils.d.ts +0 -1
- package/dist/types/tools/example.d.ts +0 -78
- package/src/tools/example.ts +0 -129
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
/* eslint-disable no-console */
|
|
2
|
+
// src/utils/events.ts
|
|
3
|
+
import { dispatchCustomEvent } from '@langchain/core/callbacks/dispatch';
|
|
4
|
+
import type { RunnableConfig } from '@langchain/core/runnables';
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Safely dispatches a custom event and properly awaits it to avoid
|
|
8
|
+
* race conditions where events are dispatched after run cleanup.
|
|
9
|
+
*/
|
|
10
|
+
export async function safeDispatchCustomEvent(
|
|
11
|
+
event: string,
|
|
12
|
+
payload: unknown,
|
|
13
|
+
config?: RunnableConfig
|
|
14
|
+
): Promise<void> {
|
|
15
|
+
try {
|
|
16
|
+
await dispatchCustomEvent(event, payload, config);
|
|
17
|
+
} catch (e) {
|
|
18
|
+
// Check if this is the known EventStreamCallbackHandler error
|
|
19
|
+
if (
|
|
20
|
+
e instanceof Error &&
|
|
21
|
+
e.message.includes('handleCustomEvent: Run ID') &&
|
|
22
|
+
e.message.includes('not found in run map')
|
|
23
|
+
) {
|
|
24
|
+
// Suppress this specific error - it's expected during parallel execution
|
|
25
|
+
// when EventStreamCallbackHandler loses track of run IDs
|
|
26
|
+
// console.debug('Suppressed error dispatching custom event:', e);
|
|
27
|
+
return;
|
|
28
|
+
}
|
|
29
|
+
// Log other errors
|
|
30
|
+
console.error('Error dispatching custom event:', e);
|
|
31
|
+
}
|
|
32
|
+
}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Multi-Agent Handler Utilities
|
|
3
|
+
*
|
|
4
|
+
* Provides a simple helper to create handlers with content aggregation for multi-agent scripts.
|
|
5
|
+
*
|
|
6
|
+
* Usage:
|
|
7
|
+
* ```typescript
|
|
8
|
+
* const { contentParts, aggregateContent, handlers } = createHandlers();
|
|
9
|
+
*
|
|
10
|
+
* // With callbacks
|
|
11
|
+
* const { contentParts, aggregateContent, handlers } = createHandlers({
|
|
12
|
+
* onRunStep: (event, data) => console.log('Step:', data),
|
|
13
|
+
* onRunStepCompleted: (event, data) => console.log('Completed:', data)
|
|
14
|
+
* });
|
|
15
|
+
* ```
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
import { GraphEvents } from '@/common';
|
|
19
|
+
import { ChatModelStreamHandler, createContentAggregator } from '@/stream';
|
|
20
|
+
import { ToolEndHandler, ModelEndHandler } from '@/events';
|
|
21
|
+
import type * as t from '@/types';
|
|
22
|
+
|
|
23
|
+
interface HandlerCallbacks {
|
|
24
|
+
onRunStep?: (event: GraphEvents.ON_RUN_STEP, data: t.StreamEventData) => void;
|
|
25
|
+
onRunStepCompleted?: (
|
|
26
|
+
event: GraphEvents.ON_RUN_STEP_COMPLETED,
|
|
27
|
+
data: t.StreamEventData
|
|
28
|
+
) => void;
|
|
29
|
+
onRunStepDelta?: (
|
|
30
|
+
event: GraphEvents.ON_RUN_STEP_DELTA,
|
|
31
|
+
data: t.StreamEventData
|
|
32
|
+
) => void;
|
|
33
|
+
onMessageDelta?: (
|
|
34
|
+
event: GraphEvents.ON_MESSAGE_DELTA,
|
|
35
|
+
data: t.StreamEventData
|
|
36
|
+
) => void;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Creates handlers with content aggregation for multi-agent scripts
|
|
41
|
+
*/
|
|
42
|
+
export function createHandlers(callbacks?: HandlerCallbacks): {
|
|
43
|
+
contentParts: Array<t.MessageContentComplex | undefined>;
|
|
44
|
+
aggregateContent: ReturnType<
|
|
45
|
+
typeof createContentAggregator
|
|
46
|
+
>['aggregateContent'];
|
|
47
|
+
handlers: Record<string, t.EventHandler>;
|
|
48
|
+
} {
|
|
49
|
+
// Set up content aggregator
|
|
50
|
+
const { contentParts, aggregateContent } = createContentAggregator();
|
|
51
|
+
|
|
52
|
+
// Create the handlers object
|
|
53
|
+
const handlers = {
|
|
54
|
+
[GraphEvents.TOOL_END]: new ToolEndHandler(),
|
|
55
|
+
[GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
|
|
56
|
+
[GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
|
|
57
|
+
|
|
58
|
+
[GraphEvents.ON_RUN_STEP]: {
|
|
59
|
+
handle: (
|
|
60
|
+
event: GraphEvents.ON_RUN_STEP,
|
|
61
|
+
data: t.StreamEventData
|
|
62
|
+
): void => {
|
|
63
|
+
aggregateContent({ event, data: data as t.RunStep });
|
|
64
|
+
callbacks?.onRunStep?.(event, data);
|
|
65
|
+
},
|
|
66
|
+
},
|
|
67
|
+
|
|
68
|
+
[GraphEvents.ON_RUN_STEP_COMPLETED]: {
|
|
69
|
+
handle: (
|
|
70
|
+
event: GraphEvents.ON_RUN_STEP_COMPLETED,
|
|
71
|
+
data: t.StreamEventData
|
|
72
|
+
): void => {
|
|
73
|
+
aggregateContent({
|
|
74
|
+
event,
|
|
75
|
+
data: data as unknown as { result: t.ToolEndEvent },
|
|
76
|
+
});
|
|
77
|
+
callbacks?.onRunStepCompleted?.(event, data);
|
|
78
|
+
},
|
|
79
|
+
},
|
|
80
|
+
|
|
81
|
+
[GraphEvents.ON_RUN_STEP_DELTA]: {
|
|
82
|
+
handle: (
|
|
83
|
+
event: GraphEvents.ON_RUN_STEP_DELTA,
|
|
84
|
+
data: t.StreamEventData
|
|
85
|
+
): void => {
|
|
86
|
+
aggregateContent({ event, data: data as t.RunStepDeltaEvent });
|
|
87
|
+
callbacks?.onRunStepDelta?.(event, data);
|
|
88
|
+
},
|
|
89
|
+
},
|
|
90
|
+
|
|
91
|
+
[GraphEvents.ON_MESSAGE_DELTA]: {
|
|
92
|
+
handle: (
|
|
93
|
+
event: GraphEvents.ON_MESSAGE_DELTA,
|
|
94
|
+
data: t.StreamEventData
|
|
95
|
+
): void => {
|
|
96
|
+
aggregateContent({ event, data: data as t.MessageDeltaEvent });
|
|
97
|
+
callbacks?.onMessageDelta?.(event, data);
|
|
98
|
+
},
|
|
99
|
+
},
|
|
100
|
+
};
|
|
101
|
+
|
|
102
|
+
return {
|
|
103
|
+
contentParts,
|
|
104
|
+
aggregateContent,
|
|
105
|
+
handlers,
|
|
106
|
+
};
|
|
107
|
+
}
|
// src/utils/index.ts
// Barrel file: re-exports the utility submodules so consumers can import
// everything from '@/utils' without knowing the internal file layout.
// NOTE(review): './events' and './llmConfig' are intentionally (?) not
// re-exported here — confirm whether they are meant to stay internal.
export * from './graph';
export * from './llm';
export * from './misc';
export * from './handlers';
export * from './run';
export * from './tokens';
// src/utils/llmConfig.ts
// Preset LLM configurations keyed either by a Providers enum member or by an
// ad-hoc name for an OpenAI-compatible gateway (LiteLLM, LM Studio, Zhipu,
// Alibaba DashScope, Perplexity, ...). Used by the example/test scripts.
import { Providers } from '@/common';
import type * as or from '@/llm/openrouter';
import type * as t from '@/types';

export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
  [Providers.OPENAI]: {
    provider: Providers.OPENAI,
    model: 'gpt-4.1',
    // temperature: 0.7,
    streaming: true,
    streamUsage: true,
    // disableStreaming: true,
  },
  // Anthropic model routed through a local proxy speaking the OpenAI API
  // (baseURL suggests a LiteLLM container — TODO confirm).
  anthropicLITELLM: {
    provider: Providers.OPENAI,
    streaming: true,
    streamUsage: false,
    apiKey: 'sk-1234', // NOTE(review): presumably a placeholder for the local proxy, not a real secret — verify
    model: 'claude-sonnet-4',
    maxTokens: 8192,
    modelKwargs: {
      metadata: {
        user_id: 'some_user_id',
      },
      // Anthropic extended-thinking passthrough with a small token budget.
      thinking: {
        type: 'enabled',
        budget_tokens: 2000,
      },
    },
    configuration: {
      baseURL: 'http://host.docker.internal:4000/v1',
      defaultHeaders: {
        'anthropic-beta': 'prompt-caching-2024-07-31,context-1m-2025-08-07',
      },
    },
    // disableStreaming: true,
  },
  [Providers.XAI]: {
    provider: Providers.XAI,
    model: 'grok-2-latest',
    streaming: true,
    streamUsage: true,
  },
  // Alibaba DashScope via its OpenAI-compatible endpoint.
  alibaba: {
    provider: Providers.OPENAI,
    streaming: true,
    streamUsage: true,
    model: 'qwen-max',
    openAIApiKey: process.env.ALIBABA_API_KEY,
    configuration: {
      baseURL: 'https://dashscope-intl.aliyuncs.com/compatible-mode/v1',
    },
  },
  [Providers.OPENROUTER]: {
    provider: Providers.OPENROUTER,
    streaming: true,
    streamUsage: true,
    model: 'openai/gpt-4.1',
    openAIApiKey: process.env.OPENROUTER_API_KEY,
    configuration: {
      baseURL: process.env.OPENROUTER_BASE_URL,
      // OpenRouter attribution headers (app ranking/identification).
      defaultHeaders: {
        'HTTP-Referer': 'https://illuma.ai',
        'X-Title': 'Illuma',
      },
    },
    include_reasoning: true,
  } as or.ChatOpenRouterCallOptions & t.LLMConfig,
  [Providers.AZURE]: {
    provider: Providers.AZURE,
    temperature: 0.7,
    streaming: true,
    streamUsage: true,
    azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
    azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE,
    azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT,
    azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION,
    model: process.env.AZURE_MODEL_NAME ?? 'gpt-4o',
  },
  [Providers.OLLAMA]: {
    provider: Providers.OLLAMA,
    model: 'gpt-oss:20b',
    streaming: true,
    streamUsage: true,
    baseUrl: 'http://localhost:11434',
  },
  // LM Studio local server (OpenAI-compatible).
  lmstudio: {
    provider: Providers.OPENAI,
    model: 'gpt-oss-20b',
    streaming: true,
    streamUsage: true,
    configuration: {
      // NOTE(review): hard-coded LAN address of a dev machine — consider an env var.
      baseURL: 'http://192.168.254.183:1233/v1',
    },
  },
  zhipu: {
    provider: Providers.OPENAI,
    streaming: true,
    streamUsage: false,
    model: 'glm-4.5-air',
    apiKey: process.env.ZHIPU_API_KEY,
    configuration: {
      baseURL: 'https://open.bigmodel.cn/api/paas/v4',
    },
  },
  [Providers.DEEPSEEK]: {
    provider: Providers.DEEPSEEK,
    model: 'deepseek-reasoner',
    streaming: true,
    streamUsage: true,
  },
  [Providers.ANTHROPIC]: {
    provider: Providers.ANTHROPIC,
    model: 'claude-sonnet-4-5',
    streaming: true,
    streamUsage: true,
  },
  // [Providers.MISTRALAI]: {
  //   provider: Providers.MISTRALAI,
  //   model: 'mistral-large-latest',
  //   streaming: true,
  //   streamUsage: true,
  // },
  // Mistral via its OpenAI-compatible REST endpoint (not the native client above).
  [Providers.MISTRAL]: {
    provider: Providers.OPENAI,
    streaming: true,
    streamUsage: false,
    // model: 'codestral-latest',
    model: 'mistral-large-latest',
    openAIApiKey: process.env.MISTRAL_API_KEY,
    configuration: {
      baseURL: 'https://api.mistral.ai/v1',
      defaultHeaders: {},
    },
  },
  [Providers.VERTEXAI]: {
    provider: Providers.VERTEXAI,
    model: 'gemini-2.5-flash',
    streaming: true,
    streamUsage: true,
    keyFile: process.env.VERTEXAI_KEY_FILE,
  } as t.VertexAIClientOptions & t.LLMConfig,
  [Providers.GOOGLE]: {
    provider: Providers.GOOGLE,
    model: 'gemini-2.5-flash',
    streaming: true,
    streamUsage: true,
  },
  [Providers.BEDROCK]: {
    provider: Providers.BEDROCK,
    // model: 'anthropic.claude-3-sonnet-20240229-v1:0',
    // model: 'us.anthropic.claude-3-5-sonnet-20241022-v2:0',
    // model: 'us.amazon.nova-pro-v1:0',
    model: 'us.anthropic.claude-sonnet-4-20250514-v1:0',
    // additionalModelRequestFields: { thinking: { type: 'enabled', budget_tokens: 2000 } },
    region: process.env.BEDROCK_AWS_REGION,
    credentials: {
      // Non-null assertions: these env vars are assumed set when Bedrock is selected.
      accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!,
      secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!,
    },
    streaming: true,
    streamUsage: true,
  },
  perplexity: {
    provider: Providers.OPENAI,
    model: 'llama-3.1-sonar-small-128k-online',
    streaming: true,
    streamUsage: true,
    apiKey: process.env.PERPLEXITY_API_KEY,
    configuration: {
      baseURL: 'https://api.perplexity.ai/',
    },
  },
};

/**
 * Looks up the preset configuration for a provider key.
 *
 * @param provider - a `Providers` value or one of the ad-hoc keys above
 * @returns the matching `LLMConfig`
 * @throws Error when no configuration exists for the key
 */
export function getLLMConfig(provider: string): t.LLMConfig {
  const config = llmConfigs[provider];
  if (config === undefined) {
    throw new Error(`Unsupported provider: ${provider}`);
  }
  return config;
}