@looopy-ai/core 2.1.8 → 2.1.9

@@ -16,7 +16,7 @@ export interface AgentConfig<AuthContext> {
     agentStore?: AgentStore;
     autoCompact?: boolean;
     maxMessages?: number;
-    systemPrompt?: SystemPromptProp;
+    systemPrompt?: SystemPromptProp<AuthContext>;
     skillRegistry?: SkillRegistry;
     logger?: import('pino').Logger;
 }
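
The practical effect of parameterizing `AgentConfig` is that a system-prompt callback can rely on a concrete, caller-defined auth type. A minimal sketch, assuming `AgentConfig` is exported from the package root and that `LoopContext` exposes the `authContext` field shown on `AgentContext` below; `TenantAuth` is a hypothetical shape:

```ts
import type { AgentConfig } from '@looopy-ai/core';

// Hypothetical auth shape, for illustration only.
interface TenantAuth {
  tenantId: string;
}

// The callback now receives LoopContext<TenantAuth>, so authContext
// can be read without casting or a self-generic function signature.
const systemPrompt: AgentConfig<TenantAuth>['systemPrompt'] = (loopContext) => ({
  prompt: `You are assisting tenant ${loopContext.authContext?.tenantId ?? 'unknown'}.`,
});
```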
@@ -2,4 +2,4 @@ import { type Observable } from 'rxjs';
 import type { ContextAnyEvent } from '../types/event';
 import type { Message } from '../types/message';
 import type { IterationConfig, LoopContext } from './types';
-export declare const runIteration: <AuthContext>(context: LoopContext<AuthContext>, config: IterationConfig, history: Message[]) => Observable<ContextAnyEvent>;
+export declare const runIteration: <AuthContext>(context: LoopContext<AuthContext>, config: IterationConfig<AuthContext>, history: Message[]) => Observable<ContextAnyEvent>;
@@ -19,7 +19,10 @@ export const runIteration = (context, config, history) => {
         return { messages, tools, systemPrompt };
     }).pipe(mergeMap(({ messages, tools, systemPrompt }) => {
         const { tapFinish: finishLLMCallSpan } = startLLMCallSpan({ ...context, parentContext: iterationContext }, systemPrompt, messages, tools);
-        return config.llmProvider
+        const llmProvider = typeof config.llmProvider === 'function'
+            ? config.llmProvider(context, systemPrompt?.metadata)
+            : config.llmProvider;
+        return llmProvider
             .call({
             messages,
             tools,
@@ -10,7 +10,7 @@ export type AgentContext<AuthContext> = {
     authContext?: AuthContext;
     toolProviders: ToolProvider<AuthContext>[];
     logger: pino.Logger;
-    systemPrompt?: SystemPromptProp;
+    systemPrompt?: SystemPromptProp<AuthContext>;
     skillRegistry?: SkillRegistry;
     metadata?: Record<string, unknown>;
 };
@@ -25,7 +25,7 @@ export type LoopConfig = {
     maxIterations: number;
     stopOnToolError: boolean;
 };
-export type IterationConfig = {
-    llmProvider: LLMProvider;
+export type IterationConfig<AuthContext> = {
+    llmProvider: LLMProvider | ((context: LoopContext<AuthContext>, systemPromptMetadata: Record<string, unknown> | undefined) => LLMProvider);
     iterationNumber: number;
 };
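
With `llmProvider` widened to a provider-or-factory union, the loop (see the `runIteration` hunk above) can pick a provider per iteration from the resolved system prompt's metadata. A sketch of a routing factory, assuming `IterationConfig` and the `LiteLLM` factories are exported from the package root and that `novaLite`'s `debugLogPath` parameter is optional; the endpoint, key, and `tier` metadata key are hypothetical:

```ts
import { LiteLLM, type IterationConfig } from '@looopy-ai/core';

const BASE_URL = 'https://litellm.example.com'; // hypothetical endpoint
const API_KEY = process.env.LITELLM_API_KEY ?? '';

// Route to a cheaper model when the resolved system prompt asks for it
// via metadata; otherwise use the default provider.
const config: IterationConfig<unknown> = {
  llmProvider: (_context, systemPromptMetadata) =>
    systemPromptMetadata?.tier === 'lite'
      ? LiteLLM.novaLite(BASE_URL, API_KEY)
      : LiteLLM.gpt4(BASE_URL, API_KEY),
  iterationNumber: 0,
};
```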
@@ -23,6 +23,7 @@ export declare class LiteLLMProvider implements LLMProvider {
        messages: Message[];
        tools?: ToolDefinition[];
        sessionId?: string;
+        metadata?: Record<string, unknown>;
    }): Observable<AnyEvent>;
    private debugLogRawChunk;
    private debugLog;
@@ -20,9 +20,9 @@ export class LiteLLMProvider {
    constructor(config) {
        this.config = {
            ...config,
-            temperature: config.temperature ?? 0.7,
+            temperature: config.temperature,
            maxTokens: config.maxTokens ?? 4096,
-            topP: config.topP ?? 1.0,
+            topP: config.topP,
            timeout: config.timeout ?? 60000,
            extraParams: config.extraParams ?? {},
        };
@@ -205,13 +205,12 @@ export class LiteLLMProvider {
            stream: true,
            stream_options: { include_usage: true },
            ...this.config.extraParams,
+            metadata: {
+                ...(this.config.extraParams.metadata || {}),
+                ...request.metadata,
+                ...(request.sessionId ? { session_id: request.sessionId } : {}),
+            },
        };
-        if (request.sessionId) {
-            litellmRequest.metadata = {
-                ...(litellmRequest.metadata || {}),
-                session_id: request.sessionId,
-            };
-        }
        if (request.tools && request.tools.length > 0) {
            litellmRequest.tools = request.tools.map((tool) => ({
                type: 'function',
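
The request builder now merges metadata from three sources with a fixed precedence: provider-level `extraParams.metadata`, then per-call `request.metadata`, then `session_id`, with later spreads winning. A standalone sketch of that merge order using plain object spreads:

```ts
// Mirrors the spread order in the hunk above: a per-request key
// overrides the provider default, and session_id wins over both.
const extraParamsMetadata = { team: 'platform', trace: false };
const request = { metadata: { trace: true }, sessionId: 'abc-123' };

const merged = {
  ...(extraParamsMetadata || {}),
  ...request.metadata,
  ...(request.sessionId ? { session_id: request.sessionId } : {}),
};
// => { team: 'platform', trace: true, session_id: 'abc-123' }
```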
@@ -300,6 +299,7 @@ export const LiteLLM = {
            apiKey,
            temperature: 0.7,
            maxTokens: 8192,
+            topP: 1.0,
        });
    },
    novaLite(baseUrl, apiKey, debugLogPath) {
@@ -310,6 +310,7 @@
            temperature: 0.7,
            maxTokens: 8192,
            debugLogPath,
+            topP: 1.0,
        });
    },
    gpt4(baseUrl, apiKey) {
@@ -372,6 +373,7 @@ export const LiteLLM = {
            model: `ollama/${model}`,
            temperature: 0.7,
            maxTokens: 2048,
+            topP: 1.0,
        });
    },
};
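
Together with the constructor hunk above, these three additions move the sampling defaults out of `LiteLLMProvider` itself and into the named factories: a directly constructed provider now sends only what the caller sets, while the factories pin `topP: 1.0` explicitly to preserve their previous behavior. A sketch of direct construction under that reading; the connection fields are hypothetical:

```ts
import { LiteLLMProvider } from '@looopy-ai/core';

const provider = new LiteLLMProvider({
  model: 'gpt-4o',                         // hypothetical model id
  baseUrl: 'https://litellm.example.com',  // hypothetical endpoint
  apiKey: 'sk-example',
  // temperature and topP are intentionally unset: the constructor no
  // longer forces 0.7 / 1.0, so the backend's own defaults apply.
  // maxTokens and timeout are still defaulted (4096 / 60000) if omitted.
});
```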
@@ -3,6 +3,7 @@ export type SystemPrompt = {
    prompt: string;
    name?: string;
    version?: number;
+    metadata?: Record<string, unknown>;
 };
-export type SystemPromptProp = string | SystemPrompt | (<AuthContext>(loopContext: LoopContext<AuthContext>) => Promise<SystemPrompt> | SystemPrompt);
-export declare const getSystemPrompt: <AuthContext>(systemPrompt: SystemPromptProp | undefined, loopContext: LoopContext<AuthContext>) => Promise<SystemPrompt | undefined>;
+export type SystemPromptProp<AuthContext> = string | SystemPrompt | ((loopContext: LoopContext<AuthContext>) => Promise<SystemPrompt> | SystemPrompt);
+export declare const getSystemPrompt: <AuthContext>(systemPrompt: SystemPromptProp<AuthContext> | undefined, loopContext: LoopContext<AuthContext>) => Promise<SystemPrompt | undefined>;
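
The new `metadata` field on `SystemPrompt` is what the loop forwards to an `llmProvider` factory as its second argument (see the `runIteration` hunk), so a dynamic prompt can carry routing hints alongside its text. A sketch, assuming `SystemPromptProp` is exported from the package root; the `tier` key is hypothetical:

```ts
import type { SystemPromptProp } from '@looopy-ai/core';

// A dynamic prompt that tags itself with routing metadata; the loop
// passes systemPrompt?.metadata to the provider factory, which can
// then select a cheaper model for this prompt.
const systemPrompt: SystemPromptProp<unknown> = () => ({
  prompt: 'You are a concise assistant.',
  name: 'concise',
  version: 1,
  metadata: { tier: 'lite' }, // hypothetical routing hint
});
```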
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@looopy-ai/core",
-  "version": "2.1.8",
+  "version": "2.1.9",
   "description": "RxJS-based AI agent framework",
   "keywords": [
     "agent",