@upstash/workflow 0.2.5-agents → 0.2.5-agents-3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,6 @@ import { PublishRequest, Client, Receiver, HTTPMethods as HTTPMethods$1 } from '
  import * as ai from 'ai';
  import { CoreTool, generateText } from 'ai';
  import * as _ai_sdk_openai from '@ai-sdk/openai';
- import { Tool } from 'langchain/tools';

  /**
  * Base class outlining steps. Basically, each step kind (run/sleep/sleepUntil)
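
The `langchain/tools` import is gone from the declarations. As the next hunk shows, `LangchainTool` is now a structural type (`description`/`schema`/`invoke`), so a LangChain-style tool only has to match that shape and nothing from LangChain has to be imported for the types to resolve. A minimal sketch of such an object (the zod schema and the tool body are illustrative assumptions, not part of the package):

```ts
// Hypothetical tool object matching the new structural LangchainTool shape
// (description / schema / invoke); no import from 'langchain/tools' required.
import { z } from "zod";

const weatherTool = {
  description: "Look up the current weather for a given city",
  schema: z.object({ city: z.string() }),
  invoke: async ({ city }: { city: string }) => `It is sunny in ${city}`,
};
```

An object like this can be passed in the `tools` record of `context.agents.agent(...)`, which accepts `AgentParameters<AISDKTool | LangchainTool>`.
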
@@ -391,53 +390,156 @@ declare class WorkflowApi extends BaseWorkflowApi {
  get anthropic(): AnthropicAPI;
  }

+ /**
+ * An Agent which utilizes the model and tools available to it
+ * to achieve a given task
+ *
+ * @param name Name of the agent
+ * @param background Background of the agent
+ * @param model LLM model to use
+ * @param tools tools available to the agent
+ * @param maxSteps number of times the agent can call the LLM at most. If
+ * the agent abruptly stops execution after calling tools, you may need
+ * to increase maxSteps
+ * @param temparature temparature used when calling the LLM
+ */
  declare class Agent {
  readonly name: AgentParameters["name"];
  readonly tools: AgentParameters["tools"];
  readonly maxSteps: AgentParameters["maxSteps"];
  readonly background: AgentParameters["background"];
  readonly model: AgentParameters["model"];
- constructor({ tools, maxSteps, background, name, model }: AgentParameters);
+ readonly temparature: AgentParameters["temparature"];
+ private readonly context;
+ constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
  call({ prompt }: {
  prompt: string;
- }): Promise<ai.GenerateTextResult<Record<string, AISDKTool>, never>>;
+ }): Promise<{
+ text: string;
+ }>;
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
  asTool(): AISDKTool;
  }
- type ManagerAgentParameters = {
- agents: Agent[];
- model: Model;
- } & Pick<Partial<AgentParameters>, "name" | "background"> & Pick<AgentParameters, "maxSteps">;
- declare class ManagerAgent extends Agent {
- agents: ManagerAgentParameters["agents"];
- constructor({ maxSteps, background, agents, model, name, }: ManagerAgentParameters);
- }

  type AISDKTool = CoreTool;
- type LangchainTool = Tool;
+ type LangchainTool = {
+ description: string;
+ schema: AISDKTool["parameters"];
+ invoke: (...params: any[]) => any;
+ };
  type GenerateTextParams = Parameters<typeof generateText>[0];
  type Model = GenerateTextParams["model"];
  type AgentParameters<TTool extends AISDKTool | LangchainTool = AISDKTool> = {
+ /**
+ * number of times the agent can call the LLM at most. If
+ * the agent abruptly stops execution after calling tools, you may need
+ * to increase maxSteps
+ */
  maxSteps: number;
+ /**
+ * Background of the agent
+ */
  background: string;
+ /**
+ * tools available to the agent
+ */
  tools: Record<string, TTool>;
+ /**
+ * Name of the agent
+ */
  name: string;
+ /**
+ * LLM model to use
+ */
  model: Model;
+ /**
+ * temparature used when calling the LLM
+ *
+ * @default 0.1
+ */
+ temparature?: number;
  };
  type TaskParams = {
+ /**
+ * task assigned to the agent
+ */
  prompt: string;
  };
  type SingleAgentTaskParams = TaskParams & {
+ /**
+ * agent to perform the task
+ */
  agent: Agent;
  };
  type MultiAgentTaskParams = TaskParams & {
+ /**
+ * Agents which will collaborate to achieve the task
+ */
  agents: Agent[];
+ /**
+ * number of times the manager agent can call the LLM at most.
+ * If the agent abruptly stops execution after calling other agents, you may
+ * need to increase maxSteps
+ */
  maxSteps: number;
+ /**
+ * LLM model to use
+ */
  model: Model;
+ /**
+ * Background of the agent. If not passed, default will be used.
+ */
  background?: string;
  };

+ /**
+ * creates an AI SDK openai client with a custom
+ * fetch implementation which uses context.call.
+ *
+ * @param context workflow context
+ * @returns ai sdk openai
+ */
  declare const createWorkflowOpenAI: (context: WorkflowContext) => _ai_sdk_openai.OpenAIProvider;

+ /**
+ * An Agent Task
+ *
+ * Can be run to make the agent(s) complete it using the tools available to them
+ *
+ * Can consist of a single agent or multiple agents.
+ *
+ * Single agent:
+ *
+ * ```ts
+ * const task = context.agents.task({
+ * agent: researcherAgent,
+ * prompt: "Tell me about 5 topics in advanced physics.",
+ * });
+ * const { text } = await task.run();
+ * ```
+ *
+ * Multi Agent:
+ *
+ * ```ts
+ * const task = context.agents.task({
+ * model,
+ * maxSteps: 3,
+ * agents: [researcherAgent, mathAgent],
+ * prompt: "Tell me about 3 cities in Japan and calculate the sum of their populations",
+ * });
+ * const { text } = await task.run();
+ * ```
+ */
  declare class Task {
  private readonly context;
  private readonly taskParameters;
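
This hunk reshapes the agent surface: `Agent` now receives a `WorkflowContext` and an optional `temparature` (spelled as in the source), `call()` resolves to a plain `{ text: string }` instead of the full ai-sdk `GenerateTextResult`, the `LangchainTool` alias becomes structural, and the standalone `ManagerAgent` class is removed (multi-agent coordination moves to `task()`, shown further down). A sketch of the new surface inside a workflow handler; the Next.js entry point and the model id are assumptions, not part of this diff:

```ts
// Sketch only: defining and calling an Agent through the workflow context.
import { serve } from "@upstash/workflow/nextjs"; // entry point assumed

export const { POST } = serve(async (context) => {
  // openai() wraps createWorkflowOpenAI, so LLM calls run through context.call.
  const model = context.agents.openai("gpt-4o-mini"); // model id assumed

  const writerAgent = context.agents.agent({
    model,
    name: "writer",
    maxSteps: 2,
    tools: {},
    background: "You are a concise technical writer.",
    temparature: 0.1, // optional; note the spelling used by this release
  });

  // call() now resolves to `{ text: string }` rather than a GenerateTextResult.
  const { text } = await writerAgent.call({
    prompt: "Summarize Upstash Workflow in one sentence.",
  });
  console.log(text);
});
```
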
@@ -445,19 +547,80 @@ declare class Task {
  context: WorkflowContext;
  taskParameters: SingleAgentTaskParams | MultiAgentTaskParams;
  });
+ /**
+ * Run the agents to complete the task
+ *
+ * @returns Result of the task as { text: string }
+ */
  run(): Promise<{
  text: string;
  }>;
  }

+ /**
+ * Workflow Agents API
+ *
+ * https://upstash.com/docs/workflow/agents/overview
+ *
+ * Allows defining agents which can complete a given task
+ * using tools available to them.
+ */
  declare class WorkflowAgents {
  private context;
  constructor({ context }: {
  context: WorkflowContext;
  });
+ /**
+ * Defines an agent
+ *
+ * ```ts
+ * const researcherAgent = context.agents.agent({
+ * model,
+ * name: 'academic',
+ * maxSteps: 2,
+ * tools: {
+ * wikiTool: new WikipediaQueryRun({
+ * topKResults: 1,
+ * maxDocContentLength: 500,
+ * })
+ * },
+ * background:
+ * 'You are researcher agent with access to Wikipedia. ' +
+ * 'Utilize Wikipedia as much as possible for correct information',
+ * });
+ * ```
+ *
+ * @param params agent parameters
+ * @returns
+ */
  agent(params: AgentParameters<AISDKTool | LangchainTool>): Agent;
+ /**
+ * Defines a task to be executed by a single agent
+ *
+ * ```ts
+ * const task = context.agents.task({
+ * agent: researcherAgent,
+ * prompt: "Tell me about 5 topics in advanced physics.",
+ * });
+ * ```
+ */
  task(taskParameters: SingleAgentTaskParams): Task;
+ /**
+ * Defines a task to be executed by multiple collaborating agents
+ *
+ * ```ts
+ * const task = context.agents.task({
+ * model,
+ * maxSteps: 3,
+ * agents: [researcherAgent, mathAgent],
+ * prompt: "Tell me about 3 cities in Japan and calculate the sum of their populations",
+ * });
+ * ```
+ */
  task(taskParameters: MultiAgentTaskParams): Task;
+ /**
+ * creates an openai model for agents
+ */
  openai(...params: Parameters<ReturnType<typeof createWorkflowOpenAI>>): ai.LanguageModelV1;
  }

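
The `asTool()` method on `Agent` (declared earlier in this diff), combined with the `task()` overloads above, lets one agent be exposed as a tool of another, which covers patterns that previously went through `ManagerAgent`. A sketch of that wiring (handler setup, model id, and prompts are assumptions):

```ts
// Sketch: exposing one agent as a tool of another via asTool().
import { serve } from "@upstash/workflow/nextjs"; // entry point assumed

export const { POST } = serve(async (context) => {
  const model = context.agents.openai("gpt-4o-mini"); // model id assumed

  const summarizer = context.agents.agent({
    model,
    name: "summarizer",
    maxSteps: 1,
    tools: {},
    background: "You summarize text in two sentences.",
  });

  const editor = context.agents.agent({
    model,
    name: "editor",
    maxSteps: 2,
    // asTool() returns an AISDKTool, so the summarizer slots in like any other tool.
    tools: { summarize: summarizer.asTool() },
    background: "You edit drafts and delegate summaries to the summarize tool.",
  });

  const { text } = await context.agents
    .task({
      agent: editor,
      prompt: "Edit and shorten this draft: Upstash Workflow runs durable serverless functions.",
    })
    .run();
  console.log(text);
});
```
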
@@ -1132,4 +1295,4 @@ type HeaderParams = {
  callTimeout?: never;
  });

- export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, ManagerAgent as M, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s, WorkflowAgents as t, createWorkflowOpenAI as u, Agent as v };
+ export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
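
The shared declarations chunk no longer re-exports `ManagerAgent`, `WorkflowAgents`, `createWorkflowOpenAI`, or `Agent`; together with the removed `ManagerAgent` class, multi-agent coordination is now driven entirely through `context.agents.task()`. A sketch following the JSDoc examples above (the `WikipediaQueryRun` import path, route wiring, and model id are assumptions):

```ts
// Sketch combining the documented agent() and task() calls end to end.
import { serve } from "@upstash/workflow/nextjs"; // entry point assumed
import { WikipediaQueryRun } from "@langchain/community/tools/wikipedia_query_run"; // path assumed

export const { POST } = serve(async (context) => {
  const model = context.agents.openai("gpt-4o"); // model id assumed

  const researcherAgent = context.agents.agent({
    model,
    name: "academic",
    maxSteps: 2,
    tools: {
      wikiTool: new WikipediaQueryRun({ topKResults: 1, maxDocContentLength: 500 }),
    },
    background:
      "You are researcher agent with access to Wikipedia. " +
      "Utilize Wikipedia as much as possible for correct information",
  });

  const mathAgent = context.agents.agent({
    model,
    name: "math",
    maxSteps: 2,
    tools: {},
    background: "You are good at basic arithmetic.",
  });

  // Replaces the removed ManagerAgent: a coordinating LLM call drives both agents.
  const task = context.agents.task({
    model,
    maxSteps: 3,
    agents: [researcherAgent, mathAgent],
    prompt: "Tell me about 3 cities in Japan and calculate the sum of their populations",
  });
  const { text } = await task.run();
  console.log(text);
});
```
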
package/agents.d.mts DELETED
@@ -1,5 +0,0 @@
- import 'ai';
- export { v as Agent, M as ManagerAgent, t as WorkflowAgents, u as createWorkflowOpenAI } from './types-BEyIoCRe.mjs';
- import '@upstash/qstash';
- import '@ai-sdk/openai';
- import 'langchain/tools';
package/agents.d.ts DELETED
@@ -1,5 +0,0 @@
- import 'ai';
- export { v as Agent, M as ManagerAgent, t as WorkflowAgents, u as createWorkflowOpenAI } from './types-BEyIoCRe.js';
- import '@upstash/qstash';
- import '@ai-sdk/openai';
- import 'langchain/tools';
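
With `agents.d.mts`/`agents.d.ts` deleted and their named exports removed, there is no separate agents module left to import from; agents are created and run through `context.agents`, as in the sketches above. For code that only needs the types, a type-level workaround is sketched below (the old subpath name is inferred from the deleted files, and deriving the types from `WorkflowContext` assumes `agents` is typed on the context):

```ts
// 0.2.5-agents exposed a dedicated agents entry point (subpath name inferred):
//   import { Agent, ManagerAgent, WorkflowAgents, createWorkflowOpenAI } from "@upstash/workflow/agents";
// 0.2.5-agents-3 removes it. If only the types are needed, they can be
// derived from the still-exported WorkflowContext instead:
import type { WorkflowContext } from "@upstash/workflow";

type AgentsApi = WorkflowContext["agents"];          // stands in for WorkflowAgents
type AgentInstance = ReturnType<AgentsApi["agent"]>; // stands in for Agent
```
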