@ax-llm/ax 9.0.0 → 9.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +106 -99
- package/build/module/docs/assets/navigation.js +1 -1
- package/build/module/docs/assets/navigation.js.map +1 -1
- package/build/module/docs/assets/search.js +1 -1
- package/build/module/docs/assets/search.js.map +1 -1
- package/build/module/src/ai/anthropic/api.d.ts +13 -19
- package/build/module/src/ai/anthropic/api.js +165 -72
- package/build/module/src/ai/anthropic/api.js.map +1 -1
- package/build/module/src/ai/anthropic/info.d.ts +2 -2
- package/build/module/src/ai/anthropic/info.js +7 -7
- package/build/module/src/ai/anthropic/info.js.map +1 -1
- package/build/module/src/ai/anthropic/types.d.ts +63 -52
- package/build/module/src/ai/anthropic/types.js +8 -12
- package/build/module/src/ai/anthropic/types.js.map +1 -1
- package/build/module/src/ai/azure-openai/api.d.ts +12 -28
- package/build/module/src/ai/azure-openai/api.js +6 -22
- package/build/module/src/ai/azure-openai/api.js.map +1 -1
- package/build/module/src/ai/balance.d.ts +9 -12
- package/build/module/src/ai/balance.js +1 -1
- package/build/module/src/ai/balance.js.map +1 -1
- package/build/module/src/ai/base.d.ts +26 -29
- package/build/module/src/ai/base.js +37 -55
- package/build/module/src/ai/base.js.map +1 -1
- package/build/module/src/ai/cohere/api.d.ts +16 -30
- package/build/module/src/ai/cohere/api.js +26 -32
- package/build/module/src/ai/cohere/api.js.map +1 -1
- package/build/module/src/ai/cohere/info.d.ts +2 -2
- package/build/module/src/ai/cohere/info.js +10 -10
- package/build/module/src/ai/cohere/info.js.map +1 -1
- package/build/module/src/ai/cohere/types.d.ts +13 -13
- package/build/module/src/ai/cohere/types.js +14 -14
- package/build/module/src/ai/cohere/types.js.map +1 -1
- package/build/module/src/ai/deepseek/api.d.ts +10 -22
- package/build/module/src/ai/deepseek/api.js +14 -24
- package/build/module/src/ai/deepseek/api.js.map +1 -1
- package/build/module/src/ai/deepseek/info.d.ts +2 -2
- package/build/module/src/ai/deepseek/info.js +4 -4
- package/build/module/src/ai/deepseek/info.js.map +1 -1
- package/build/module/src/ai/deepseek/types.d.ts +1 -1
- package/build/module/src/ai/deepseek/types.js +5 -5
- package/build/module/src/ai/deepseek/types.js.map +1 -1
- package/build/module/src/ai/google-gemini/api.d.ts +18 -20
- package/build/module/src/ai/google-gemini/api.js +93 -67
- package/build/module/src/ai/google-gemini/api.js.map +1 -1
- package/build/module/src/ai/google-gemini/info.d.ts +3 -3
- package/build/module/src/ai/google-gemini/info.js +6 -6
- package/build/module/src/ai/google-gemini/info.js.map +1 -1
- package/build/module/src/ai/google-gemini/types.d.ts +41 -48
- package/build/module/src/ai/google-gemini/types.js +25 -37
- package/build/module/src/ai/google-gemini/types.js.map +1 -1
- package/build/module/src/ai/groq/api.d.ts +9 -18
- package/build/module/src/ai/groq/api.js +11 -17
- package/build/module/src/ai/groq/api.js.map +1 -1
- package/build/module/src/ai/groq/types.d.ts +6 -0
- package/build/module/src/ai/groq/types.js +8 -0
- package/build/module/src/ai/groq/types.js.map +1 -0
- package/build/module/src/ai/huggingface/api.d.ts +13 -27
- package/build/module/src/ai/huggingface/api.js +12 -24
- package/build/module/src/ai/huggingface/api.js.map +1 -1
- package/build/module/src/ai/huggingface/info.d.ts +2 -2
- package/build/module/src/ai/huggingface/info.js +1 -1
- package/build/module/src/ai/huggingface/info.js.map +1 -1
- package/build/module/src/ai/huggingface/types.d.ts +7 -15
- package/build/module/src/ai/huggingface/types.js +4 -8
- package/build/module/src/ai/huggingface/types.js.map +1 -1
- package/build/module/src/ai/index.d.ts +13 -13
- package/build/module/src/ai/index.js +23 -23
- package/build/module/src/ai/index.js.map +1 -1
- package/build/module/src/ai/mistral/api.d.ts +10 -22
- package/build/module/src/ai/mistral/api.js +14 -24
- package/build/module/src/ai/mistral/api.js.map +1 -1
- package/build/module/src/ai/mistral/info.d.ts +2 -2
- package/build/module/src/ai/mistral/info.js +7 -7
- package/build/module/src/ai/mistral/info.js.map +1 -1
- package/build/module/src/ai/mistral/types.d.ts +2 -10
- package/build/module/src/ai/mistral/types.js +12 -20
- package/build/module/src/ai/mistral/types.js.map +1 -1
- package/build/module/src/ai/ollama/api.d.ts +13 -16
- package/build/module/src/ai/ollama/api.js +17 -13
- package/build/module/src/ai/ollama/api.js.map +1 -1
- package/build/module/src/ai/openai/api.d.ts +19 -40
- package/build/module/src/ai/openai/api.js +90 -135
- package/build/module/src/ai/openai/api.js.map +1 -1
- package/build/module/src/ai/openai/info.d.ts +2 -2
- package/build/module/src/ai/openai/info.js +9 -9
- package/build/module/src/ai/openai/info.js.map +1 -1
- package/build/module/src/ai/openai/types.d.ts +29 -56
- package/build/module/src/ai/openai/types.js +17 -34
- package/build/module/src/ai/openai/types.js.map +1 -1
- package/build/module/src/ai/together/api.d.ts +9 -17
- package/build/module/src/ai/together/api.js +9 -15
- package/build/module/src/ai/together/api.js.map +1 -1
- package/build/module/src/ai/together/info.d.ts +2 -2
- package/build/module/src/ai/together/info.js +1 -1
- package/build/module/src/ai/together/info.js.map +1 -1
- package/build/module/src/ai/types.d.ts +112 -25
- package/build/module/src/ai/util.d.ts +4 -4
- package/build/module/src/ai/util.js +6 -3
- package/build/module/src/ai/util.js.map +1 -1
- package/build/module/src/db/base.d.ts +14 -15
- package/build/module/src/db/base.js +26 -29
- package/build/module/src/db/base.js.map +1 -1
- package/build/module/src/db/cloudflare.d.ts +10 -9
- package/build/module/src/db/cloudflare.js +2 -2
- package/build/module/src/db/cloudflare.js.map +1 -1
- package/build/module/src/db/index.d.ts +6 -7
- package/build/module/src/db/index.js +9 -9
- package/build/module/src/db/index.js.map +1 -1
- package/build/module/src/db/memory.d.ts +13 -11
- package/build/module/src/db/memory.js +2 -2
- package/build/module/src/db/memory.js.map +1 -1
- package/build/module/src/db/pinecone.d.ts +10 -9
- package/build/module/src/db/pinecone.js +2 -2
- package/build/module/src/db/pinecone.js.map +1 -1
- package/build/module/src/db/types.d.ts +9 -9
- package/build/module/src/db/weaviate.d.ts +10 -9
- package/build/module/src/db/weaviate.js +2 -2
- package/build/module/src/db/weaviate.js.map +1 -1
- package/build/module/src/docs/manager.d.ts +16 -16
- package/build/module/src/docs/manager.js +1 -1
- package/build/module/src/docs/manager.js.map +1 -1
- package/build/module/src/docs/reranker.d.ts +6 -6
- package/build/module/src/docs/reranker.js +3 -3
- package/build/module/src/docs/reranker.js.map +1 -1
- package/build/module/src/docs/rewriter.d.ts +3 -3
- package/build/module/src/docs/rewriter.js +2 -2
- package/build/module/src/docs/rewriter.js.map +1 -1
- package/build/module/src/docs/tika.d.ts +5 -8
- package/build/module/src/docs/tika.js +2 -2
- package/build/module/src/docs/tika.js.map +1 -1
- package/build/module/src/dsp/asserts.d.ts +8 -8
- package/build/module/src/dsp/asserts.js +9 -6
- package/build/module/src/dsp/asserts.js.map +1 -1
- package/build/module/src/dsp/eval.d.ts +9 -28
- package/build/module/src/dsp/eval.js +10 -5
- package/build/module/src/dsp/eval.js.map +1 -1
- package/build/module/src/dsp/evaluate.d.ts +8 -8
- package/build/module/src/dsp/evaluate.js +1 -1
- package/build/module/src/dsp/evaluate.js.map +1 -1
- package/build/module/src/dsp/extract.d.ts +6 -6
- package/build/module/src/dsp/extract.js +1 -0
- package/build/module/src/dsp/extract.js.map +1 -1
- package/build/module/src/dsp/generate.d.ts +24 -25
- package/build/module/src/dsp/generate.js +24 -23
- package/build/module/src/dsp/generate.js.map +1 -1
- package/build/module/src/dsp/generate.test.js +4 -4
- package/build/module/src/dsp/generate.test.js.map +1 -1
- package/build/module/src/dsp/index.d.ts +4 -4
- package/build/module/src/dsp/loader.d.ts +5 -5
- package/build/module/src/dsp/loader.js +1 -1
- package/build/module/src/dsp/loader.js.map +1 -1
- package/build/module/src/dsp/optimize.d.ts +12 -12
- package/build/module/src/dsp/optimize.js +5 -5
- package/build/module/src/dsp/optimize.js.map +1 -1
- package/build/module/src/dsp/parser.d.ts +2 -1
- package/build/module/src/dsp/parser.js +178 -73
- package/build/module/src/dsp/parser.js.map +1 -1
- package/build/module/src/dsp/program.d.ts +38 -37
- package/build/module/src/dsp/program.js +4 -53
- package/build/module/src/dsp/program.js.map +1 -1
- package/build/module/src/dsp/prompt.d.ts +36 -13
- package/build/module/src/dsp/prompt.js +123 -51
- package/build/module/src/dsp/prompt.js.map +1 -1
- package/build/module/src/dsp/registry.d.ts +1 -1
- package/build/module/src/dsp/registry.js +1 -1
- package/build/module/src/dsp/registry.js.map +1 -1
- package/build/module/src/dsp/router.d.ts +7 -7
- package/build/module/src/dsp/router.js +4 -4
- package/build/module/src/dsp/router.js.map +1 -1
- package/build/module/src/dsp/sig.d.ts +13 -13
- package/build/module/src/dsp/sig.js +7 -3
- package/build/module/src/dsp/sig.js.map +1 -1
- package/build/module/src/dsp/sig.test.js +4 -1
- package/build/module/src/dsp/sig.test.js.map +1 -1
- package/build/module/src/dsp/stopwords.js +1 -0
- package/build/module/src/dsp/stopwords.js.map +1 -1
- package/build/module/src/dsp/strutil.d.ts +10 -8
- package/build/module/src/dsp/strutil.js +14 -6
- package/build/module/src/dsp/strutil.js.map +1 -1
- package/build/module/src/dsp/util.d.ts +4 -0
- package/build/module/src/dsp/util.js +62 -0
- package/build/module/src/dsp/util.js.map +1 -1
- package/build/module/src/examples/agent.js +7 -5
- package/build/module/src/examples/agent.js.map +1 -1
- package/build/module/src/examples/chain-of-thought.js +5 -3
- package/build/module/src/examples/chain-of-thought.js.map +1 -1
- package/build/module/src/examples/customer-support.js +6 -4
- package/build/module/src/examples/customer-support.js.map +1 -1
- package/build/module/src/examples/fibonacci.js +6 -4
- package/build/module/src/examples/fibonacci.js.map +1 -1
- package/build/module/src/examples/food-search.js +8 -8
- package/build/module/src/examples/food-search.js.map +1 -1
- package/build/module/src/examples/marketing.js +5 -3
- package/build/module/src/examples/marketing.js.map +1 -1
- package/build/module/src/examples/multi-modal.d.ts +1 -0
- package/build/module/src/examples/multi-modal.js +16 -0
- package/build/module/src/examples/multi-modal.js.map +1 -0
- package/build/module/src/examples/qna-tune.js +9 -7
- package/build/module/src/examples/qna-tune.js.map +1 -1
- package/build/module/src/examples/qna-use-tuned.js +8 -6
- package/build/module/src/examples/qna-use-tuned.js.map +1 -1
- package/build/module/src/examples/rag-docs.js +7 -5
- package/build/module/src/examples/rag-docs.js.map +1 -1
- package/build/module/src/examples/rag.js +6 -4
- package/build/module/src/examples/rag.js.map +1 -1
- package/build/module/src/examples/react.js +5 -3
- package/build/module/src/examples/react.js.map +1 -1
- package/build/module/src/examples/routing.js +9 -7
- package/build/module/src/examples/routing.js.map +1 -1
- package/build/module/src/examples/smart-home.d.ts +10 -0
- package/build/module/src/examples/smart-home.js +106 -0
- package/build/module/src/examples/smart-home.js.map +1 -0
- package/build/module/src/examples/streaming1.js +7 -5
- package/build/module/src/examples/streaming1.js.map +1 -1
- package/build/module/src/examples/streaming2.js +9 -7
- package/build/module/src/examples/streaming2.js.map +1 -1
- package/build/module/src/examples/summarize.js +5 -5
- package/build/module/src/examples/summarize.js.map +1 -1
- package/build/module/src/examples/vectordb.js +6 -4
- package/build/module/src/examples/vectordb.js.map +1 -1
- package/build/module/src/funcs/code.d.ts +3 -4
- package/build/module/src/funcs/code.js +15 -15
- package/build/module/src/funcs/code.js.map +1 -1
- package/build/module/src/funcs/embed.d.ts +3 -5
- package/build/module/src/funcs/embed.js +2 -2
- package/build/module/src/funcs/embed.js.map +1 -1
- package/build/module/src/funcs/functions.d.ts +18 -0
- package/build/module/src/funcs/functions.js +55 -0
- package/build/module/src/funcs/functions.js.map +1 -0
- package/build/module/src/funcs/index.d.ts +1 -0
- package/build/module/src/funcs/index.js +1 -0
- package/build/module/src/funcs/index.js.map +1 -1
- package/build/module/src/funcs/jsonschema.d.ts +2 -0
- package/build/module/src/funcs/jsonschema.js +97 -0
- package/build/module/src/funcs/jsonschema.js.map +1 -0
- package/build/module/src/index.d.ts +1 -2
- package/build/module/src/index.js +1 -2
- package/build/module/src/index.js.map +1 -1
- package/build/module/src/mem/index.d.ts +2 -0
- package/build/module/src/mem/index.js +3 -0
- package/build/module/src/mem/index.js.map +1 -0
- package/build/module/src/mem/memory.d.ts +16 -0
- package/build/module/src/mem/memory.js +74 -0
- package/build/module/src/mem/memory.js.map +1 -0
- package/build/module/src/mem/types.d.ts +10 -0
- package/build/module/src/mem/types.js +2 -0
- package/build/module/src/mem/types.js.map +1 -0
- package/build/module/src/prompts/agent.d.ts +14 -15
- package/build/module/src/prompts/agent.js +10 -11
- package/build/module/src/prompts/agent.js.map +1 -1
- package/build/module/src/prompts/cot.d.ts +6 -6
- package/build/module/src/prompts/cot.js +4 -4
- package/build/module/src/prompts/cot.js.map +1 -1
- package/build/module/src/prompts/prompts.test.js +9 -5
- package/build/module/src/prompts/prompts.test.js.map +1 -1
- package/build/module/src/prompts/rag.d.ts +7 -7
- package/build/module/src/prompts/rag.js +6 -6
- package/build/module/src/prompts/rag.js.map +1 -1
- package/build/module/src/prompts/react.d.ts +7 -7
- package/build/module/src/prompts/react.js +4 -4
- package/build/module/src/prompts/react.js.map +1 -1
- package/build/module/src/text/functions.d.ts +3 -20
- package/build/module/src/text/functions.js.map +1 -1
- package/build/module/src/text/jsonschema.d.ts +1 -2
- package/build/module/src/text/memory.d.ts +8 -8
- package/build/module/src/text/memory.js.map +1 -1
- package/build/module/src/text/types.d.ts +14 -14
- package/build/module/src/trace/index.d.ts +50 -50
- package/build/module/src/trace/index.js +30 -30
- package/build/module/src/trace/index.js.map +1 -1
- package/build/module/src/types/index.d.ts +9 -39
- package/build/module/src/types/index.js +1 -1
- package/build/module/src/types/index.js.map +1 -1
- package/build/module/src/util/apicall.d.ts +2 -5
- package/build/module/src/util/apicall.js +4 -3
- package/build/module/src/util/apicall.js.map +1 -1
- package/build/module/src/util/log.js +1 -0
- package/build/module/src/util/log.js.map +1 -1
- package/build/module/src/util/other.js +1 -0
- package/build/module/src/util/other.js.map +1 -1
- package/build/module/src/util/stream.d.ts +0 -1
- package/build/module/src/util/stream.js +1 -0
- package/build/module/src/util/stream.js.map +1 -1
- package/build/module/src/util/transform.d.ts +0 -1
- package/build/module/src/util/transform.js +2 -0
- package/build/module/src/util/transform.js.map +1 -1
- package/package.json +2 -4
package/README.md
CHANGED
@@ -1,28 +1,28 @@
 # Ax - Build LLMs Powered Agents (Typescript)
 
-
+Build intelligent agents with ease, inspired by the power of "Agentic workflows" and the Stanford DSP paper. Seamlessly integrates with multiple LLMs and VectorDBs to build RAG pipelines or collaborative agents that can solve complex problems. Advanced features include streaming validation, multi-modal DSP, etc.
 
 [](https://www.npmjs.com/package/@ax-llm/ax)
 [](https://twitter.com/dosco)
 [](https://discord.gg/DSHg3dU7dW)
 
-
 
-##
+## Our focus on agents
 
-
+We've renamed from "llmclient" to "ax" to highlight our focus on powering agentic workflows. We agree with many experts, like Andrew Ng, that agentic workflows are the key to unlocking the true power of large language models and what can be achieved with in-context learning. We are also big fans of the Stanford DSP paper, and this library is the result of all of this coming together into a powerful framework for you to build with.
 
-
+
 
 ## Why use Ax?
 
 - Support for various LLMs and Vector DBs
 - Prompts auto-generated from simple signatures
-- Multi-Hop RAG, ReAcT, CoT, Function Calling and more
 - Build Agents that can call other agents
 - Convert docs of any format to text
 - RAG, smart chunking, embedding, querying
-- Output
+- Output validation while streaming
+- Multi-modal DSP supported
 - Automatic prompt tuning using optimizers
 - OpenTelemetry tracing / observability
 - Production ready Typescript code
@@ -32,7 +32,7 @@ Build powerful workflows using components like RAG, ReAcT, Chain of Thought, Fun
 
 <img width="860" alt="shapes at 24-03-31 00 05 55" src="https://github.com/dosco/llm-client/assets/832235/0f0306ea-1812-4a0a-9ed5-76cd908cd26b">
 
-Efficient type-safe prompts are auto-generated from a simple signature. A prompt signature is made of a
+Efficient type-safe prompts are auto-generated from a simple signature. A prompt signature is made up of a `"task description" inputField:type "field description" -> outputField:type`. The idea behind prompt signatures is based on work done in the "Demonstrate-Search-Predict" paper.
 
 You can have multiple input and output fields, and each field has one of these types: `string`, `number`, `boolean`, `json`, or an array of any of these, e.g. `string[]`. When a type is not defined it defaults to `string`. When the `json` type is used the underlying AI is encouraged to generate correct JSON.
 
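The signature syntax described in the hunk above supports several typed fields in one prompt; a minimal sketch of what that can look like (the field names here are made up for illustration, not taken from the package):

```typescript
import { axAI, AxChainOfThought } from '@ax-llm/ax';

const ai = axAI('openai', { apiKey: process.env.OPENAI_APIKEY });

// "context" is a string array, "confidence" is a number; fields with no
// type annotation ("question", "answer") default to string.
const gen = new AxChainOfThought(
  ai,
  `"Answer strictly from the given context" context:string[], question -> answer, confidence:number`
);
```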
@@ -63,13 +63,13 @@ yarn add @ax-llm/ax
 ## Example: Using chain-of-thought to summarize text
 
 ```typescript
-import {
+import { axAI, AxChainOfThought } from '@ax-llm/ax';
 
 const textToSummarize = `
 The technological singularity—or simply the singularity[1]—is a hypothetical future point in time at which technological growth becomes uncontrollable and irreversible, resulting in unforeseeable changes to human civilization.[2][3] ...`;
 
-const ai =
-const gen = new
+const ai = axAI('openai', { apiKey: process.env.OPENAI_APIKEY });
+const gen = new AxChainOfThought(
   ai,
   `textToSummarize -> shortSummary "summarize in 5 to 10 words"`
 );
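The hunk ends before the program is actually run; a rough usage sketch, assuming the result object exposes the signature's output field by name:

```typescript
// Run the chain-of-thought program declared above
const res = await gen.forward({ textToSummarize });
console.log(res.shortSummary);
```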
@@ -85,19 +85,19 @@ Use the agent prompt (framework) to build agents that work with other agents to
 ```typescript
 # npm run tsx ./src/examples/agent.ts
 
-const researcher = new
+const researcher = new AxAgent(ai, {
   name: 'researcher',
   description: 'Researcher agent',
   signature: `physicsQuestion "physics questions" -> answer "reply in bullet points"`
 });
 
-const summarizer = new
+const summarizer = new AxAgent(ai, {
   name: 'summarizer',
   description: 'Summarizer agent',
   signature: `text "text to summarize" -> shortSummary "summarize in 5 to 10 words"`
 });
 
-const agent = new
+const agent = new AxAgent(ai, {
   name: 'agent',
   description: 'An agent to research complex topics',
   signature: `question -> answer`,
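The hunk cuts off before the parent agent's remaining options, so here is a hypothetical sketch of how the three agents above might be wired together; the `agents` option and the exact call shape are assumptions, not taken from this diff:

```typescript
const agent = new AxAgent(ai, {
  name: 'agent',
  description: 'An agent to research complex topics',
  signature: `question -> answer`,
  agents: [researcher, summarizer] // assumed option for registering sub-agents
});

const res = await agent.forward({
  question: 'How many atoms are there in the universe?'
});
```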
@@ -107,49 +107,6 @@ const agent = new Agent(ai, {
 agent.forward({ question: "How many atoms are there in the universe" })
 ```
 
-## Fast LLM Router
-
-A special router that uses no LLM calls only embeddings to route user requests smartly.
-
-Use the Router to efficiently route user queries to specific routes designed to handle certain types of questions or tasks. Each route is tailored to a particular domain or service area. Instead of using a slow or expensive LLM to decide how input from the user should be handled use our fast "Semantic Router" that uses inexpensive and fast embedding queries.
-
-```typescript
-# npm run tsx ./src/examples/routing.ts
-
-const customerSupport = new Route('customerSupport', [
-  'how can I return a product?',
-  'where is my order?',
-  'can you help me with a refund?',
-  'I need to update my shipping address',
-  'my product arrived damaged, what should I do?'
-]);
-
-const technicalSupport = new Route('technicalSupport', [
-  'how do I install your software?',
-  'I’m having trouble logging in',
-  'can you help me configure my settings?',
-  'my application keeps crashing',
-  'how do I update to the latest version?'
-]);
-
-const ai = AI('openai', { apiKey: process.env.OPENAI_APIKEY } as OpenAIArgs);
-
-const router = new Router(ai);
-await router.setRoutes(
-  [customerSupport, technicalSupport],
-  { filename: 'router.json' }
-);
-
-const tag = await router.forward('I need help with my order');
-
-if (tag === "customerSupport") {
-  ...
-}
-if (tag === "technicalSupport") {
-  ...
-}
-```
-
 ## Vector DBs Supported
 
 Vector databases are critical to building LLM workflows. We have clean abstractions over popular vector DBs as well as our own quick in-memory vector database.
@@ -166,7 +123,7 @@ Vector databases are critical to building LLM workflows. We have clean abstracti
 const ret = await this.ai.embed({ texts: 'hello world' });
 
 // Create an in memory vector db
-const db = new
+const db = new axDB('memory');
 
 // Insert into vector db
 await this.db.upsert({
@@ -182,11 +139,11 @@ const matches = await this.db.query({
 });
 ```
 
-Alternatively you can use the
+Alternatively you can use the `AxDBManager`, which handles smart chunking, embedding and querying everything
 for you; it makes things almost too easy.
 
 ```typescript
-const manager = new
+const manager = new AxDBManager({ ai, db });
 await manager.insert(text);
 
 const matches = await manager.query(
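Reading the two hunks above together, the in-memory flow looks roughly like the sketch below; the exact object shapes passed to `upsert` and `query` (and the `ret.embeddings` field) are not visible in this diff, so treat them as illustrative assumptions:

```typescript
import { axAI, axDB } from '@ax-llm/ax';

const ai = axAI('openai', { apiKey: process.env.OPENAI_APIKEY });
const db = new axDB('memory');

// Embed the text to index (embed() appears in the hunk above)
const ret = await ai.embed({ texts: 'hello world' });

// Store the embedding, then query with an embedding to get matches.
// The { id, values } / { values } shapes are assumed for illustration only.
await db.upsert({ id: 'doc1', values: ret.embeddings[0] });
const matches = await db.query({ values: ret.embeddings[0] });
console.log(matches);
```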
@@ -205,26 +162,43 @@ Launch Apache Tika
 docker run -p 9998:9998 apache/tika
 ```
 
-Convert documents to text and embed them for retrieval using the
+Convert documents to text and embed them for retrieval using the `AxDBManager`; it also supports a reranker and query rewriter. Two default implementations, `AxDefaultResultReranker` and `AxDefaultQueryRewriter`, are available to use.
 
 ```typescript
-const tika = new
+const tika = new AxApacheTika();
 const text = await tika.convert('/path/to/document.pdf');
 
-const manager = new
+const manager = new AxDBManager({ ai, db });
 await manager.insert(text);
 
 const matches = await manager.query('Find some text');
 console.log(matches);
 ```
 
+## Multi-modal DSP
+
+When using models like `gpt-4o` and `gemini` that support multi-modal prompts, we support using image fields, and this works with the whole DSP pipeline.
+
+```typescript
+const image = fs
+  .readFileSync('./src/examples/assets/kitten.jpeg')
+  .toString('base64');
+
+const gen = new AxChainOfThought(ai, `question, animalImage:image -> answer`);
+
+const res = await gen.forward({
+  question: 'What family does this animal belong to?',
+  animalImage: { mimeType: 'image/jpeg', data: image }
+});
+```
+
 ## Streaming
 
 We support parsing output fields and function execution while streaming. This allows for fail-fast and error correction without having to wait for the whole output, saving tokens and cost and reducing latency. Assertions are a powerful way to ensure the output matches your requirements; these work with streaming as well.
 
 ```typescript
 // setup the prompt program
-const gen = new
+const gen = new AxChainOfThought(
   ai,
   `startNumber:number -> next10Numbers:number[]`
 );
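The streaming hunk stops at the program setup; the validation hooks themselves sit in the unchanged part of the README. A small usage sketch for the program as declared (only `forward` is shown in this diff):

```typescript
// Run the program; output fields are parsed and validated as tokens
// stream in, so a bad field can fail fast instead of at the very end.
const res = await gen.forward({ startNumber: 1 });
console.log(res.next10Numbers);
```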
@@ -267,6 +241,49 @@ const res = await gen.forward(
 );
 ```
 
+## Fast LLM Router
+
+A special router that uses no LLM calls, only embeddings, to route user requests smartly.
+
+Use the Router to efficiently route user queries to specific routes designed to handle certain types of questions or tasks. Each route is tailored to a particular domain or service area. Instead of using a slow or expensive LLM to decide how input from the user should be handled, use our fast "Semantic Router", which relies on inexpensive and fast embedding queries.
+
+```typescript
+# npm run tsx ./src/examples/routing.ts
+
+const customerSupport = new AxRoute('customerSupport', [
+  'how can I return a product?',
+  'where is my order?',
+  'can you help me with a refund?',
+  'I need to update my shipping address',
+  'my product arrived damaged, what should I do?'
+]);
+
+const technicalSupport = new AxRoute('technicalSupport', [
+  'how do I install your software?',
+  'I’m having trouble logging in',
+  'can you help me configure my settings?',
+  'my application keeps crashing',
+  'how do I update to the latest version?'
+]);
+
+const ai = axAI('openai', { apiKey: process.env.OPENAI_APIKEY });
+
+const router = new AxRouter(ai);
+await router.setRoutes(
+  [customerSupport, technicalSupport],
+  { filename: 'router.json' }
+);
+
+const tag = await router.forward('I need help with my order');
+
+if (tag === "customerSupport") {
+  ...
+}
+if (tag === "technicalSupport") {
+  ...
+}
+```
+
 ## OpenTelemetry support
 
 The ability to trace and observe your LLM workflow is critical to building production workflows. OpenTelemetry is an industry standard and we support the new `gen_ai` attribute namespace.
@@ -285,12 +302,12 @@ trace.setGlobalTracerProvider(provider);
 
 const tracer = trace.getTracer('test');
 
-const ai =
+const ai = axAI('ollama', {
   model: 'nous-hermes2',
   options: { tracer }
 } as unknown as OllamaArgs);
 
-const gen = new
+const gen = new AxChainOfThought(
   ai,
   `text -> shortSummary "summarize in 5 to 10 words"`
 );
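The hunk header above references a `provider` and `trace.setGlobalTracerProvider(provider)` that live in the unchanged part of the README; a minimal sketch of that standard OpenTelemetry setup (package names and processor wiring are stock OpenTelemetry JS, not taken from this diff, and may vary by SDK version):

```typescript
import { trace } from '@opentelemetry/api';
import {
  BasicTracerProvider,
  ConsoleSpanExporter,
  SimpleSpanProcessor
} from '@opentelemetry/sdk-trace-base';

// Print spans to the console so the gen_ai.* attributes are easy to inspect
const provider = new BasicTracerProvider();
provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter()));
trace.setGlobalTracerProvider(provider);

const tracer = trace.getTracer('test');
```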
@@ -322,26 +339,13 @@ const res = await gen.forward({ text });
 }
 ```
 
-Alternatively you can use the `DBManager` which handles smart chunking, embedding and querying everything
-for you, it makes things almost too easy.
-
-```typescript
-const manager = new DBManager({ ai, db });
-await manager.insert(text);
-
-const matches = await manager.query(
-  'John von Neumann on human intelligence and singularity.'
-);
-console.log(matches);
-```
-
 ## Tuning the prompts (programs)
 
-You can tune your prompts using a larger model to help them run more efficiently and give you better results. This is done by using an optimizer like
+You can tune your prompts using a larger model to help them run more efficiently and give you better results. This is done by using an optimizer like `AxBootstrapFewShot` with examples from the popular `HotPotQA` dataset. The optimizer generates demonstrations (`demos`) which, when used with the prompt, help improve its efficiency.
 
 ```typescript
 // Download the HotPotQA dataset from huggingface
-const hf = new
+const hf = new AxHFDataLoader();
 const examples = await hf.getData<{ question: string; answer: string }>({
   dataset: 'hotpot_qa',
   split: 'train',
@@ -349,24 +353,25 @@ const examples = await hf.getData<{ question: string; answer: string }>({
   fields: ['question', 'answer']
 });
 
-const ai =
+const ai = axAI('openai', { apiKey: process.env.OPENAI_APIKEY });
 
 // Setup the program to tune
-const program = new
+const program = new AxChainOfThought<{ question: string }, { answer: string }>(
   ai,
   `question -> answer "in short 2 or 3 words"`
 );
 
 // Setup a Bootstrap Few Shot optimizer to tune the above program
-const optimize = new
-{
-
-
-
-
+const optimize = new AxBootstrapFewShot<
+  { question: string },
+  { answer: string }
+>({
+  program,
+  examples
+});
 
 // Setup an evaluation metric; em and f1 scores are a popular way to measure retrieval performance.
-const metricFn:
+const metricFn: AxMetricFn = ({ prediction, example }) =>
   emScore(prediction.answer as string, example.answer as string);
 
 // Run the optimizer and save the result
@@ -378,10 +383,10 @@ await optimize.compile(metricFn, { filename: 'demos.json' });
 And to use the generated demos with the above `ChainOfThought` program
 
 ```typescript
-const ai =
+const ai = axAI('openai', { apiKey: process.env.OPENAI_APIKEY });
 
 // Setup the program to use the tuned data
-const program = new
+const program = new AxChainOfThought<{ question: string }, { answer: string }>(
   ai,
   `question -> answer "in short 2 or 3 words"`
 );
@@ -421,6 +426,8 @@ OPENAI_APIKEY=openai_key npm run tsx ./src/examples/marketing.ts
 | qna-use-tuned.ts | Use the optimized tuned prompts |
 | streaming1.ts | Output fields validation while streaming |
 | streaming2.ts | Per output field validation while streaming |
+| smart-home.ts | Agent looks for a dog in a smart home |
+| multi-modal.ts | Use an image input along with other text inputs |
 
 ## Built-in Functions
 
@@ -439,7 +446,7 @@ Large language models (LLMs) are getting really powerful and have reached a poin
 
 ```ts
 // Pick an LLM
-const ai = new
+const ai = new AxOpenAI({ apiKey: process.env.OPENAI_APIKEY } as AxOpenAIArgs);
 ```
 
 ### 2. Create a prompt signature based on your usecase
@@ -501,13 +508,13 @@ const functions = [
 ### 2. Pass the functions to a prompt
 
 ```ts
-const cot = new
+const cot = new AxReAct(ai, `question:string -> answer:string`, { functions });
 ```
 
 ## Enable debug logs
 
 ```ts
-const ai = new
+const ai = new AxOpenAI({ apiKey: process.env.OPENAI_APIKEY } as AxOpenAIArgs);
 ai.setOptions({ debug: true });
 ```
 
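The `functions` array passed to `AxReAct` above is defined in an unchanged part of the README and is not visible in this diff; as a purely hypothetical sketch of the general shape such a definition tends to take (every field name below is an assumption, not the package's documented schema):

```typescript
const functions = [
  {
    // Name and description the LLM sees when deciding to call the function
    name: 'getCurrentWeather',
    description: 'Get the current weather for a given location',
    // JSON-schema style parameter description (assumed shape)
    parameters: {
      type: 'object',
      properties: {
        location: { type: 'string', description: 'City name' }
      },
      required: ['location']
    },
    // Handler invoked when the model calls the function (stub implementation)
    func: async ({ location }: { location: string }) =>
      `The weather in ${location} is sunny`
  }
];
```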
@@ -528,20 +535,20 @@ You can pass a configuration object as the second parameter when creating a new
 
 ```ts
 const apiKey = process.env.OPENAI_APIKEY;
-const conf =
-const ai = new
+const conf = AxOpenAIBestConfig();
+const ai = new AxOpenAI({ apiKey, conf } as AxOpenAIArgs);
 ```
 
 ## 3. My prompt is too long, can I change the max tokens?
 
 ```ts
-const conf =
+const conf = axOpenAIDefaultConfig(); // or OpenAIBestOptions()
 conf.maxTokens = 2000;
 ```
 
 ## 4. How do I change the model, say I want to use GPT4?
 
 ```ts
-const conf =
+const conf = axOpenAIDefaultConfig(); // or OpenAIBestOptions()
 conf.model = OpenAIModel.GPT4Turbo;
 ```
package/build/module/docs/assets/navigation.js
CHANGED
@@ -1,3 +1,3 @@
-window.navigationData = "data:application/octet-stream;base64,
+window.navigationData = "data:application/octet-stream;base64,…" (regenerated docs navigation data; base64 payload omitted)
 export {};
 //# sourceMappingURL=navigation.js.map

package/build/module/docs/assets/navigation.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"navigation.js","sourceRoot":"","sources":["../../../../docs/assets/navigation.js"],"names":[],"mappings":"AAAA,MAAM,CAAC,cAAc,GAAG,
+{"version":3,"file":"navigation.js","sourceRoot":"","sources":["../../../../docs/assets/navigation.js"],"names":[],"mappings":"AAAA,MAAM,CAAC,cAAc,GAAG,+kGAA+kG,CAAA"}