@upstash/qstash 2.6.4-workflow-alpha.2 → 2.6.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +8 -29
- package/{chunk-AP5AZGAQ.mjs → chunk-6HSNQRF3.mjs} +540 -500
- package/chunk-CP4IU45K.mjs +59 -0
- package/{chunk-Z3TALRVS.js → chunk-MZSHBRF6.js} +552 -512
- package/chunk-UUR7N6E6.js +59 -0
- package/index.d.mts +1122 -17
- package/index.d.ts +1122 -17
- package/index.js +3 -5
- package/index.mjs +11 -13
- package/nextjs.d.mts +1 -3
- package/nextjs.d.ts +1 -3
- package/nextjs.js +5 -21
- package/nextjs.mjs +1 -17
- package/nuxt.d.mts +1 -11
- package/nuxt.d.ts +1 -11
- package/nuxt.js +4 -59
- package/nuxt.mjs +2 -57
- package/package.json +1 -1
- package/solidjs.d.mts +2 -4
- package/solidjs.d.ts +2 -4
- package/solidjs.js +4 -25
- package/solidjs.mjs +2 -23
- package/svelte.d.mts +1 -3
- package/svelte.d.ts +1 -3
- package/svelte.js +4 -27
- package/svelte.mjs +2 -25
- package/chunk-AIRND3SP.js +0 -1418
- package/chunk-F6QRAN74.js +0 -30
- package/chunk-FRFK4HSB.mjs +0 -1418
- package/chunk-TAUBWUPA.mjs +0 -30
- package/types-C4BlT_L_.d.mts +0 -1454
- package/types-C4BlT_L_.d.ts +0 -1454
- package/workflow.d.mts +0 -14
- package/workflow.d.ts +0 -14
- package/workflow.js +0 -13
- package/workflow.mjs +0 -13
package/README.md
CHANGED
@@ -55,11 +55,11 @@ import { Client } from "@upstash/qstash";
 */
 import "isomorphic-fetch";
 
-const
+const client = new Client({
   token: "<QSTASH_TOKEN>",
 });
 
-const res = await
+const res = await client.publishJSON({
   url: "https://my-api...",
   // or urlGroup: "the name or id of a url group"
   body: {
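For readability, here is the updated publishing snippet from the hunk above assembled into one runnable block. The hunk cuts off at `body: {`, so the `hello: "world"` payload and the final `console.log` line are illustrative additions, not part of the diff.

```ts
import { Client } from "@upstash/qstash";
import "isomorphic-fetch";

// The client binding used on the + side of the hunk.
const client = new Client({
  token: "<QSTASH_TOKEN>",
});

// Publish a JSON message to a URL (or to a URL group).
const res = await client.publishJSON({
  url: "https://my-api...",
  // or urlGroup: "the name or id of a url group"
  body: {
    hello: "world", // illustrative payload; not shown in the hunk
  },
});

console.log(res.messageId); // id of the enqueued message
```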
@@ -100,14 +100,14 @@ const isValid = await r.verify({
 })
 ```
 
-### Publishing a message to
+### Publishing a message to an LLM provider
 
 No need for complicated setup your LLM request. We'll call LLM and schedule it for your serverless needs.
 
 ```ts
 import { Client, openai } from "@upstash/qstash";
 
-const
+const client = new Client({
   token: "<QSTASH_TOKEN>",
 });
 
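The publishJSON call for this LLM example falls between this hunk and the next (the next hunk's header shows its opening line, `const result = await client.publishJSON({`), so it is not visible in the diff. Below is a plausible sketch of that call, assuming the `api: { name: "llm", provider: openai(...) }` shape and a `callback` field; those details are inferred from the `openai` import above and are not confirmed by this diff.

```ts
import { Client, openai } from "@upstash/qstash";

const client = new Client({
  token: "<QSTASH_TOKEN>",
});

// Sketch of the elided LLM publish: QStash calls the provider on your behalf
// and delivers the completion to the callback URL.
const result = await client.publishJSON({
  api: { name: "llm", provider: openai({ token: "<OPENAI_API_KEY>" }) }, // assumed shape
  body: {
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "Summarize the solar system." }],
  },
  callback: "https://my-callback...", // assumed; QStash posts the LLM response here
});
```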
@@ -126,31 +126,6 @@ const result = await client.publishJSON({
 });
 ```
 
-### Chatting with your favorite LLM
-
-You can easily start streaming Upstash or OpenAI responses from your favorite framework(Next.js) or library
-
-```ts
-import { upstash } from "@upstash/qstash";
-
-const response = await client.chat().create({
-  provider: upstash(), // Optionally, provider: "custom({token: "XXX", baseUrl: "https://api.openai.com"})". This will allow you to call every OpenAI compatible API out there.
-  model: "meta-llama/Meta-Llama-3-8B-Instruct", // Optionally, model: "gpt-3.5-turbo",
-  messages: [
-    {
-      role: "system",
-      content: "from now on, foo is whale",
-    },
-    {
-      role: "user",
-      content: "what exactly is foo?",
-    },
-  ],
-  stream: true,
-  temperature: 0.5,
-});
-```
-
 ## Docs
 
 See [the documentation](https://docs.upstash.com/qstash) for details.
@@ -158,3 +133,7 @@ See [the documentation](https://docs.upstash.com/qstash) for details.
 ## Contributing
 
 ### [Install Deno](https://deno.land/#installation)
+
+```
+
+```