@upstash/qstash 2.6.4 → 2.6.5-workflow-url-canary
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -8
- package/chunk-DH2LYTLA.mjs +2620 -0
- package/chunk-F6QRAN74.js +30 -0
- package/chunk-MFQHGR5V.js +2620 -0
- package/chunk-TAUBWUPA.mjs +30 -0
- package/client-CzkJKX67.d.mts +1569 -0
- package/client-CzkJKX67.d.ts +1569 -0
- package/index.d.mts +25 -1122
- package/index.d.ts +25 -1122
- package/index.js +7 -3
- package/index.mjs +14 -10
- package/nextjs.d.mts +4 -1
- package/nextjs.d.ts +4 -1
- package/nextjs.js +24 -5
- package/nextjs.mjs +21 -2
- package/nuxt.d.mts +7 -1
- package/nuxt.d.ts +7 -1
- package/nuxt.js +43 -4
- package/nuxt.mjs +42 -3
- package/package.json +1 -1
- package/solidjs.d.mts +5 -2
- package/solidjs.d.ts +5 -2
- package/solidjs.js +24 -4
- package/solidjs.mjs +23 -3
- package/svelte.d.mts +4 -1
- package/svelte.d.ts +4 -1
- package/svelte.js +22 -4
- package/svelte.mjs +21 -3
- package/workflow.d.mts +2 -0
- package/workflow.d.ts +2 -0
- package/workflow.js +16 -0
- package/workflow.mjs +16 -0
- package/chunk-6HSNQRF3.mjs +0 -1059
- package/chunk-CP4IU45K.mjs +0 -59
- package/chunk-MZSHBRF6.js +0 -1059
- package/chunk-UUR7N6E6.js +0 -59
package/README.md
CHANGED
|
@@ -55,11 +55,11 @@ import { Client } from "@upstash/qstash";
|
|
|
55
55
|
*/
|
|
56
56
|
import "isomorphic-fetch";
|
|
57
57
|
|
|
58
|
-
const
|
|
58
|
+
const c = new Client({
|
|
59
59
|
token: "<QSTASH_TOKEN>",
|
|
60
60
|
});
|
|
61
61
|
|
|
62
|
-
const res = await
|
|
62
|
+
const res = await c.publishJSON({
|
|
63
63
|
url: "https://my-api...",
|
|
64
64
|
// or urlGroup: "the name or id of a url group"
|
|
65
65
|
body: {
|
|
@@ -100,14 +100,14 @@ const isValid = await r.verify({
|
|
|
100
100
|
})
|
|
101
101
|
```
|
|
102
102
|
|
|
103
|
-
### Publishing a message to
|
|
103
|
+
### Publishing a message to OpenAI or any OpenAI-compatible LLM
|
|
104
104
|
|
|
105
105
|
No need for a complicated setup for your LLM request. We'll call the LLM and schedule it for your serverless needs.
|
|
106
106
|
|
|
107
107
|
```ts
|
|
108
108
|
import { Client, openai } from "@upstash/qstash";
|
|
109
109
|
|
|
110
|
-
const
|
|
110
|
+
const c = new Client({
|
|
111
111
|
token: "<QSTASH_TOKEN>",
|
|
112
112
|
});
|
|
113
113
|
|
|
@@ -126,6 +126,31 @@ const result = await client.publishJSON({
|
|
|
126
126
|
});
|
|
127
127
|
```
|
|
128
128
|
|
|
129
|
+
### Chatting with your favorite LLM
|
|
130
|
+
|
|
131
|
+
You can easily start streaming Upstash or OpenAI responses from your favorite framework (Next.js) or library.
|
|
132
|
+
|
|
133
|
+
```ts
|
|
134
|
+
import { upstash } from "@upstash/qstash";
|
|
135
|
+
|
|
136
|
+
const response = await client.chat().create({
|
|
137
|
+
provider: upstash(), // Optionally, provider: custom({ token: "XXX", baseUrl: "https://api.openai.com" }). This will allow you to call every OpenAI-compatible API out there.
|
|
138
|
+
model: "meta-llama/Meta-Llama-3-8B-Instruct", // Optionally, model: "gpt-3.5-turbo",
|
|
139
|
+
messages: [
|
|
140
|
+
{
|
|
141
|
+
role: "system",
|
|
142
|
+
content: "from now on, foo is whale",
|
|
143
|
+
},
|
|
144
|
+
{
|
|
145
|
+
role: "user",
|
|
146
|
+
content: "what exactly is foo?",
|
|
147
|
+
},
|
|
148
|
+
],
|
|
149
|
+
stream: true,
|
|
150
|
+
temperature: 0.5,
|
|
151
|
+
});
|
|
152
|
+
```
|
|
153
|
+
|
|
129
154
|
## Docs
|
|
130
155
|
|
|
131
156
|
See [the documentation](https://docs.upstash.com/qstash) for details.
|
|
@@ -133,7 +158,3 @@ See [the documentation](https://docs.upstash.com/qstash) for details.
|
|
|
133
158
|
## Contributing
|
|
134
159
|
|
|
135
160
|
### [Install Deno](https://deno.land/#installation)
|
|
136
|
-
|
|
137
|
-
```
|
|
138
|
-
|
|
139
|
-
```
|