workers-ai-provider 0.0.1

package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Sunil Pai
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,56 @@
+ # workers-ai-provider ⬡ ⤫ ▴
+
+ A custom provider that enables [Workers AI](https://ai.cloudflare.com/)'s models for the [Vercel AI SDK](https://sdk.vercel.ai/).
+
+ > [!CAUTION]
+ > This project is in its experimental early stages and is not recommended for production use.
+
+ ## Install
+
+ ```bash
+ npm install workers-ai-provider
+ ```
+
+ ## Usage
+
+ First, set up an AI binding in `wrangler.toml`:
+
+ ```toml
+ # ...
+ [ai]
+ binding = "AI"
+ # ...
+ ```
+
+ Then, in your Worker, import the factory function and create a new AI provider:
+
+ ```ts
+ // index.ts
+ import { createWorkersAI } from "workers-ai-provider";
+ import * as ai from "ai";
+
+ export default {
+   async fetch(req: Request, env: Env) {
+     const workersai = createWorkersAI({ binding: env.AI });
+     // Use the AI provider to interact with the Vercel AI SDK
+     // Here, we generate a chat stream based on a prompt
+     const response = await ai.streamText({
+       model: workersai("@cf/meta/llama-2-7b-chat-int8"),
+       messages: [
+         {
+           role: "user",
+           content: "Write an essay about hello world",
+         },
+       ],
+     });
+
+     return response.toTextStreamResponse();
+   },
+ };
+ ```
+
+ For more info, refer to the documentation of the [Vercel AI SDK](https://sdk.vercel.ai/).
+
+ ### Credits
+
+ Based on work by [Dhravya Shah](https://twitter.com/DhravyaShah) and the Workers AI team at Cloudflare.
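The Worker example in this README references an `Env` type and an `env.AI` binding without declaring them. A minimal sketch of that binding type, together with a non-streaming variant that uses the AI SDK's `generateText` (same package import and model ID as above; the prompt is a placeholder), might look like this:

```ts
// Sketch only: Env is assumed to match the [ai] binding named "AI" in
// wrangler.toml; the global `Ai` type comes from @cloudflare/workers-types.
import { createWorkersAI } from "workers-ai-provider";
import { generateText } from "ai";

interface Env {
  AI: Ai;
}

export default {
  async fetch(req: Request, env: Env) {
    const workersai = createWorkersAI({ binding: env.AI });
    // Non-streaming variant: generateText resolves once the full completion is ready.
    const result = await generateText({
      model: workersai("@cf/meta/llama-2-7b-chat-int8"),
      prompt: "Write an essay about hello world",
    });
    return new Response(result.text);
  },
};
```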
@@ -0,0 +1,53 @@
+ import { LanguageModelV1 } from '@ai-sdk/provider';
+
+ interface WorkersAIChatSettings {
+   /**
+   Whether to inject a safety prompt before all conversations.
+
+   Defaults to `false`.
+   */
+   safePrompt?: boolean;
+ }
+
+ type WorkersAIChatConfig = {
+   provider: string;
+   binding: Ai;
+ };
+ declare class WorkersAIChatLanguageModel implements LanguageModelV1 {
+   readonly specificationVersion = "v1";
+   readonly defaultObjectGenerationMode = "json";
+   readonly modelId: BaseAiTextGenerationModels;
+   readonly settings: WorkersAIChatSettings;
+   private readonly config;
+   constructor(modelId: BaseAiTextGenerationModels, settings: WorkersAIChatSettings, config: WorkersAIChatConfig);
+   get provider(): string;
+   private getArgs;
+   doGenerate(options: Parameters<LanguageModelV1["doGenerate"]>[0]): Promise<Awaited<ReturnType<LanguageModelV1["doGenerate"]>>>;
+   doStream(options: Parameters<LanguageModelV1["doStream"]>[0]): Promise<Awaited<ReturnType<LanguageModelV1["doStream"]>>>;
+ }
+
+ interface WorkersAI {
+   (modelId: BaseAiTextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;
+   /**
+   Creates a model for text generation.
+   */
+   chat(modelId: BaseAiTextGenerationModels, settings?: WorkersAIChatSettings): WorkersAIChatLanguageModel;
+ }
+ interface WorkersAISettings {
+   /**
+   Provide an `env.AI` binding to use for the AI inference.
+   You can set up an AI binding in your Workers project
+   by adding the following to `wrangler.toml`:
+
+   ```toml
+   [ai]
+   binding = "AI"
+   ``` */
+   binding: Ai;
+ }
+ /**
+ Create a Workers AI provider instance.
+ */
+ declare function createWorkersAIProvider(options: WorkersAISettings): WorkersAI;
+
+ export { type WorkersAI, type WorkersAISettings, createWorkersAIProvider };
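Putting these declarations together: the factory exported here is `createWorkersAIProvider` (the README example imports `createWorkersAI`), it takes the Worker's `env.AI` binding, and it returns a `WorkersAI` whose callable form and `chat()` method both produce a `WorkersAIChatLanguageModel` usable with the Vercel AI SDK. A minimal, hypothetical usage sketch following the declarations above (model ID reused from the README):

```ts
// Usage sketch based on the declarations above; not taken from the package docs.
import { createWorkersAIProvider, type WorkersAI } from "workers-ai-provider";
import { generateText } from "ai";

export default {
  async fetch(req: Request, env: { AI: Ai }) {
    // WorkersAISettings: the Worker's `env.AI` binding is the only required option.
    const workersai: WorkersAI = createWorkersAIProvider({ binding: env.AI });

    // workersai(...) and workersai.chat(...) both return a
    // WorkersAIChatLanguageModel; `safePrompt` is the only declared chat setting.
    const model = workersai.chat("@cf/meta/llama-2-7b-chat-int8", { safePrompt: true });

    // The model implements LanguageModelV1, so it plugs into the AI SDK,
    // which drives doGenerate (generateText) and doStream (streamText).
    const { text } = await generateText({ model, prompt: "Write an essay about hello world" });
    return new Response(text);
  },
};
```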