@positronic/template-new-project 0.0.74 → 0.0.75
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +4 -4
- package/package.json +1 -1
- package/template/_env +1 -1
- package/template/brains/example.ts +4 -0
- package/template/runner.ts +33 -1
package/index.js
CHANGED
|
@@ -53,10 +53,10 @@ module.exports = {
|
|
|
53
53
|
],
|
|
54
54
|
setup: async ctx => {
|
|
55
55
|
const devRootPath = process.env.POSITRONIC_LOCAL_PATH;
|
|
56
|
-
let coreVersion = '^0.0.74';
|
|
57
|
-
let cloudflareVersion = '^0.0.74';
|
|
58
|
-
let clientVercelVersion = '^0.0.74';
|
|
59
|
-
let genUIComponentsVersion = '^0.0.74';
|
|
56
|
+
let coreVersion = '^0.0.75';
|
|
57
|
+
let cloudflareVersion = '^0.0.75';
|
|
58
|
+
let clientVercelVersion = '^0.0.75';
|
|
59
|
+
let genUIComponentsVersion = '^0.0.75';
|
|
60
60
|
|
|
61
61
|
// Map backend selection to package names
|
|
62
62
|
const backendPackageMap = {
|
package/package.json
CHANGED
package/template/_env
CHANGED
package/template/brains/example.ts
CHANGED
|
@@ -1,6 +1,10 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
2
|
import { brain } from '../brain.js';
|
|
3
3
|
|
|
4
|
+
// This brain uses the AI client configured in runner.ts.
|
|
5
|
+
// By default it uses Google Gemini (requires GOOGLE_GENERATIVE_AI_API_KEY in .env).
|
|
6
|
+
// See runner.ts to switch to Anthropic, OpenAI, or any other Vercel AI SDK provider.
|
|
7
|
+
|
|
4
8
|
const exampleBrain = brain('example')
|
|
5
9
|
.step('Start', ({ state }) => ({
|
|
6
10
|
...state,
|
package/template/runner.ts
CHANGED
|
@@ -5,6 +5,39 @@ import { google } from '@ai-sdk/google';
|
|
|
5
5
|
/**
|
|
6
6
|
* The BrainRunner executes brains with the configured client and adapters.
|
|
7
7
|
*
|
|
8
|
+
* ## AI Provider Setup
|
|
9
|
+
*
|
|
10
|
+
* By default this uses Google Gemini. Set GOOGLE_GENERATIVE_AI_API_KEY in
|
|
11
|
+
* your .env file (get a key at https://aistudio.google.com/apikey).
|
|
12
|
+
*
|
|
13
|
+
* To switch to a different provider, install its Vercel AI SDK adapter
|
|
14
|
+
* and swap the model below:
|
|
15
|
+
*
|
|
16
|
+
* **Anthropic (Claude):**
|
|
17
|
+
* ```bash
|
|
18
|
+
* npm install @ai-sdk/anthropic
|
|
19
|
+
* ```
|
|
20
|
+
* ```typescript
|
|
21
|
+
* import { anthropic } from '@ai-sdk/anthropic';
|
|
22
|
+
* const client = new VercelClient(anthropic('claude-sonnet-4-5-20250929'));
|
|
23
|
+
* ```
|
|
24
|
+
* Then set ANTHROPIC_API_KEY in your .env file.
|
|
25
|
+
*
|
|
26
|
+
* **OpenAI:**
|
|
27
|
+
* ```bash
|
|
28
|
+
* npm install @ai-sdk/openai
|
|
29
|
+
* ```
|
|
30
|
+
* ```typescript
|
|
31
|
+
* import { openai } from '@ai-sdk/openai';
|
|
32
|
+
* const client = new VercelClient(openai('gpt-4o'));
|
|
33
|
+
* ```
|
|
34
|
+
* Then set OPENAI_API_KEY in your .env file.
|
|
35
|
+
*
|
|
36
|
+
* Any provider supported by the Vercel AI SDK works — just install the
|
|
37
|
+
* package and pass the model to VercelClient.
|
|
38
|
+
*
|
|
39
|
+
* ## Memory
|
|
40
|
+
*
|
|
8
41
|
* To add memory (automatic conversation indexing with Mem0):
|
|
9
42
|
*
|
|
10
43
|
* ```typescript
|
|
@@ -23,7 +56,6 @@ import { google } from '@ai-sdk/google';
|
|
|
23
56
|
* });
|
|
24
57
|
* ```
|
|
25
58
|
*
|
|
26
|
-
* The adapter automatically indexes all agent conversations to memory.
|
|
27
59
|
* See docs/memory-guide.md for more details.
|
|
28
60
|
*/
|
|
29
61
|
const client = new VercelClient(google('gemini-3-pro-preview'));
|