create-better-t-stack 2.29.4 → 2.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +256 -347
- package/package.json +1 -1
- package/templates/addons/vibe-rules/.bts/rules.md.hbs +132 -0
- package/templates/backend/server/express/src/index.ts.hbs +9 -9
- package/templates/backend/server/fastify/src/index.ts.hbs +9 -12
- package/templates/backend/server/hono/src/index.ts.hbs +46 -43
- package/templates/examples/ai/native/nativewind/app/(drawer)/ai.tsx.hbs +36 -21
- package/templates/examples/ai/native/unistyles/app/(drawer)/ai.tsx.hbs +44 -22
- package/templates/examples/ai/server/next/src/app/ai/route.ts.hbs +15 -0
- package/templates/examples/ai/web/nuxt/app/pages/{ai.vue → ai.vue.hbs} +29 -14
- package/templates/examples/ai/web/react/next/src/app/ai/{page.tsx → page.tsx.hbs} +28 -8
- package/templates/examples/ai/web/react/react-router/src/routes/{ai.tsx → ai.tsx.hbs} +30 -7
- package/templates/examples/ai/web/react/tanstack-router/src/routes/{ai.tsx → ai.tsx.hbs} +26 -5
- package/templates/examples/ai/web/react/tanstack-start/src/routes/{ai.tsx → ai.tsx.hbs} +27 -6
- package/templates/examples/ai/web/svelte/src/routes/ai/+page.svelte.hbs +107 -0
- package/templates/extras/bunfig.toml.hbs +4 -4
- package/templates/examples/ai/server/next/src/app/ai/route.ts +0 -15
- package/templates/examples/ai/web/svelte/src/routes/ai/+page.svelte +0 -98
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "create-better-t-stack",
-  "version": "2.29.4",
+  "version": "2.31.0",
   "description": "A modern CLI tool for scaffolding end-to-end type-safe TypeScript projects with best practices and customizable configurations",
   "type": "module",
   "license": "MIT",

package/templates/addons/vibe-rules/.bts/rules.md.hbs
ADDED
@@ -0,0 +1,132 @@
+# Better-T-Stack Project Rules
+
+This is a {{projectName}} project created with Better-T-Stack CLI.
+
+## Project Structure
+
+This is a monorepo with the following structure:
+
+{{#if (or (includes frontend "tanstack-router") (includes frontend "react-router") (includes frontend "tanstack-start")
+(includes frontend "next") (includes frontend "nuxt") (includes frontend "svelte") (includes frontend "solid"))}}
+- **`apps/web/`** - Frontend application{{#if (includes frontend "tanstack-router")}} (React with TanStack Router){{else
+if (includes frontend "react-router")}} (React with React Router){{else if (includes frontend "next")}} (Next.js){{else
+if (includes frontend "nuxt")}} (Nuxt.js){{else if (includes frontend "svelte")}} (SvelteKit){{else if (includes
+frontend "solid")}} (SolidStart){{/if}}
+{{/if}}
+
+{{#if (ne backend "convex")}}
+{{#if (ne backend "none")}}
+- **`apps/server/`** - Backend server{{#if (eq backend "hono")}} (Hono){{else if (eq backend "express")}}
+(Express){{else if (eq backend "fastify")}} (Fastify){{else if (eq backend "elysia")}} (Elysia){{else if (eq backend
+"next")}} (Next.js API){{/if}}
+{{/if}}
+{{else}}
+- **`packages/backend/`** - Convex backend functions
+{{/if}}
+
+{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
+- **`apps/native/`** - React Native mobile app{{#if (includes frontend "native-nativewind")}} (with NativeWind){{else if
+(includes frontend "native-unistyles")}} (with Unistyles){{/if}}
+{{/if}}
+
+## Available Scripts
+
+- `{{packageManager}} run dev` - Start all apps in development mode
+{{#if (or (includes frontend "tanstack-router") (includes frontend "react-router") (includes frontend "tanstack-start")
+(includes frontend "next") (includes frontend "nuxt") (includes frontend "svelte") (includes frontend "solid"))}}
+- `{{packageManager}} run dev:web` - Start only the web app
+{{/if}}
+{{#if (ne backend "none")}}
+{{#if (ne backend "convex")}}
+- `{{packageManager}} run dev:server` - Start only the server
+{{/if}}
+{{/if}}
+{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
+- `{{packageManager}} run dev:native` - Start only the native app
+{{/if}}
+
+{{#if (and (ne database "none") (ne orm "none") (ne backend "convex"))}}
+## Database Commands
+
+All database operations should be run from the server workspace:
+
+- `{{packageManager}} run db:push` - Push schema changes to database
+- `{{packageManager}} run db:studio` - Open database studio
+- `{{packageManager}} run db:generate` - Generate {{#if (eq orm "drizzle")}}Drizzle{{else if (eq orm
+"prisma")}}Prisma{{else}}{{orm}}{{/if}} files
+- `{{packageManager}} run db:migrate` - Run database migrations
+
+{{#if (eq orm "drizzle")}}
+Database schema files are located in `apps/server/src/db/schema/`
+{{else if (eq orm "prisma")}}
+Database schema is located in `apps/server/prisma/schema.prisma`
+{{else if (eq orm "mongoose")}}
+Database models are located in `apps/server/src/db/models/`
+{{/if}}
+{{/if}}
+
+{{#if (ne api "none")}}
+## API Structure
+
+{{#if (eq api "trpc")}}
+- tRPC routers are in `apps/server/src/routers/`
+- Client-side tRPC utils are in `apps/web/src/utils/trpc.ts`
+{{else if (eq api "orpc")}}
+- oRPC endpoints are in `apps/server/src/api/`
+- Client-side API utils are in `apps/web/src/utils/api.ts`
+{{/if}}
+{{/if}}
+
+{{#if auth}}
+## Authentication
+
+Authentication is enabled in this project:
+{{#if (ne backend "convex")}}
+- Server auth logic is in `apps/server/src/lib/auth.ts`
+{{#if (or (includes frontend "tanstack-router") (includes frontend "react-router") (includes frontend "tanstack-start")
+(includes frontend "next") (includes frontend "nuxt") (includes frontend "svelte") (includes frontend "solid"))}}
+- Web app auth client is in `apps/web/src/lib/auth-client.ts`
+{{/if}}
+{{#if (or (includes frontend "native-nativewind") (includes frontend "native-unistyles"))}}
+- Native app auth client is in `apps/native/src/lib/auth-client.ts`
+{{/if}}
+{{else}}
+{{/if}}
+{{/if}}
+
+## Adding More Features
+
+You can add additional addons or deployment options to your project using:
+
+```bash
+{{#if (eq packageManager "bun")}}bunx{{else if (eq packageManager "pnpm")}}pnpx{{else}}npx{{/if}} create-better-t-stack
+add
+```
+
+Available addons you can add:
+- **Documentation**: Starlight, Fumadocs
+- **Linting**: Biome, Oxlint, Ultracite
+- **Other**: vibe-rules, Turborepo, PWA, Tauri, Husky
+
+You can also add web deployment configurations like Cloudflare Workers support.
+
+## Project Configuration
+
+This project includes a `bts.jsonc` configuration file that stores your Better-T-Stack settings:
+
+- Contains your selected stack configuration (database, ORM, backend, frontend, etc.)
+- Used by the CLI to understand your project structure
+- Safe to delete if not needed
+- Updated automatically when using the `add` command
+
+## Key Points
+
+- This is a {{#if (includes addons "turborepo")}}Turborepo {{/if}}monorepo using {{packageManager}} workspaces
+- Each app has its own `package.json` and dependencies
+- Run commands from the root to execute across all workspaces
+- Run workspace-specific commands with `{{packageManager}} run command-name`
+{{#if (includes addons "turborepo")}}
+- Turborepo handles build caching and parallel execution
+{{/if}}
+- Use `{{#if (eq packageManager "bun")}}bunx{{else if (eq packageManager "pnpm")}}pnpx{{else}}npx{{/if}}
+create-better-t-stack add` to add more features later

package/templates/backend/server/express/src/index.ts.hbs
CHANGED
@@ -14,7 +14,7 @@ import { createContext } from "./lib/context";
 import cors from "cors";
 import express from "express";
 {{#if (includes examples "ai")}}
-import { streamText } from "ai";
+import { streamText, type UIMessage, convertToModelMessages } from "ai";
 import { google } from "@ai-sdk/google";
 {{/if}}
 {{#if auth}}
@@ -44,16 +44,16 @@ app.use(
   "/trpc",
   createExpressMiddleware({
     router: appRouter,
-    createContext
+    createContext,
   })
 );
 {{/if}}

 {{#if (eq api "orpc")}}
 const handler = new RPCHandler(appRouter);
-app.use('/rpc{*path}', async (req, res, next) => {
+app.use("/rpc{*path}", async (req, res, next) => {
   const { matched } = await handler.handle(req, res, {
-    prefix:
+    prefix: "/rpc",
     {{#if auth}}
     context: await createContext({ req }),
     {{else}}
@@ -65,16 +65,16 @@ app.use('/rpc{*path}', async (req, res, next) => {
   });
 {{/if}}

-app.use(express.json())
+app.use(express.json());

 {{#if (includes examples "ai")}}
 app.post("/ai", async (req, res) => {
-  const { messages = [] } = req.body || {};
+  const { messages = [] } = (req.body || {}) as { messages: UIMessage[] };
   const result = streamText({
     model: google("gemini-1.5-flash"),
-    messages,
+    messages: convertToModelMessages(messages),
   });
-  result.
+  result.pipeUIMessageStreamToResponse(res);
 });
 {{/if}}

@@ -85,4 +85,4 @@ app.get("/", (_req, res) => {
 const port = process.env.PORT || 3000;
 app.listen(port, () => {
   console.log(`Server is running on port ${port}`);
-});
+});

package/templates/backend/server/fastify/src/index.ts.hbs
CHANGED
@@ -19,8 +19,7 @@ import { createContext } from "./lib/context";
 {{/if}}

 {{#if (includes examples "ai")}}
-import
-import { streamText, type Message } from "ai";
+import { streamText, type UIMessage, convertToModelMessages } from "ai";
 import { google } from "@ai-sdk/google";
 {{/if}}

@@ -99,7 +98,7 @@ fastify.route({
       response.headers.forEach((value, key) => reply.header(key, value));
       reply.send(response.body ? await response.text() : null);
     } catch (error) {
-      fastify.log.error("Authentication Error:"
+      fastify.log.error({ err: error }, "Authentication Error:");
       reply.status(500).send({
         error: "Internal authentication error",
         code: "AUTH_FAILURE"
@@ -125,26 +124,24 @@ fastify.register(fastifyTRPCPlugin, {
 {{#if (includes examples "ai")}}
 interface AiRequestBody {
   id?: string;
-  messages:
+  messages: UIMessage[];
 }

 fastify.post('/ai', async function (request, reply) {
+  // there are some issues with the ai sdk and fastify, docs: https://ai-sdk.dev/cookbook/api-servers/fastify
   const { messages } = request.body as AiRequestBody;
   const result = streamText({
     model: google('gemini-1.5-flash'),
-    messages,
+    messages: convertToModelMessages(messages),
   });

-  reply.
-  reply.header('Content-Type', 'text/plain; charset=utf-8');
-
-  return reply.send(result.toDataStream());
+  return result.pipeUIMessageStreamToResponse(reply.raw);
 });
 {{/if}}

 fastify.get('/', async () => {
-  return 'OK'
-})
+  return 'OK';
+});

 fastify.listen({ port: 3000 }, (err) => {
   if (err) {
@@ -152,4 +149,4 @@ fastify.listen({ port: 3000 }, (err) => {
     process.exit(1);
   }
   console.log("Server running on port 3000");
-});
+});

package/templates/backend/server/hono/src/index.ts.hbs
CHANGED
@@ -21,32 +21,33 @@ import { Hono } from "hono";
 import { cors } from "hono/cors";
 import { logger } from "hono/logger";
 {{#if (and (includes examples "ai") (or (eq runtime "bun") (eq runtime "node")))}}
-import { streamText } from "ai";
+import { streamText, convertToModelMessages } from "ai";
 import { google } from "@ai-sdk/google";
-import { stream } from "hono/streaming";
 {{/if}}
 {{#if (and (includes examples "ai") (eq runtime "workers"))}}
-import { streamText } from "ai";
-import { stream } from "hono/streaming";
+import { streamText, convertToModelMessages } from "ai";
 import { createGoogleGenerativeAI } from "@ai-sdk/google";
 {{/if}}

 const app = new Hono();

 app.use(logger());
-app.use(
-
-
-
-
-
-
-
-
-
-
-
-
+app.use(
+  "/*",
+  cors({
+    {{#if (or (eq runtime "bun") (eq runtime "node"))}}
+    origin: process.env.CORS_ORIGIN || "",
+    {{/if}}
+    {{#if (eq runtime "workers")}}
+    origin: env.CORS_ORIGIN || "",
+    {{/if}}
+    allowMethods: ["GET", "POST", "OPTIONS"],
+    {{#if auth}}
+    allowHeaders: ["Content-Type", "Authorization"],
+    credentials: true,
+    {{/if}}
+  })
+);

 {{#if auth}}
 app.on(["POST", "GET"], "/api/auth/**", (c) => auth.handler(c.req.raw));
@@ -69,44 +70,43 @@ app.use("/rpc/*", async (c, next) => {
 {{/if}}

 {{#if (eq api "trpc")}}
-app.use(
-
-
-
-
-})
+app.use(
+  "/trpc/*",
+  trpcServer({
+    router: appRouter,
+    createContext: (_opts, context) => {
+      return createContext({ context });
+    },
+  })
+);
 {{/if}}

 {{#if (and (includes examples "ai") (or (eq runtime "bun") (eq runtime "node")))}}
 app.post("/ai", async (c) => {
   const body = await c.req.json();
-  const
+  const uiMessages = body.messages || [];
   const result = streamText({
     model: google("gemini-1.5-flash"),
-    messages,
+    messages: convertToModelMessages(uiMessages),
   });

-
-  c.header("Content-Type", "text/plain; charset=utf-8");
-  return stream(c, (stream) => stream.pipe(result.toDataStream()));
+  return result.toUIMessageStreamResponse();
 });
 {{/if}}

 {{#if (and (includes examples "ai") (eq runtime "workers"))}}
 app.post("/ai", async (c) => {
   const body = await c.req.json();
-  const
+  const uiMessages = body.messages || [];
   const google = createGoogleGenerativeAI({
     apiKey: env.GOOGLE_GENERATIVE_AI_API_KEY,
   });
   const result = streamText({
     model: google("gemini-1.5-flash"),
-    messages,
+    messages: convertToModelMessages(uiMessages),
   });

-
-  c.header("Content-Type", "text/plain; charset=utf-8");
-  return stream(c, (stream) => stream.pipe(result.toDataStream()));
+  return result.toUIMessageStreamResponse();
 });
 {{/if}}

@@ -117,17 +117,20 @@ app.get("/", (c) => {
 {{#if (eq runtime "node")}}
 import { serve } from "@hono/node-server";

-serve(
-
-
-
-
-
+serve(
+  {
+    fetch: app.fetch,
+    port: 3000,
+  },
+  (info) => {
+    console.log(`Server is running on http://localhost:${info.port}`);
+  }
+);
 {{else}}
-
+{{#if (eq runtime "bun")}}
 export default app;
-
-
+{{/if}}
+{{#if (eq runtime "workers")}}
 export default app;
-{{/if}}
 {{/if}}
+{{/if}}

package/templates/examples/ai/native/nativewind/app/(drawer)/ai.tsx.hbs
CHANGED
@@ -1,4 +1,4 @@
-import { useRef, useEffect } from "react";
+import { useRef, useEffect, useState } from "react";
 import {
   View,
   Text,
@@ -9,11 +9,11 @@ import {
   Platform,
 } from "react-native";
 import { useChat } from "@ai-sdk/react";
+import { DefaultChatTransport } from "ai";
 import { fetch as expoFetch } from "expo/fetch";
 import { Ionicons } from "@expo/vector-icons";
 import { Container } from "@/components/container";

-// Utility function to generate API URLs
 const generateAPIUrl = (relativePath: string) => {
   const serverUrl = process.env.EXPO_PUBLIC_SERVER_URL;
   if (!serverUrl) {
@@ -25,11 +25,13 @@ const generateAPIUrl = (relativePath: string) => {
 };

 export default function AIScreen() {
-  const
-
-
+  const [input, setInput] = useState("");
+  const { messages, error, sendMessage } = useChat({
+    transport: new DefaultChatTransport({
+      fetch: expoFetch as unknown as typeof globalThis.fetch,
+      api: generateAPIUrl('/ai'),
+    }),
     onError: error => console.error(error, 'AI Chat Error'),
-    maxSteps: 5,
   });

   const scrollViewRef = useRef<ScrollView>(null);
@@ -39,8 +41,10 @@ export default function AIScreen() {
   }, [messages]);

   const onSubmit = () => {
-
-
+    const value = input.trim();
+    if (value) {
+      sendMessage({ text: value });
+      setInput("");
     }
   };

@@ -100,9 +104,28 @@ export default function AIScreen() {
   <Text className="text-sm font-semibold mb-1 text-foreground">
     {message.role === "user" ? "You" : "AI Assistant"}
   </Text>
-  <
-  {message.
-
+  <View className="space-y-1">
+    {message.parts.map((part, i) => {
+      if (part.type === 'text') {
+        return (
+          <Text
+            key={`${message.id}-${i}`}
+            className="text-foreground leading-relaxed"
+          >
+            {part.text}
+          </Text>
+        );
+      }
+      return (
+        <Text
+          key={`${message.id}-${i}`}
+          className="text-foreground leading-relaxed"
+        >
+          {JSON.stringify(part)}
+        </Text>
+      );
+    })}
+  </View>
 </View>
 ))}
 </View>
@@ -113,21 +136,13 @@ export default function AIScreen() {
 <View className="flex-row items-end space-x-2">
   <TextInput
     value={input}
-
-    handleInputChange({
-      ...e,
-      target: {
-        ...e.target,
-        value: e.nativeEvent.text,
-      },
-    } as unknown as React.ChangeEvent<HTMLInputElement>)
-    }
+    onChangeText={setInput}
     placeholder="Type your message..."
     placeholderTextColor="#6b7280"
     className="flex-1 border border-border rounded-md px-3 py-2 text-foreground bg-background min-h-[40px] max-h-[120px]"
     onSubmitEditing={(e) => {
-      handleSubmit(e);
       e.preventDefault();
+      onSubmit();
     }}
     autoFocus={true}
   />

package/templates/examples/ai/native/unistyles/app/(drawer)/ai.tsx.hbs
CHANGED
@@ -1,4 +1,4 @@
-import { useRef, useEffect } from "react";
+import React, { useRef, useEffect, useState } from "react";
 import {
   View,
   Text,
@@ -9,6 +9,7 @@ import {
   Platform,
 } from "react-native";
 import { useChat } from "@ai-sdk/react";
+import { DefaultChatTransport } from "ai";
 import { fetch as expoFetch } from "expo/fetch";
 import { Ionicons } from "@expo/vector-icons";
 import { StyleSheet, useUnistyles } from "react-native-unistyles";
@@ -18,21 +19,22 @@ const generateAPIUrl = (relativePath: string) => {
   const serverUrl = process.env.EXPO_PUBLIC_SERVER_URL;
   if (!serverUrl) {
     throw new Error(
-      "EXPO_PUBLIC_SERVER_URL environment variable is not defined"
+      "EXPO_PUBLIC_SERVER_URL environment variable is not defined"
     );
   }
-
   const path = relativePath.startsWith("/") ? relativePath : `/${relativePath}`;
   return serverUrl.concat(path);
 };

 export default function AIScreen() {
   const { theme } = useUnistyles();
-  const
-
-
+  const [input, setInput] = useState("");
+  const { messages, error, sendMessage } = useChat({
+    transport: new DefaultChatTransport({
+      fetch: expoFetch as unknown as typeof globalThis.fetch,
+      api: generateAPIUrl("/ai"),
+    }),
     onError: (error) => console.error(error, "AI Chat Error"),
-    maxSteps: 5,
   });

   const scrollViewRef = useRef<ScrollView>(null);
@@ -42,8 +44,10 @@ export default function AIScreen() {
   }, [messages]);

   const onSubmit = () => {
-
-
+    const value = input.trim();
+    if (value) {
+      sendMessage({ text: value });
+      setInput("");
     }
   };

@@ -100,7 +104,28 @@ export default function AIScreen() {
   <Text style={styles.messageRole}>
     {message.role === "user" ? "You" : "AI Assistant"}
   </Text>
-  <
+  <View style={styles.messageContentWrapper}>
+    {message.parts.map((part, i) => {
+      if (part.type === "text") {
+        return (
+          <Text
+            key={`${message.id}-${i}`}
+            style={styles.messageContent}
+          >
+            {part.text}
+          </Text>
+        );
+      }
+      return (
+        <Text
+          key={`${message.id}-${i}`}
+          style={styles.messageContent}
+        >
+          {JSON.stringify(part)}
+        </Text>
+      );
+    })}
+  </View>
 </View>
 ))}
 </View>
@@ -111,21 +136,13 @@ export default function AIScreen() {
 <View style={styles.inputContainer}>
   <TextInput
     value={input}
-
-    handleInputChange({
-      ...e,
-      target: {
-        ...e.target,
-        value: e.nativeEvent.text,
-      },
-    } as unknown as React.ChangeEvent<HTMLInputElement>)
-    }
+    onChangeText={setInput}
     placeholder="Type your message..."
     placeholderTextColor={theme.colors.border}
     style={styles.textInput}
     onSubmitEditing={(e) => {
-      handleSubmit(e);
       e.preventDefault();
+      onSubmit();
     }}
     autoFocus={true}
   />
@@ -141,7 +158,9 @@
   name="send"
   size={20}
   color={
-    input.trim()
+    input.trim()
+      ? theme.colors.background
+      : theme.colors.border
   }
 />
 </TouchableOpacity>
@@ -230,6 +249,9 @@ const styles = StyleSheet.create((theme) => ({
     marginBottom: theme.spacing.sm,
     color: theme.colors.typography,
   },
+  messageContentWrapper: {
+    gap: theme.spacing.xs,
+  },
   messageContent: {
     color: theme.colors.typography,
     lineHeight: 20,
@@ -276,4 +298,4 @@ const styles = StyleSheet.create((theme) => ({
   sendButtonDisabled: {
     backgroundColor: theme.colors.border,
   },
-}));
+}));

package/templates/examples/ai/server/next/src/app/ai/route.ts.hbs
ADDED
@@ -0,0 +1,15 @@
+import { google } from '@ai-sdk/google';
+import { streamText, type UIMessage, convertToModelMessages } from 'ai';
+
+export const maxDuration = 30;
+
+export async function POST(req: Request) {
+  const { messages }: { messages: UIMessage[] } = await req.json();
+
+  const result = streamText({
+    model: google('gemini-2.0-flash'),
+    messages: convertToModelMessages(messages),
+  });
+
+  return result.toUIMessageStreamResponse();
+}