nextjs-hackathon-stack 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nextjs-hackathon-stack",
-  "version": "0.1.7",
+  "version": "0.1.9",
   "description": "Scaffold a full-stack Next.js hackathon starter",
   "type": "module",
   "bin": {
@@ -9,12 +9,11 @@ NEXT_PUBLIC_SUPABASE_ANON_KEY=your-anon-key
 # Supabase DB — https://supabase.com > Project Settings > Database > Connection string (URI)
 DATABASE_URL=postgresql://postgres:[password]@db.your-project-id.supabase.co:5432/postgres
 
-# MiniMax — https://www.minimaxi.chat > API Keys
-MINIMAX_API_KEY=your-minimax-api-key
-
 # Vercel AI Gateway — https://vercel.com > AI > Gateways
-# Format: https://gateway.ai.vercel.app/v1/{team-id}/{gateway-id}
-AI_GATEWAY_URL=https://gateway.ai.vercel.app/v1/your-team-id/your-gateway-id
+AI_GATEWAY_API_KEY=your-ai-gateway-api-key
+
+# Optional: override the gateway base URL (defaults to https://gateway.ai.vercel.app/v1)
+# AI_GATEWAY_URL=https://gateway.ai.vercel.app/v1
 
 # =============================================================================
 # OPTIONAL
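
The new variables split required from optional configuration: AI_GATEWAY_API_KEY must be set, while AI_GATEWAY_URL only overrides the default base URL. A startup check along these lines (a sketch, not shipped in the package; the helper name is hypothetical) would surface a missing key early:

// Hypothetical helper (not part of this package): validate the gateway
// configuration at boot instead of letting requests fail with an empty key.
export function getGatewayConfig(): { apiKey: string; baseURL: string } {
  const apiKey = process.env.AI_GATEWAY_API_KEY;
  if (!apiKey) {
    throw new Error("AI_GATEWAY_API_KEY is required; see .env.example");
  }
  // AI_GATEWAY_URL is optional and defaults to Vercel's hosted gateway.
  return {
    apiKey,
    baseURL: process.env.AI_GATEWAY_URL ?? "https://gateway.ai.vercel.app/v1",
  };
}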
@@ -0,0 +1,25 @@
+import { streamText, type CoreMessage } from "ai";
+
+import { aiModel } from "@/shared/lib/ai";
+
+export const runtime = "edge";
+
+export async function POST(req: Request) {
+  const { messages } = (await req.json()) as { messages: CoreMessage[] };
+
+  try {
+    const result = streamText({
+      model: aiModel,
+      messages,
+      maxTokens: 2048,
+    });
+
+    return result.toDataStreamResponse();
+  } catch (error) {
+    console.error("[chat] streamText error:", error);
+    return new Response(
+      JSON.stringify({ error: error instanceof Error ? error.message : "AI request failed" }),
+      { status: 500, headers: { "Content-Type": "application/json" } }
+    );
+  }
+}
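
For a quick smoke test of the new route, a Node script along these lines works; it assumes a dev server on localhost:3000 and the message shape the handler expects, and none of it ships with the package:

// Hypothetical smoke test (not part of the package): POST a message to the
// new chat route and print the streamed response as it arrives.
async function smokeTest(): Promise<void> {
  const res = await fetch("http://localhost:3000/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages: [{ role: "user", content: "Hi!" }] }),
  });
  if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`);

  // res.body is a web ReadableStream in Node 18+; read it chunk by chunk.
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    process.stdout.write(decoder.decode(value, { stream: true }));
  }
}

smokeTest().catch(console.error);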
@@ -0,0 +1,28 @@
+"use client";
+
+import { useEffect } from "react";
+
+export default function Error({
+  error,
+  reset,
+}: {
+  error: Error & { digest?: string };
+  reset: () => void;
+}) {
+  useEffect(() => {
+    console.error(error);
+  }, [error]);
+
+  return (
+    <div className="flex min-h-screen flex-col items-center justify-center gap-4">
+      <h2 className="text-xl font-semibold">Something went wrong</h2>
+      <p className="text-muted-foreground text-sm">{error.message}</p>
+      <button
+        onClick={reset}
+        className="rounded bg-primary px-4 py-2 text-primary-foreground"
+      >
+        Try again
+      </button>
+    </div>
+  );
+}
@@ -4,7 +4,7 @@ import { useChat } from "@ai-sdk/react";
 
 export function ChatUi() {
   const { messages, input, handleInputChange, handleSubmit, status } = useChat({
-    api: "/features/chat/api",
+    api: "/api/chat",
   });
   const isLoading = status === "streaming" || status === "submitted";
 
@@ -1,8 +1,8 @@
 import { createOpenAI } from "@ai-sdk/openai";
 
 const gateway = createOpenAI({
-  baseURL: process.env.AI_GATEWAY_URL ?? "https://api.minimaxi.chat/v1",
-  apiKey: process.env.MINIMAX_API_KEY ?? "",
+  baseURL: process.env.AI_GATEWAY_URL ?? "https://gateway.ai.vercel.app/v1",
+  apiKey: process.env.AI_GATEWAY_API_KEY ?? "",
 });
 
 export const aiModel = gateway("minimax/minimax-m2.7");
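
Since aiModel is an ordinary AI SDK model handle, it is not tied to the streaming route. A one-off, non-streaming call would look roughly like this (a sketch; the prompt is made up, and generateText comes from the same "ai" package the route already imports):

// Sketch (not in the package): one-shot completion through the same gateway
// model that the chat route streams from.
import { generateText } from "ai";

import { aiModel } from "@/shared/lib/ai";

const { text } = await generateText({
  model: aiModel,
  prompt: "Name one hackathon project idea in a single sentence.",
  maxTokens: 256,
});

console.log(text);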
@@ -1,17 +0,0 @@
1
- import { streamText, type CoreMessage } from "ai";
2
-
3
- import { aiModel } from "@/shared/lib/ai";
4
-
5
- export const runtime = "edge";
6
-
7
- export async function POST(req: Request) {
8
- const { messages } = (await req.json()) as { messages: CoreMessage[] };
9
-
10
- const result = streamText({
11
- model: aiModel,
12
- messages,
13
- maxTokens: 2048,
14
- });
15
-
16
- return result.toDataStreamResponse();
17
- }