create-featurebased-architecture 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -9
- package/dist/index.js +4 -2
- package/package.json +2 -2
- package/templates/ollama-chatbot-backend/.env.example +9 -0
- package/templates/ollama-chatbot-backend/README.md +132 -0
- package/templates/ollama-chatbot-backend/package.json +21 -0
- package/templates/ollama-chatbot-backend/src/config/database.ts +4 -0
- package/templates/ollama-chatbot-backend/src/config/env.ts +8 -0
- package/templates/ollama-chatbot-backend/src/config/index.ts +3 -0
- package/templates/ollama-chatbot-backend/src/config/ollama.ts +14 -0
- package/templates/ollama-chatbot-backend/src/features/chat/controller.ts +49 -0
- package/templates/ollama-chatbot-backend/src/features/chat/index.ts +5 -0
- package/templates/ollama-chatbot-backend/src/features/chat/routes.ts +7 -0
- package/templates/ollama-chatbot-backend/src/features/chat/schema.ts +13 -0
- package/templates/ollama-chatbot-backend/src/features/chat/service.ts +91 -0
- package/templates/ollama-chatbot-backend/src/features/chat/types.ts +22 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/controller.ts +114 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/index.ts +6 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/repository.ts +61 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/routes.ts +11 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/schema.ts +9 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/service.ts +28 -0
- package/templates/ollama-chatbot-backend/src/features/conversations/types.ts +23 -0
- package/templates/ollama-chatbot-backend/src/index.ts +22 -0
- package/templates/ollama-chatbot-backend/src/routes/index.ts +10 -0
- package/templates/ollama-chatbot-backend/src/shared/index.ts +2 -0
- package/templates/ollama-chatbot-backend/src/shared/types/index.ts +16 -0
- package/templates/ollama-chatbot-backend/src/shared/utils/index.ts +1 -0
- package/templates/ollama-chatbot-backend/src/shared/utils/response.ts +10 -0
- package/templates/ollama-chatbot-backend/tsconfig.json +22 -0
- package/templates/ollama-chatbot-frontend/.env.example +1 -0
- package/templates/ollama-chatbot-frontend/README.md +65 -0
- package/templates/ollama-chatbot-frontend/index.html +12 -0
- package/templates/ollama-chatbot-frontend/package.json +23 -0
- package/templates/ollama-chatbot-frontend/src/App.tsx +17 -0
- package/templates/ollama-chatbot-frontend/src/config/env.ts +1 -0
- package/templates/ollama-chatbot-frontend/src/config/index.ts +1 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/components/ChatPage.tsx +94 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/components/index.ts +1 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/hooks/index.ts +1 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/hooks/useChat.ts +149 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/index.ts +4 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/services/chatService.ts +81 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/services/index.ts +1 -0
- package/templates/ollama-chatbot-frontend/src/features/chat/types.ts +33 -0
- package/templates/ollama-chatbot-frontend/src/index.css +281 -0
- package/templates/ollama-chatbot-frontend/src/main.tsx +13 -0
- package/templates/ollama-chatbot-frontend/src/shared/components/Sidebar.tsx +56 -0
- package/templates/ollama-chatbot-frontend/src/shared/components/index.ts +1 -0
- package/templates/ollama-chatbot-frontend/tsconfig.json +27 -0
- package/templates/ollama-chatbot-frontend/tsconfig.node.json +11 -0
- package/templates/ollama-chatbot-frontend/vite.config.ts +12 -0
package/README.md
CHANGED
````diff
@@ -1,17 +1,17 @@
-# create-featurebased-architecture
+# create-featurebased-architecture
 
 A CLI tool to scaffold feature-based architecture projects with Bun, Hono, and React.
 
 ## Usage
 
 ```bash
-npx create-featurebased-architecture
+npx create-featurebased-architecture
 ```
 
 Or with bun:
 
 ```bash
-bunx create-featurebased-architecture
+bunx create-featurebased-architecture
 ```
 
 ## Templates
@@ -23,6 +23,8 @@ bunx create-featurebased-architecture-backend
 | **React** | Feature-based React frontend with Vite |
 | **User Management Backend** | Complete CRUD backend for users with NeonDB |
 | **User Management Frontend** | React frontend for user management |
+| **Ollama Chatbot Backend** | AI chatbot backend with @langchain/ollama and NeonDB |
+| **Ollama Chatbot Frontend** | Chat UI with react-icons for the chatbot backend |
 
 ## Feature-Based Architecture
 
@@ -65,6 +67,30 @@ CREATE TRIGGER update_users_updated_at
 EXECUTE FUNCTION update_updated_at_column();
 ```
 
+### Ollama Chatbot Tables
+
+```sql
+-- Conversations table
+CREATE TABLE conversations (
+  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+  title VARCHAR(200) NOT NULL,
+  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Messages table
+CREATE TABLE messages (
+  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
+  conversation_id UUID NOT NULL REFERENCES conversations(id) ON DELETE CASCADE,
+  role VARCHAR(20) NOT NULL CHECK (role IN ('user', 'assistant', 'system')),
+  content TEXT NOT NULL,
+  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE INDEX idx_messages_conversation ON messages(conversation_id);
+CREATE INDEX idx_conversations_updated ON conversations(updated_at DESC);
+```
+
 ## Development
 
 ```bash
@@ -78,12 +104,6 @@ bun run dev
 bun run build
 ```
 
-## Publishing
-
-```bash
-npm publish
-```
-
-
 ## License
 
 MIT
````
package/dist/index.js
CHANGED
```diff
@@ -740,7 +740,9 @@ var TEMPLATES = {
   "blank-hono": "Blank Hono",
   react: "React",
   "user-management-backend": "User Management System Backend",
-  "user-management-frontend": "User Management System Frontend"
+  "user-management-frontend": "User Management System Frontend",
+  "ollama-chatbot-backend": "Ollama Chatbot Backend",
+  "ollama-chatbot-frontend": "Ollama Chatbot Frontend"
 };
 function copyDir(src, dest) {
   fs.mkdirSync(dest, { recursive: true });
@@ -766,7 +768,7 @@ function replaceInFile(filePath, replacements) {
 }
 async function main() {
   console.clear();
-  oe(import_picocolors3.default.bgCyan(import_picocolors3.default.black(" create-featurebased-architecture
+  oe(import_picocolors3.default.bgCyan(import_picocolors3.default.black(" create-featurebased-architecture ")));
   const project = await he({
     name: () => te({
       message: "Project name:",
```
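The context around these hunks shows the two scaffolding helpers the CLI relies on, `copyDir` and `replaceInFile`. The published bundle is minified (hence names like `oe`, `te`, and `he` for the prompt library), so the sketch below is only an orientation aid for what such helpers typically look like, written against Node's `fs`/`path` APIs; the actual code in `dist/index.js` may differ.

```ts
import * as fs from "node:fs";
import * as path from "node:path";

// Recursively copy a template directory into the target project directory.
function copyDir(src: string, dest: string): void {
  fs.mkdirSync(dest, { recursive: true });
  for (const entry of fs.readdirSync(src, { withFileTypes: true })) {
    const srcPath = path.join(src, entry.name);
    const destPath = path.join(dest, entry.name);
    if (entry.isDirectory()) {
      copyDir(srcPath, destPath);
    } else {
      fs.copyFileSync(srcPath, destPath);
    }
  }
}

// Replace placeholders such as {{PROJECT_NAME}} in a scaffolded file.
function replaceInFile(filePath: string, replacements: Record<string, string>): void {
  let content = fs.readFileSync(filePath, "utf8");
  for (const [token, value] of Object.entries(replacements)) {
    content = content.replaceAll(token, value);
  }
  fs.writeFileSync(filePath, content);
}
```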
package/package.json
CHANGED
```diff
@@ -1,10 +1,10 @@
 {
   "name": "create-featurebased-architecture",
-  "version": "1.0.0",
+  "version": "1.0.1",
   "description": "CLI to scaffold feature-based architecture projects with Bun, Hono, React",
   "type": "module",
   "bin": {
-    "create-featurebased-architecture
+    "create-featurebased-architecture": "./dist/index.js"
   },
   "files": [
     "dist",
```
package/templates/ollama-chatbot-backend/.env.example
ADDED
```
# Environment Variables
DATABASE_URL=postgresql://user:password@your-neon-host.neon.tech/dbname?sslmode=require
PORT=3000
NODE_ENV=development

# Ollama Cloud Configuration
OLLAMA_API_KEY=your_ollama_api_key_here
OLLAMA_BASE_URL=https://api.ollama.com
OLLAMA_MODEL=gemma3:27b-cloud
```
package/templates/ollama-chatbot-backend/README.md
ADDED
````markdown
# {{PROJECT_NAME}}

An Ollama Chatbot backend with feature-based architecture using Bun, Hono, LangChain, and NeonDB.

## Project Structure

```
src/
├── config/              # Configuration (env, database, ollama)
├── features/
│   ├── chat/            # Chat messaging feature
│   │   ├── routes.ts
│   │   ├── controller.ts
│   │   ├── service.ts
│   │   ├── types.ts
│   │   └── schema.ts
│   └── conversations/   # Conversation management
│       ├── routes.ts
│       ├── controller.ts
│       ├── service.ts
│       ├── repository.ts
│       ├── types.ts
│       └── schema.ts
├── routes/              # Route aggregation
├── shared/              # Shared utilities
└── index.ts
```

## Getting Started

```bash
# Install dependencies
bun install

# Copy environment file
cp .env.example .env

# Update with your credentials

# Run development server
bun run dev
```

## Environment Variables

| Variable | Description |
|----------|-------------|
| `DATABASE_URL` | NeonDB PostgreSQL connection string |
| `OLLAMA_API_KEY` | Ollama Cloud API key |
| `OLLAMA_BASE_URL` | Ollama API URL (default: https://api.ollama.com) |
| `OLLAMA_MODEL` | Default model (default: gemma3:27b-cloud) |
| `PORT` | Server port (default: 3000) |

## Database Setup (NeonDB)

Run the following SQL in your NeonDB console:

```sql
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

-- Conversations table
CREATE TABLE conversations (
  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
  title VARCHAR(200) NOT NULL,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Messages table
CREATE TABLE messages (
  id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
  conversation_id UUID NOT NULL REFERENCES conversations(id) ON DELETE CASCADE,
  role VARCHAR(20) NOT NULL CHECK (role IN ('user', 'assistant', 'system')),
  content TEXT NOT NULL,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
);

-- Indexes
CREATE INDEX idx_messages_conversation ON messages(conversation_id);
CREATE INDEX idx_conversations_updated ON conversations(updated_at DESC);
```

## API Endpoints

### Chat

| Method | Endpoint | Description |
|--------|----------|-------------|
| POST | `/api/chat` | Send message and get response |
| POST | `/api/chat/stream` | Stream response (SSE) |

### Conversations

| Method | Endpoint | Description |
|--------|----------|-------------|
| GET | `/api/conversations` | Get all conversations |
| GET | `/api/conversations/:id` | Get conversation by ID |
| GET | `/api/conversations/:id/messages` | Get conversation messages |
| POST | `/api/conversations` | Create conversation |
| PUT | `/api/conversations/:id` | Update conversation title |
| DELETE | `/api/conversations/:id` | Delete conversation |

### Request Examples

**Send Message:**
```bash
curl -X POST http://localhost:3000/api/chat \
  -H "Content-Type: application/json" \
  -d '{"message": "Hello, how are you?"}'
```

**Send Message with Specific Model:**
```bash
curl -X POST http://localhost:3000/api/chat \
  -H "Content-Type: application/json" \
  -d '{"message": "Hello", "model": "llama3:70b-cloud"}'
```

**Continue Conversation:**
```bash
curl -X POST http://localhost:3000/api/chat \
  -H "Content-Type: application/json" \
  -d '{"message": "Tell me more", "conversationId": "uuid-here"}'
```

## Changing the Model

The model can be changed in three ways:

1. **Environment variable:** Set `OLLAMA_MODEL` in `.env`
2. **Per request:** Pass `model` in the request body
3. **Code:** Modify `src/config/env.ts`
````
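The frontend template's `chatService.ts` and `useChat.ts` (listed in the file summary above) are not included in this diff. Purely as an illustration of consuming the `/api/chat/stream` endpoint described in that README, a browser client could read the SSE stream roughly as sketched below, assuming each `data:` line carries the JSON chunks that `chatService.streamMessage` yields (`{ content, conversationId }`); the template's real client code may differ.

```ts
// Minimal sketch of reading the SSE stream from POST /api/chat/stream.
async function streamChat(message: string, conversationId?: string): Promise<string> {
  const res = await fetch("http://localhost:3000/api/chat/stream", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ message, conversationId }),
  });
  if (!res.ok || !res.body) throw new Error(`Stream request failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let full = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each SSE event arrives as "data: <payload>"; collect the content fields.
    for (const line of decoder.decode(value, { stream: true }).split("\n")) {
      if (!line.startsWith("data:")) continue;
      try {
        const chunk = JSON.parse(line.slice(5).trim());
        if (chunk.content) full += chunk.content;
      } catch {
        // Ignore frames split across network reads in this simplified sketch.
      }
    }
  }
  return full;
}
```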
package/templates/ollama-chatbot-backend/package.json
ADDED
```json
{
  "name": "{{PROJECT_NAME}}",
  "version": "1.0.0",
  "type": "module",
  "scripts": {
    "dev": "bun run --watch src/index.ts",
    "start": "bun run src/index.ts",
    "build": "bun build src/index.ts --outdir ./dist --target bun"
  },
  "dependencies": {
    "hono": "^4.11.3",
    "@neondatabase/serverless": "^0.9.0",
    "@langchain/ollama": "^0.2.0",
    "@langchain/core": "^0.2.0",
    "zod": "^3.22.0"
  },
  "devDependencies": {
    "@types/bun": "latest",
    "typescript": "^5.0.0"
  }
}
```
package/templates/ollama-chatbot-backend/src/config/env.ts
ADDED
```ts
export const env = {
  DATABASE_URL: process.env.DATABASE_URL || "",
  PORT: Number(process.env.PORT) || 3000,
  NODE_ENV: process.env.NODE_ENV || "development",
  OLLAMA_API_KEY: process.env.OLLAMA_API_KEY || "",
  OLLAMA_BASE_URL: process.env.OLLAMA_BASE_URL || "https://api.ollama.com",
  OLLAMA_MODEL: process.env.OLLAMA_MODEL || "gemma3:27b-cloud",
} as const;
```
package/templates/ollama-chatbot-backend/src/config/ollama.ts
ADDED
```ts
import { ChatOllama } from "@langchain/ollama";
import { env } from "./env";

export const createOllamaClient = (model?: string) => {
  return new ChatOllama({
    baseUrl: env.OLLAMA_BASE_URL,
    model: model || env.OLLAMA_MODEL,
    headers: {
      Authorization: `Bearer ${env.OLLAMA_API_KEY}`,
    },
  });
};

export const ollamaClient = createOllamaClient();
```
package/templates/ollama-chatbot-backend/src/features/chat/controller.ts
ADDED
```ts
import type { Context } from "hono";
import { streamSSE } from "hono/streaming";
import { chatService } from "./service";
import { chatRequestSchema, streamChatRequestSchema } from "./schema";
import { successResponse, errorResponse } from "../../shared/utils/response";

export const chatController = {
  async sendMessage(c: Context) {
    try {
      const body = await c.req.json();
      const validation = chatRequestSchema.safeParse(body);

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      const response = await chatService.sendMessage(validation.data);
      return successResponse(c, response);
    } catch (error) {
      console.error("Chat error:", error);
      return errorResponse(c, "Failed to process message", 500);
    }
  },

  async streamMessage(c: Context) {
    try {
      const body = await c.req.json();
      const validation = streamChatRequestSchema.safeParse(body);

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      return streamSSE(c, async (stream) => {
        try {
          for await (const chunk of chatService.streamMessage(validation.data)) {
            await stream.writeSSE({ data: chunk });
          }
        } catch (error) {
          console.error("Stream error:", error);
          await stream.writeSSE({ data: JSON.stringify({ error: "Stream failed" }) });
        }
      });
    } catch (error) {
      console.error("Stream setup error:", error);
      return errorResponse(c, "Failed to setup stream", 500);
    }
  },
};
```
package/templates/ollama-chatbot-backend/src/features/chat/schema.ts
ADDED
```ts
import { z } from "zod";

export const chatRequestSchema = z.object({
  message: z.string().min(1, "Message is required"),
  conversationId: z.string().uuid().optional(),
  model: z.string().optional(),
});

export const streamChatRequestSchema = z.object({
  message: z.string().min(1, "Message is required"),
  conversationId: z.string().uuid().optional(),
  model: z.string().optional(),
});
```
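The request shapes in `types.ts` (later in this diff) mirror these schemas by hand. A common alternative, not used by the template, is deriving the types from the schemas so the two cannot drift apart; a sketch of that approach:

```ts
import { z } from "zod";
import { chatRequestSchema } from "./schema";

// Derived type; structurally identical to the hand-written ChatRequest in types.ts.
export type ChatRequest = z.infer<typeof chatRequestSchema>;
```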
package/templates/ollama-chatbot-backend/src/features/chat/service.ts
ADDED
```ts
import { HumanMessage, AIMessage, SystemMessage } from "@langchain/core/messages";
import { createOllamaClient, ollamaClient } from "../../config/ollama";
import { conversationRepository } from "../conversations/repository";
import type { ChatRequest, ChatResponse, ChatMessage } from "./types";

export const chatService = {
  async sendMessage(request: ChatRequest): Promise<ChatResponse> {
    const { message, conversationId, model } = request;

    // Get or create conversation
    let convId = conversationId;
    if (!convId) {
      const conversation = await conversationRepository.create({
        title: message.slice(0, 50) + (message.length > 50 ? "..." : ""),
      });
      convId = conversation.id;
    }

    // Get conversation history
    const history = await conversationRepository.getMessages(convId);

    // Build message history for context
    const messages = history.map((msg) => {
      if (msg.role === "user") return new HumanMessage(msg.content);
      if (msg.role === "assistant") return new AIMessage(msg.content);
      return new SystemMessage(msg.content);
    });

    // Add current message
    messages.push(new HumanMessage(message));

    // Save user message
    await conversationRepository.addMessage(convId, { role: "user", content: message });

    // Get response from Ollama
    const client = model ? createOllamaClient(model) : ollamaClient;
    const response = await client.invoke(messages);
    const assistantMessage = typeof response.content === "string" ? response.content : JSON.stringify(response.content);

    // Save assistant response
    await conversationRepository.addMessage(convId, { role: "assistant", content: assistantMessage });

    return {
      message: assistantMessage,
      conversationId: convId,
      model: model || (await import("../../config/env")).env.OLLAMA_MODEL,
    };
  },

  async *streamMessage(request: ChatRequest): AsyncGenerator<string> {
    const { message, conversationId, model } = request;

    // Get or create conversation
    let convId = conversationId;
    if (!convId) {
      const conversation = await conversationRepository.create({
        title: message.slice(0, 50) + (message.length > 50 ? "..." : ""),
      });
      convId = conversation.id;
    }

    // Get conversation history
    const history = await conversationRepository.getMessages(convId);

    // Build message history
    const messages = history.map((msg) => {
      if (msg.role === "user") return new HumanMessage(msg.content);
      if (msg.role === "assistant") return new AIMessage(msg.content);
      return new SystemMessage(msg.content);
    });

    messages.push(new HumanMessage(message));

    // Save user message
    await conversationRepository.addMessage(convId, { role: "user", content: message });

    // Stream response
    const client = model ? createOllamaClient(model) : ollamaClient;
    const stream = await client.stream(messages);

    let fullResponse = "";
    for await (const chunk of stream) {
      const content = typeof chunk.content === "string" ? chunk.content : "";
      fullResponse += content;
      yield JSON.stringify({ content, conversationId: convId }) + "\n";
    }

    // Save complete assistant response
    await conversationRepository.addMessage(convId, { role: "assistant", content: fullResponse });
  },
};
```
package/templates/ollama-chatbot-backend/src/features/chat/types.ts
ADDED
```ts
export interface ChatMessage {
  role: "user" | "assistant" | "system";
  content: string;
}

export interface ChatRequest {
  message: string;
  conversationId?: string;
  model?: string;
}

export interface ChatResponse {
  message: string;
  conversationId: string;
  model: string;
}

export interface StreamChatRequest {
  message: string;
  conversationId?: string;
  model?: string;
}
```
package/templates/ollama-chatbot-backend/src/features/conversations/controller.ts
ADDED
```ts
import type { Context } from "hono";
import { conversationService } from "./service";
import { createConversationSchema, conversationIdSchema } from "./schema";
import { successResponse, errorResponse } from "../../shared/utils/response";

export const conversationController = {
  async getAll(c: Context) {
    try {
      const conversations = await conversationService.getAllConversations();
      return successResponse(c, conversations);
    } catch (error) {
      return errorResponse(c, "Failed to fetch conversations", 500);
    }
  },

  async getById(c: Context) {
    try {
      const { id } = c.req.param();
      const validation = conversationIdSchema.safeParse({ id });

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      const conversation = await conversationService.getConversationById(id);
      if (!conversation) {
        return errorResponse(c, "Conversation not found", 404);
      }

      return successResponse(c, conversation);
    } catch (error) {
      return errorResponse(c, "Failed to fetch conversation", 500);
    }
  },

  async getMessages(c: Context) {
    try {
      const { id } = c.req.param();
      const validation = conversationIdSchema.safeParse({ id });

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      const messages = await conversationService.getConversationMessages(id);
      return successResponse(c, messages);
    } catch (error) {
      return errorResponse(c, "Failed to fetch messages", 500);
    }
  },

  async create(c: Context) {
    try {
      const body = await c.req.json();
      const validation = createConversationSchema.safeParse(body);

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      const conversation = await conversationService.createConversation(validation.data);
      return successResponse(c, conversation, "Conversation created", 201);
    } catch (error) {
      return errorResponse(c, "Failed to create conversation", 500);
    }
  },

  async updateTitle(c: Context) {
    try {
      const { id } = c.req.param();
      const idValidation = conversationIdSchema.safeParse({ id });

      if (!idValidation.success) {
        return errorResponse(c, idValidation.error.errors[0].message);
      }

      const body = await c.req.json();
      const validation = createConversationSchema.safeParse(body);

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      const conversation = await conversationService.updateConversationTitle(id, validation.data.title);
      if (!conversation) {
        return errorResponse(c, "Conversation not found", 404);
      }

      return successResponse(c, conversation, "Conversation updated");
    } catch (error) {
      return errorResponse(c, "Failed to update conversation", 500);
    }
  },

  async delete(c: Context) {
    try {
      const { id } = c.req.param();
      const validation = conversationIdSchema.safeParse({ id });

      if (!validation.success) {
        return errorResponse(c, validation.error.errors[0].message);
      }

      const deleted = await conversationService.deleteConversation(id);
      if (!deleted) {
        return errorResponse(c, "Conversation not found", 404);
      }

      return successResponse(c, null, "Conversation deleted");
    } catch (error) {
      return errorResponse(c, "Failed to delete conversation", 500);
    }
  },
};
```
package/templates/ollama-chatbot-backend/src/features/conversations/repository.ts
ADDED
```ts
import { sql } from "../../config/database";
import type { Conversation, Message, CreateConversationDto, AddMessageDto } from "./types";

export const conversationRepository = {
  async findAll(): Promise<Conversation[]> {
    const result = await sql`SELECT * FROM conversations ORDER BY updated_at DESC`;
    return result as Conversation[];
  },

  async findById(id: string): Promise<Conversation | null> {
    const result = await sql`SELECT * FROM conversations WHERE id = ${id}`;
    return (result[0] as Conversation) || null;
  },

  async create(data: CreateConversationDto): Promise<Conversation> {
    const result = await sql`
      INSERT INTO conversations (title)
      VALUES (${data.title})
      RETURNING *
    `;
    return result[0] as Conversation;
  },

  async updateTitle(id: string, title: string): Promise<Conversation | null> {
    const result = await sql`
      UPDATE conversations
      SET title = ${title}, updated_at = CURRENT_TIMESTAMP
      WHERE id = ${id}
      RETURNING *
    `;
    return (result[0] as Conversation) || null;
  },

  async delete(id: string): Promise<boolean> {
    // Delete messages first
    await sql`DELETE FROM messages WHERE conversation_id = ${id}`;
    const result = await sql`DELETE FROM conversations WHERE id = ${id} RETURNING id`;
    return result.length > 0;
  },

  async getMessages(conversationId: string): Promise<Message[]> {
    const result = await sql`
      SELECT * FROM messages
      WHERE conversation_id = ${conversationId}
      ORDER BY created_at ASC
    `;
    return result as Message[];
  },

  async addMessage(conversationId: string, data: AddMessageDto): Promise<Message> {
    // Update conversation timestamp
    await sql`UPDATE conversations SET updated_at = CURRENT_TIMESTAMP WHERE id = ${conversationId}`;

    const result = await sql`
      INSERT INTO messages (conversation_id, role, content)
      VALUES (${conversationId}, ${data.role}, ${data.content})
      RETURNING *
    `;
    return result[0] as Message;
  },
};
```
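`conversationRepository` imports `sql` from `src/config/database.ts`, whose four lines are not shown in this diff. Given the `@neondatabase/serverless` dependency and the tagged-template queries above, the file plausibly looks like the sketch below; treat it as an assumption rather than the published content.

```ts
// Hypothetical reconstruction of src/config/database.ts (not included in this diff).
import { neon } from "@neondatabase/serverless";
import { env } from "./env";

export const sql = neon(env.DATABASE_URL);
```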