@weebet/spike 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +110 -0
- package/package.json +27 -0
- package/templates/.env.dev +16 -0
- package/templates/Makefile +169 -0
- package/templates/docker-compose.yml +83 -0
- package/templates/scripts/seed.ts +293 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { Command } from "commander";
|
|
3
|
+
import { mkdir, copyFile, stat } from "node:fs/promises";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import { fileURLToPath } from "node:url";
|
|
6
|
+
const program = new Command();

// Resolve this file's location so the bundled templates can be found
// relative to the installed package, regardless of the caller's cwd.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// dist/cli.js -> package root is one level up from dist/
const packageRoot = path.resolve(__dirname, "..");
// All template files shipped with the package live under templates/.
const templatesRoot = path.join(packageRoot, "templates");
|
|
12
|
+
/**
 * Check whether a filesystem path exists.
 * @param {string} filePath - Path to probe.
 * @returns {Promise<boolean>} true when stat() succeeds, false on any error.
 */
async function exists(filePath) {
  // stat() rejects for missing paths (and permission errors alike);
  // map both outcomes onto a boolean instead of try/catch.
  return stat(filePath).then(
    () => true,
    () => false,
  );
}
|
|
21
|
+
/**
 * Create a directory (including parents) unless in dry-run mode.
 * @param {string} dirPath - Directory to create.
 * @param {boolean} dryRun - When true, perform no filesystem writes.
 */
async function ensureDir(dirPath, dryRun) {
  if (!dryRun) {
    await mkdir(dirPath, { recursive: true });
  }
}
|
|
26
|
+
/**
 * Copy one template file from the package into the target repo.
 *
 * Existing destinations are skipped unless `force` is set; in dry-run
 * mode the intended action is printed without touching the filesystem.
 * A failed copy sets the process exit code to 1 but does not throw, so
 * the remaining templates in the plan still get a chance to be written.
 *
 * @param {{from: string, to: string, force: boolean, dryRun: boolean}} params
 */
async function copyTemplateFile(params) {
  const { from, to, force, dryRun } = params;
  // Never clobber a user's file unless --force was passed.
  const targetExists = await exists(to);
  if (targetExists && !force) {
    console.log(`โญ๏ธ Skipped (exists): ${path.relative(process.cwd(), to)}`);
    return;
  }
  // Dry run: report what would happen and stop before any write.
  if (dryRun) {
    console.log(`๐งช Would write: ${path.relative(process.cwd(), to)}`);
    return;
  }
  // dryRun is always false past the early return above, so this call
  // really creates the destination directory.
  await ensureDir(path.dirname(to), dryRun);
  try {
    await copyFile(from, to);
    console.log(`โ
Wrote: ${path.relative(process.cwd(), to)}`);
  }
  catch (err) {
    console.error(`โ Failed to write: ${path.relative(process.cwd(), to)}`);
    console.error(err);
    // Remember the failure for the final exit status, but keep going.
    process.exitCode = 1;
  }
}
|
|
48
|
+
/**
 * Implementation of `spike init`.
 *
 * Copies the bundled dev-environment templates (docker-compose.yml,
 * .env.dev, Makefile, scripts/seed.ts) into the target directory,
 * honouring the --force, --dry-run and --dir options.
 *
 * @param {{force?: boolean, dryRun?: boolean, dir?: string}} options
 *   Parsed commander options for the `init` subcommand.
 */
async function initCommand(options) {
  const force = Boolean(options.force);
  const dryRun = Boolean(options.dryRun);
  // Resolve --dir against the cwd; default to the cwd itself.
  const targetDir = options.dir
    ? path.resolve(process.cwd(), options.dir)
    : process.cwd();
  // Source -> destination mapping for every template this package ships.
  const plan = [
    {
      from: path.join(templatesRoot, "docker-compose.yml"),
      to: path.join(targetDir, "docker-compose.yml"),
    },
    {
      from: path.join(templatesRoot, ".env.dev"),
      to: path.join(targetDir, ".env.dev"),
    },
    {
      from: path.join(templatesRoot, "Makefile"),
      to: path.join(targetDir, "Makefile"),
    },
    {
      from: path.join(templatesRoot, "scripts", "seed.ts"),
      to: path.join(targetDir, "scripts", "seed.ts"),
    },
  ];
  // sanity check: templates exist inside the installed package
  const missingTemplates = [];
  for (const item of plan) {
    if (!(await exists(item.from)))
      missingTemplates.push(item.from);
  }
  if (missingTemplates.length > 0) {
    console.error("โ Missing templates inside package:");
    for (const p of missingTemplates)
      console.error(" -", p);
    // A broken package install is unrecoverable; abort immediately.
    process.exit(1);
  }
  console.log("๐ spike init");
  console.log(`๐ Target: ${targetDir}`);
  if (dryRun)
    console.log("๐งช Dry run enabled (no files will be written)");
  if (force)
    console.log("โ ๏ธ Force enabled (existing files may be overwritten)");
  // Copy sequentially so console output follows the plan's order.
  for (const item of plan) {
    await copyTemplateFile({ ...item, force, dryRun });
  }
  console.log("\n๐ Done. Next steps:");
  console.log(" 1) make help");
  console.log(" 2) make up store=all");
  console.log(" 3) make health");
  console.log(" 4) make seed");
}
|
|
99
|
+
// CLI metadata (shown by --help / --version).
program
  .name("spike")
  .description("Internal devtools scaffolder (docker-compose + Makefile + seed)")
  .version("0.1.0");

// `spike init` copies the bundled templates into the target repository.
program
  .command("init")
  .description("Copy dev environment templates into the current repo")
  .option("-f, --force", "overwrite existing files")
  .option("--dry-run", "print actions without writing files")
  .option("-d, --dir <path>", "target directory (default: current directory)")
  .action(initCommand);

// Dispatch on argv; commander invokes the matching action handler.
program.parse(process.argv);
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@weebet/spike",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "",
|
|
6
|
+
"bin": {
|
|
7
|
+
"weebet-spike": "./dist/cli.js"
|
|
8
|
+
},
|
|
9
|
+
"files": ["dist", "templates"],
|
|
10
|
+
"scripts": {
|
|
11
|
+
"test": "echo \"Error: no test specified\" && exit 1",
|
|
12
|
+
"build": "tsc -p tsconfig.json",
|
|
13
|
+
"dev": "tsx src/cli.ts",
|
|
14
|
+
"prepublishOnly": "npm run build"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [],
|
|
17
|
+
"author": "",
|
|
18
|
+
"license": "ISC",
|
|
19
|
+
"dependencies": {
|
|
20
|
+
"commander": "^14.0.3"
|
|
21
|
+
},
|
|
22
|
+
"devDependencies": {
|
|
23
|
+
"@types/node": "^25.2.3",
|
|
24
|
+
"tsx": "^4.21.0",
|
|
25
|
+
"typescript": "^5.9.3"
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# Local dev defaults (NO secrets)
|
|
2
|
+
|
|
3
|
+
# Postgres
|
|
4
|
+
POSTGRES_DB=app_dev
|
|
5
|
+
POSTGRES_USER=postgres
|
|
6
|
+
POSTGRES_PASSWORD=postgres
|
|
7
|
+
POSTGRES_PORT=5433
|
|
8
|
+
# POSTGRES_HOST isn't required since Postgres runs inside a container
|
|
9
|
+
|
|
10
|
+
# Connection string the application can use later (optional but convenient)
|
|
11
|
+
DATABASE_URL=postgresql://postgres:postgres@localhost:5433/app_dev
|
|
12
|
+
|
|
13
|
+
# Elasticsearch
|
|
14
|
+
ELASTICSEARCH_PORT=9200
|
|
15
|
+
ELASTICSEARCH_URL=http://localhost:9200
|
|
16
|
+
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
# Use bash for better scripting support
|
|
2
|
+
SHELL := /bin/bash
|
|
3
|
+
|
|
4
|
+
# Detect operating system (Darwin = macOS)
|
|
5
|
+
OS := $(shell uname -s)
|
|
6
|
+
|
|
7
|
+
# Detect docker compose command (v1 vs v2)
|
|
8
|
+
DOCKER_COMPOSE := $(shell \
|
|
9
|
+
if command -v docker-compose >/dev/null 2>&1; then \
|
|
10
|
+
echo docker-compose; \
|
|
11
|
+
else \
|
|
12
|
+
echo "docker compose"; \
|
|
13
|
+
fi \
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
# Declare non-file targets
|
|
17
|
+
.PHONY: help up down reset logs health seed system-deps \
|
|
18
|
+
_ensure-docker _ensure-colima _start-containers _wait-for-health \
|
|
19
|
+
_install_logic_macos _install_logic_linux
|
|
20
|
+
|
|
21
|
+
## ---------------------------
|
|
22
|
+
## Public developer commands
|
|
23
|
+
## ---------------------------
|
|
24
|
+
|
|
25
|
+
help: ## Show available commands
|
|
26
|
+
@echo "Available commands:"
|
|
27
|
+
@echo " make up [store=postgres|es|all] - Start local dev environment"
|
|
28
|
+
@echo " make down - Stop local dev environment"
|
|
29
|
+
@echo " make reset - Tear down and restart with clean volumes"
|
|
30
|
+
@echo " make logs - Tail logs of all containers"
|
|
31
|
+
@echo " make health - Check if all services are healthy"
|
|
32
|
+
@echo " make seed - Populate local dev data"
|
|
33
|
+
@echo " make system-deps - Install required system dependencies"
|
|
34
|
+
|
|
35
|
+
# Bring the stack up: verify docker, (macOS only) start Colima, start the
# requested containers, then block until the health probes pass.
up: _ensure-docker ## Start local dev environment
	@echo "๐ Starting local development environment..."
	@if [ "$(OS)" = "Darwin" ]; then $(MAKE) _ensure-colima; fi
	@$(MAKE) _start-containers
	@$(MAKE) _wait-for-health
	@echo "๐ Local development environment is ready!"

# Stops containers; on macOS also stops the Colima VM.
down: ## Stop containers and Colima
	@echo "๐ Stopping local dev environment..."
	@$(DOCKER_COMPOSE) down
	@if [ "$(OS)" = "Darwin" ]; then colima stop; fi

# Destroys volumes (-v) before restarting, so all persisted data is wiped.
reset: ## Reset containers and volumes
	@echo "โป๏ธ Resetting local dev environment..."
	@$(DOCKER_COMPOSE) --env-file .env.dev down -v
	@$(MAKE) up

logs: ## Tail container logs
	@echo "๐ Tailing container logs..."
	@$(DOCKER_COMPOSE) logs -f

# Runs the seed container's npm script for the chosen store
# (store=postgres | store=es | default: all stores).
seed: ## Seed development data
	@echo "๐ฑ Seeding data for $(store)..."
	@if [ "$(store)" = "postgres" ]; then \
		$(DOCKER_COMPOSE) --env-file .env.dev run --rm seed npm run seed:postgres; \
	elif [ "$(store)" = "es" ]; then \
		$(DOCKER_COMPOSE) --env-file .env.dev run --rm seed npm run seed:es; \
	else \
		$(DOCKER_COMPOSE) --env-file .env.dev run --rm seed npm run seed; \
	fi
|
|
65
|
+
|
|
66
|
+
## ---------------------------
|
|
67
|
+
## Health & readiness
|
|
68
|
+
## ---------------------------
|
|
69
|
+
|
|
70
|
+
_wait-for-health:
|
|
71
|
+
@echo "โณ Waiting for services to become healthy..."
|
|
72
|
+
@$(MAKE) health
|
|
73
|
+
|
|
74
|
+
# Waits (polling every 2s) until each *running* service answers its probe.
# Services that are not started are skipped entirely.
health: ## Check service health via service-level probes
	@echo "๐ Checking health status..."

	@# --- Postgres ---
	@# `ps -q <svc>` exits 0 even when the service is absent (it just prints
	@# nothing), so we must check that it actually printed a container id
	@# before entering the wait loop — otherwise we would wait forever for
	@# a service that was never started.
	@if [ -n "$$($(DOCKER_COMPOSE) ps -q postgres 2>/dev/null)" ]; then \
		echo "๐ Checking Postgres..."; \
		until $(DOCKER_COMPOSE) exec postgres pg_isready >/dev/null 2>&1; do \
			echo " โณ Postgres is starting..."; \
			sleep 2; \
		done; \
		echo " โ
Postgres is healthy"; \
	fi

	@# --- Elasticsearch ---
	@if [ -n "$$($(DOCKER_COMPOSE) ps -q elasticsearch 2>/dev/null)" ]; then \
		echo "๐ Checking Elasticsearch..."; \
		until curl -s http://localhost:9200/_cluster/health >/dev/null; do \
			echo " โณ Elasticsearch is starting..."; \
			sleep 2; \
		done; \
		echo " โ
Elasticsearch is healthy"; \
	fi
|
|
96
|
+
|
|
97
|
+
## ---------------------------
|
|
98
|
+
## Internal helper targets
|
|
99
|
+
## ---------------------------
|
|
100
|
+
|
|
101
|
+
# Fail fast with a pointer to `make system-deps` when docker is missing.
_ensure-docker:
	@echo "๐ณ Checking Docker availability..."
	@command -v docker >/dev/null 2>&1 || \
		(echo "โ Docker not found. ๐ Run: make system-deps"; exit 1)

# macOS only: start the Colima VM when it is not already running.
_ensure-colima:
	@echo "๐ฅ๏ธ Ensuring Colima is running..."
	@colima status >/dev/null 2>&1 || \
		(echo "๐ Starting Colima..." && colima start)

# Start a single store (store=postgres / store=es|elasticsearch) or,
# with no store argument, every service in the compose file.
_start-containers:
	@echo "๐ฆ Starting containers..."
	@if [ "$(store)" = "postgres" ]; then \
		$(DOCKER_COMPOSE) --env-file .env.dev up -d postgres; \
	elif [ "$(store)" = "elasticsearch" ] || [ "$(store)" = "es" ]; then \
		$(DOCKER_COMPOSE) --env-file .env.dev up -d elasticsearch; \
	else \
		$(DOCKER_COMPOSE) --env-file .env.dev up -d; \
	fi
|
|
120
|
+
|
|
121
|
+
## ---------------------------
|
|
122
|
+
## System dependency setup
|
|
123
|
+
## ---------------------------
|
|
124
|
+
|
|
125
|
+
system-deps: ## Install required system dependencies
|
|
126
|
+
@echo "๐ Detecting OS..."
|
|
127
|
+
@case "$(OS)" in \
|
|
128
|
+
"Darwin") \
|
|
129
|
+
echo "๐ macOS detected"; \
|
|
130
|
+
$(MAKE) _install_logic_macos ;; \
|
|
131
|
+
"Linux") \
|
|
132
|
+
echo "๐ง Linux detected"; \
|
|
133
|
+
$(MAKE) _install_logic_linux ;; \
|
|
134
|
+
*) \
|
|
135
|
+
echo "๐ช Windows/WSL detected"; \
|
|
136
|
+
echo "๐ Search: 'install docker on windows wsl2'"; \
|
|
137
|
+
exit 1 ;; \
|
|
138
|
+
esac
|
|
139
|
+
|
|
140
|
+
_install_logic_macos:
|
|
141
|
+
@echo "๐ Checking dependencies (brew, docker, colima)..."
|
|
142
|
+
@HAS_BREW=$$(command -v brew >/dev/null 2>&1 && echo 1 || echo 0); \
|
|
143
|
+
HAS_DOCKER=$$(command -v docker >/dev/null 2>&1 && echo 1 || echo 0); \
|
|
144
|
+
HAS_COLIMA=$$(command -v colima >/dev/null 2>&1 && echo 1 || echo 0); \
|
|
145
|
+
if [ $$HAS_BREW -eq 1 ] && [ $$HAS_DOCKER -eq 1 ] && [ $$HAS_COLIMA -eq 1 ]; then \
|
|
146
|
+
echo "โ
All dependencies already installed"; exit 0; \
|
|
147
|
+
fi; \
|
|
148
|
+
echo "๐ Search: 'install docker on macos'"; \
|
|
149
|
+
echo "๐ Search: 'install colima macos'"; \
|
|
150
|
+
read -p "โ Attempt automatic installation via Homebrew? (y/n) " yn; \
|
|
151
|
+
if [ "$$yn" = "y" ]; then \
|
|
152
|
+
if [ $$HAS_BREW -eq 0 ]; then /bin/bash -c "$$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" || exit 1; fi; \
|
|
153
|
+
if [ $$HAS_DOCKER -eq 0 ]; then brew install docker || exit 1; fi; \
|
|
154
|
+
if [ $$HAS_COLIMA -eq 0 ]; then brew install colima || exit 1; fi; \
|
|
155
|
+
echo "โ
Installation successful"; \
|
|
156
|
+
else \
|
|
157
|
+
echo "โ Installation aborted"; exit 1; \
|
|
158
|
+
fi
|
|
159
|
+
|
|
160
|
+
_install_logic_linux:
|
|
161
|
+
@if command -v docker >/dev/null 2>&1; then \
|
|
162
|
+
echo "โ
Docker already installed"; \
|
|
163
|
+
else \
|
|
164
|
+
echo "๐ Search: 'install docker on linux'"; \
|
|
165
|
+
read -p "โ Attempt installation via get.docker.com? (y/n) " yn; \
|
|
166
|
+
if [ "$$yn" = "y" ]; then \
|
|
167
|
+
curl -fsSL https://get.docker.com | sh || exit 1; \
|
|
168
|
+
fi; \
|
|
169
|
+
fi
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
services:
|
|
2
|
+
postgres:
|
|
3
|
+
image: postgres:16-alpine
|
|
4
|
+
restart: unless-stopped
|
|
5
|
+
|
|
6
|
+
# Loads defaults from .env.dev (when you run: docker compose --env-file .env.dev ...)
|
|
7
|
+
environment:
|
|
8
|
+
POSTGRES_USER: ${POSTGRES_USER}
|
|
9
|
+
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
|
|
10
|
+
POSTGRES_DB: ${POSTGRES_DB}
|
|
11
|
+
|
|
12
|
+
ports:
|
|
13
|
+
- "${POSTGRES_PORT:-5433}:5432"
|
|
14
|
+
|
|
15
|
+
volumes:
|
|
16
|
+
- postgres_data:/var/lib/postgresql/data
|
|
17
|
+
|
|
18
|
+
# Required healthcheck: pg_isready
|
|
19
|
+
healthcheck:
|
|
20
|
+
test:
|
|
21
|
+
[
|
|
22
|
+
"CMD-SHELL",
|
|
23
|
+
"pg_isready -U $${POSTGRES_USER} -d $${POSTGRES_DB} -h 127.0.0.1 -p 5432",
|
|
24
|
+
]
|
|
25
|
+
interval: 5s
|
|
26
|
+
timeout: 3s
|
|
27
|
+
retries: 20
|
|
28
|
+
start_period: 10s
|
|
29
|
+
|
|
30
|
+
elasticsearch:
|
|
31
|
+
image: docker.elastic.co/elasticsearch/elasticsearch:8.13.4
|
|
32
|
+
restart: unless-stopped
|
|
33
|
+
|
|
34
|
+
environment:
|
|
35
|
+
- discovery.type=single-node
|
|
36
|
+
- xpack.security.enabled=false
|
|
37
|
+
- ES_JAVA_OPTS=-Xms512m -Xmx512m
|
|
38
|
+
|
|
39
|
+
ports:
|
|
40
|
+
- "${ELASTICSEARCH_PORT:-9200}:9200"
|
|
41
|
+
- "9300:9300"
|
|
42
|
+
|
|
43
|
+
volumes:
|
|
44
|
+
- elasticsearch_data:/usr/share/elasticsearch/data
|
|
45
|
+
|
|
46
|
+
# Useful for ES stability on Linux; harmless elsewhere
|
|
47
|
+
ulimits:
|
|
48
|
+
memlock:
|
|
49
|
+
soft: -1
|
|
50
|
+
hard: -1
|
|
51
|
+
|
|
52
|
+
# Required healthcheck: curl http://localhost:9200/_cluster/health
|
|
53
|
+
healthcheck:
|
|
54
|
+
test:
|
|
55
|
+
[
|
|
56
|
+
"CMD-SHELL",
|
|
57
|
+
"curl -fsS http://127.0.0.1:9200/_cluster/health?wait_for_status=yellow&timeout=5s > /dev/null || exit 1",
|
|
58
|
+
]
|
|
59
|
+
interval: 5s
|
|
60
|
+
timeout: 5s
|
|
61
|
+
retries: 30
|
|
62
|
+
start_period: 20s
|
|
63
|
+
|
|
64
|
+
seed:
|
|
65
|
+
image: node:20-alpine
|
|
66
|
+
container_name: dev-seed
|
|
67
|
+
working_dir: /app
|
|
68
|
+
volumes:
|
|
69
|
+
- .:/app
|
|
70
|
+
env_file:
|
|
71
|
+
- .env.dev
|
|
72
|
+
environment:
|
|
73
|
+
POSTGRES_HOST: postgres
|
|
74
|
+
|
|
75
|
+
depends_on:
|
|
76
|
+
postgres:
|
|
77
|
+
condition: service_healthy
|
|
78
|
+
elasticsearch:
|
|
79
|
+
condition: service_healthy
|
|
80
|
+
|
|
81
|
+
volumes:
|
|
82
|
+
postgres_data:
|
|
83
|
+
elasticsearch_data:
|
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
import { Pool } from "pg";
|
|
2
|
+
import dotenv from "dotenv";
|
|
3
|
+
import path from "path";
|
|
4
|
+
import { fileURLToPath } from "url";
|
|
5
|
+
|
|
6
|
+
// Get the directory of the current file
|
|
7
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
8
|
+
const __dirname = path.dirname(__filename);
|
|
9
|
+
|
|
10
|
+
// Load .env.dev from the project root
|
|
11
|
+
dotenv.config({ path: path.join(__dirname, "../.env.dev") });
|
|
12
|
+
|
|
13
|
+
// ========== SEED DATA Objects ==========

// Shape of a seeded user row (mirrors the `users` table created in
// runMigrations).
interface UserData {
  id: string;
  name: string;
  email: string;
  // Stored as plain text by this dev seed — never hashed here.
  password: string;
  created_at: Date;
}

// Shape of a seeded conversation row (mirrors the `conversations` table).
interface ConversationData {
  id: string;
  // References users.id.
  user_id: string;
  title: string;
  created_at: Date;
  updated_at: Date;
}

// Shape of a seeded message row (mirrors the `messages` table).
interface MessageData {
  id: string;
  // References conversations.id.
  conversation_id: string;
  // References users.id.
  user_id: string;
  content: string;
  // "user" or "assistant" in the sample data below.
  role: string;
  created_at: Date;
}

// Convenience alias for a list of messages.
interface MessageDataList extends Array<MessageData> {}
|
|
41
|
+
|
|
42
|
+
// create a postgres database connection pool
|
|
43
|
+
const PostgresDatabasePool = {
|
|
44
|
+
user: process.env["POSTGRES_USER"],
|
|
45
|
+
password: process.env["POSTGRES_PASSWORD"],
|
|
46
|
+
port:
|
|
47
|
+
process.env["POSTGRES_HOST"] === "postgres"
|
|
48
|
+
? 5432
|
|
49
|
+
: parseInt(process.env["POSTGRES_PORT"] || "5432"),
|
|
50
|
+
database: process.env["POSTGRES_DB"],
|
|
51
|
+
host: process.env["POSTGRES_HOST"] || "localhost",
|
|
52
|
+
};
|
|
53
|
+
|
|
54
|
+
console.log("๐ Database config:", PostgresDatabasePool);
|
|
55
|
+
|
|
56
|
+
const pool = new Pool(PostgresDatabasePool);
|
|
57
|
+
|
|
58
|
+
// ========== MIGRATION FUNCTIONS ==========
/**
 * Create the users, conversations and messages tables when absent.
 * Idempotent (CREATE TABLE IF NOT EXISTS), so the seed can run repeatedly.
 * Rethrows any database error after logging it, aborting the seed run.
 */
async function runMigrations(): Promise<void> {
  log("๐", "Running database migrations...");

  const createTablesSQL = `
    -- Create users table
    CREATE TABLE IF NOT EXISTS users (
      id VARCHAR(255) PRIMARY KEY,
      email VARCHAR(255) UNIQUE NOT NULL,
      password VARCHAR(255) NOT NULL,
      name VARCHAR(255) NOT NULL,
      created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
    );

    -- Create conversations table
    CREATE TABLE IF NOT EXISTS conversations (
      id VARCHAR(255) PRIMARY KEY,
      user_id VARCHAR(255) NOT NULL REFERENCES users(id),
      title VARCHAR(255) NOT NULL,
      created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
      updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
    );

    -- Create messages table
    CREATE TABLE IF NOT EXISTS messages (
      id VARCHAR(255) PRIMARY KEY,
      conversation_id VARCHAR(255) NOT NULL REFERENCES conversations(id),
      user_id VARCHAR(255) NOT NULL REFERENCES users(id),
      content TEXT NOT NULL,
      role VARCHAR(50) NOT NULL,
      created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
    );
  `;

  try {
    await pool.query(createTablesSQL);
    log("โ
", "Migrations completed successfully");
  } catch (error) {
    await logError(error as Error, "runMigrations");
    throw error;
  }
}
|
|
100
|
+
|
|
101
|
+
// ========== HELPER FUNCTIONS ==========
|
|
102
|
+
async function userExists(email: string): Promise<boolean> {
|
|
103
|
+
// this function checks if user data is already exists
|
|
104
|
+
const result = await pool.query(
|
|
105
|
+
"SELECT COUNT(*) FROM users WHERE email = $1",
|
|
106
|
+
[email],
|
|
107
|
+
);
|
|
108
|
+
return parseInt(result.rows[0].count) > 0;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
function log(emoji: string, message: string): void {
|
|
112
|
+
// this function displays output friendly and scannable
|
|
113
|
+
console.log(`${emoji} ${message}`);
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
async function logError(error: Error, context: string): Promise<void> {
|
|
117
|
+
// this function provides consistent error logging
|
|
118
|
+
console.error(`โ Error in ${context}:`, error.message);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// ========== SEED DATA ==========
// Fixed, deterministic records so repeated seed runs are idempotent.
const sampleUserData: UserData = {
  id: "user-001",
  email: "dev@example.com",
  password: "devpass123", // In production, this would be hashed
  name: "Dev User",
  created_at: new Date("2024-01-01T08:00:00Z"),
};

// One conversation owned by the sample user.
// NOTE(review): these timestamps lack a trailing "Z" and are parsed in
// the local timezone, unlike the user's UTC timestamp above — confirm
// whether that mismatch is intended.
const sampleConversationData: ConversationData = {
  id: "conv-001",
  user_id: "user-001",
  title: "Getting Started guide",
  created_at: new Date("2024-01-01T10:00:00"),
  updated_at: new Date("2024-01-01T10:30:00"),
};

// A two-message exchange inside the sample conversation.
const sampleMessages: MessageDataList = [
  {
    id: "msg-001",
    conversation_id: "conv-001",
    user_id: "user-001",
    content: "Hello! How do I get started?",
    role: "user",
    created_at: new Date("2024-01-01T10:00:00"),
  },
  {
    id: "msg-002",
    conversation_id: "conv-001",
    user_id: "user-001",
    content: "Welcome! Here are the first steps",
    role: "assistant",
    created_at: new Date("2024-01-01T10:05:00"),
  },
];
|
|
156
|
+
|
|
157
|
+
// ========== SEED FUNCTIONS ==========
/**
 * Insert the sample user unless one with the same email already exists.
 * Errors are logged but not rethrown so later seed steps still run.
 */
async function seedUsers(): Promise<void> {
  log("๐ฑ", "Seeding users...");

  // Check if user already exists (idempotency)
  const exists = await userExists(sampleUserData.email);
  if (exists) {
    log("โญ๏ธ", `User ${sampleUserData.email} already exists, skipping`);
    return;
  }

  // Insert the user
  // ON CONFLICT guards against a race with a concurrent seed run.
  const query = `
    INSERT INTO users (id, email, password, name, created_at)
    VALUES ($1, $2, $3, $4, $5)
    ON CONFLICT (email) DO NOTHING
  `;

  try {
    await pool.query(query, [
      sampleUserData.id,
      sampleUserData.email,
      sampleUserData.password,
      sampleUserData.name,
      sampleUserData.created_at,
    ]);
    log("โ
", `Created user: ${sampleUserData.email}`);
  } catch (error) {
    await logError(error as Error, "seedUsers");
  }
}
|
|
188
|
+
|
|
189
|
+
/**
 * Insert the sample conversation unless it already exists.
 * Must run after seedUsers: conversations.user_id references users.id.
 * Errors are logged but not rethrown so later seed steps still run.
 */
async function seedConversations(): Promise<void> {
  log("๐ฑ", "Seeding conversations...");

  // Check if conversation already exists
  const existsResult = await pool.query(
    "SELECT COUNT(*) FROM conversations WHERE id = $1",
    [sampleConversationData.id],
  );

  if (parseInt(existsResult.rows[0].count) > 0) {
    log("โญ๏ธ", "Sample conversation already exists, skipping");
    return;
  }

  // Insert conversation
  const insertQuery = `
    INSERT INTO conversations (id, user_id, title, created_at, updated_at)
    VALUES ($1, $2, $3, $4, $5)
  `;

  try {
    await pool.query(insertQuery, [
      sampleConversationData.id,
      sampleConversationData.user_id,
      sampleConversationData.title,
      sampleConversationData.created_at,
      sampleConversationData.updated_at,
    ]);
    log("โ
", "Sample conversation created");
  } catch (error) {
    await logError(error as Error, "seedConversations");
  }
}
|
|
222
|
+
|
|
223
|
+
/**
 * Insert the sample messages, skipping any that already exist.
 * Must run after seedConversations (messages reference conversations
 * and users). Per-message failures are logged and skipped.
 */
async function seedMessages(): Promise<void> {
  log("๐ฑ", "Seeding messages...");
  let createdCount = 0;

  for (const message of sampleMessages) {
    // Check if message already exists (idempotency).
    const existsResult = await pool.query(
      "SELECT COUNT(*) FROM messages WHERE id = $1",
      [message.id],
    );

    // Explicit radix: pg returns COUNT(*) as a string.
    if (Number.parseInt(existsResult.rows[0].count, 10) > 0) {
      log("โญ๏ธ", `Message ${message.id} already exists, skipping`);
      continue;
    }

    // Insert message
    const insertQuery = `
      INSERT INTO messages (id, conversation_id, user_id, content, role, created_at)
      VALUES ($1, $2, $3, $4, $5, $6)
    `;

    try {
      await pool.query(insertQuery, [
        message.id,
        message.conversation_id,
        message.user_id,
        message.content,
        message.role,
        message.created_at,
      ]);
      createdCount++;
    } catch (error) {
      await logError(error as Error, `seedMessages for ${message.id}`);
    }
  }

  // Bug fix: this summary used to print inside the loop, once per
  // message with a running count; it belongs after all inserts.
  log("โ
", `Messages seeded: ${createdCount} created`);

  log("โ
", `All messages created (${sampleMessages.length} total)`);
}
|
|
263
|
+
|
|
264
|
+
// Orchestrate all seeding
|
|
265
|
+
async function seedDatabase(): Promise<void> {
|
|
266
|
+
try {
|
|
267
|
+
log("๐", "Starting database seeding...");
|
|
268
|
+
log(
|
|
269
|
+
"๐",
|
|
270
|
+
`Connecting to ${PostgresDatabasePool.host}:${PostgresDatabasePool.port}/${PostgresDatabasePool.database}`,
|
|
271
|
+
);
|
|
272
|
+
|
|
273
|
+
// Run migrations first
|
|
274
|
+
await runMigrations();
|
|
275
|
+
|
|
276
|
+
// Run seeds in order (respecting foreign key dependencies)
|
|
277
|
+
await seedUsers();
|
|
278
|
+
await seedConversations();
|
|
279
|
+
await seedMessages();
|
|
280
|
+
|
|
281
|
+
log("โจ", "Database migration and seeding complete!");
|
|
282
|
+
process.exit(0);
|
|
283
|
+
} catch (error) {
|
|
284
|
+
log("โ", "Seeding failed!");
|
|
285
|
+
console.error(error);
|
|
286
|
+
process.exit(1);
|
|
287
|
+
} finally {
|
|
288
|
+
// Always close the database connection
|
|
289
|
+
await pool.end();
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
seedDatabase();
|