@etus/bhono-app 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +1 -1
- package/package.json +1 -1
- package/templates/base/scripts/init.sh +314 -93
package/dist/cli.js
CHANGED

@@ -13,7 +13,7 @@ function buildProgram() {
   return new Command()
     .name('bhono-app')
     .description('Create a new project from the Etus boilerplate')
-    .version('0.1.3')
+    .version('0.1.5')
     .argument('<project-name>', 'Name of the project')
     .option('-d, --domain <domain>', 'Production domain')
    .option('-m, --modules <modules>', 'Comma-separated modules to include')
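
For reference, the argument and options above correspond to an invocation along these lines. This is a sketch only: it assumes the package exposes its CLI through the bin name used below, and the project name, domain, and module list are placeholder values, not values taken from the package.

    # Hypothetical usage of the scaffolding CLI shown in this diff
    npx @etus/bhono-app my-app --domain example.com --modules auth,billing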
package/package.json
CHANGED

package/templates/base/scripts/init.sh
CHANGED

@@ -1,9 +1,10 @@
 #!/bin/bash

-#
-#
+# BHono - Development Environment Setup
+# Bootstraps dependencies, configures Cloudflare bindings,
+# seeds the local D1 (sqlite) database, and starts the dev server.

-set -
+set -euo pipefail

 # Colors for output
 RED='\033[0;31m'
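
The switch to strict mode ("set -euo pipefail") is the main behavioral change in this hunk: the script now stops on any failed command, treats references to unset variables as errors, and lets a failure anywhere in a pipeline fail the whole pipeline. A minimal sketch of the effect, separate from the package code:

    #!/bin/bash
    set -euo pipefail
    false | true          # with pipefail the pipeline exits non-zero, so -e stops the script here
    echo "$MISSING_VAR"   # -u would likewise abort on an unset variable (never reached above)
    echo "not printed"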
@@ -12,110 +13,330 @@ YELLOW='\033[1;33m'
 BLUE='\033[0;34m'
 NC='\033[0m' # No Color

[4 lines removed; content not shown in this view]
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+cd "$ROOT_DIR"
+
+UPDATE_PACKAGES=0
+SKIP_DEV=0
+SKIP_PROVISION=0
+SKIP_SEED=0
+
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    --update)
+      UPDATE_PACKAGES=1
+      ;;
+    --skip-dev)
+      SKIP_DEV=1
+      ;;
+    --no-provision)
+      SKIP_PROVISION=1
+      ;;
+    --skip-seed)
+      SKIP_SEED=1
+      ;;
+    *)
+      echo -e "${YELLOW}Ignoring unknown argument: $1${NC}"
+      ;;
+  esac
+  shift
+done
+
+log_info() { echo -e "${BLUE}$*${NC}"; }
+log_ok() { echo -e "${GREEN}$*${NC}"; }
+log_warn() { echo -e "${YELLOW}$*${NC}"; }
+log_err() { echo -e "${RED}$*${NC}"; }
+
+log_info "========================================"
+log_info " BHono - Dev Environment Setup "
+log_info "========================================"

 # Check for required tools
[1 line removed; content not shown in this view]
+log_info "Checking required tools..."

-if ! command -v node
[3 lines removed; content not shown in this view]
+if ! command -v node >/dev/null 2>&1; then
+  log_err "Error: Node.js is not installed"
+  log_err "Install Node.js 18+ from https://nodejs.org/"
+  exit 1
 fi

-if ! command -v pnpm
[2 lines removed; content not shown in this view]
+if ! command -v pnpm >/dev/null 2>&1; then
+  log_warn "pnpm not found. Installing..."
+  npm install -g pnpm
 fi

[1 line removed; content not shown in this view]
-if [ "$
[2 lines removed; content not shown in this view]
+NODE_MAJOR=$(node -p "Number(process.versions.node.split('.')[0])")
+if [[ "$NODE_MAJOR" -lt 18 ]]; then
+  log_err "Error: Node.js 18+ is required (found v${NODE_MAJOR})"
+  exit 1
 fi

[2 lines removed; content not shown in this view]
+log_ok "Node.js $(node -v) detected"
+log_ok "pnpm $(pnpm -v) detected"

 # Install dependencies
[1 line removed; content not shown in this view]
-echo -e "${YELLOW}Installing dependencies...${NC}"
+log_info "Installing dependencies..."
 pnpm install

[13 lines removed; content not shown in this view]
+if [[ "$UPDATE_PACKAGES" -eq 1 ]]; then
+  log_info "Updating dependencies..."
+  pnpm update
+fi
+
+# Check for .env files
+if [[ ! -f .env && -f .env.example ]]; then
+  log_info "Creating .env from .env.example..."
+  cp .env.example .env
+  log_warn "Update .env with real values (GOOGLE_CLIENT_ID/SECRET, JWT_SECRET, SENDGRID_API_KEY)."
+fi
+
+if [[ ! -f .dev.vars && -f .dev.vars.example ]]; then
+  log_info "Creating .dev.vars from .dev.vars.example..."
+  cp .dev.vars.example .dev.vars
+fi
+
+# Determine project name
+PROJECT_NAME_RAW=$(node -e "
+const fs = require('fs');
+const path = require('path');
+let name = '';
+try { name = JSON.parse(fs.readFileSync('etus.config.json','utf8')).name || ''; } catch (e) {}
+if (!name) { try { name = JSON.parse(fs.readFileSync('package.json','utf8')).name || ''; } catch (e) {} }
+if (!name) name = path.basename(process.cwd());
+console.log(name);
+" 2>/dev/null || echo "")
+
+PROJECT_NAME="${PROJECT_NAME_RAW##*/}"
+PROJECT_NAME=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | sed -E 's/[^a-z0-9-]+/-/g; s/^-+|-+$//g')
+if [[ -z "$PROJECT_NAME" ]]; then
+  PROJECT_NAME=$(basename "$ROOT_DIR" | tr '[:upper:]' '[:lower:]' | sed -E 's/[^a-z0-9-]+/-/g; s/^-+|-+$//g')
+fi
+
+log_ok "Project name: $PROJECT_NAME"
+
+# Ensure wrangler.json exists
+if [[ ! -f config/wrangler.json ]]; then
+  log_err "Missing config/wrangler.json"
+  exit 1
+fi
+
+# Update wrangler.json placeholders
+PROJECT_NAME="$PROJECT_NAME" node - <<'NODE'
+const fs = require('fs');
+const path = 'config/wrangler.json';
+const data = JSON.parse(fs.readFileSync(path, 'utf8'));
+const projectName = process.env.PROJECT_NAME || '';
+
+function replacePlaceholders(value) {
+  if (typeof value === 'string') {
+    return value.split('{{projectName}}').join(projectName);
+  }
+  if (Array.isArray(value)) {
+    return value.map(replacePlaceholders);
+  }
+  if (value && typeof value === 'object') {
+    for (const key of Object.keys(value)) {
+      value[key] = replacePlaceholders(value[key]);
+    }
+    return value;
+  }
+  return value;
+}
+
+replacePlaceholders(data);
+if (!data.name || data.name.includes('{{projectName}}')) {
+  data.name = projectName;
+}
+
+fs.writeFileSync(path, JSON.stringify(data, null, 2));
+NODE
+
+WRANGLER="pnpm exec wrangler"
+WRANGLER_CONFIG="$WRANGLER --config config/wrangler.json"
+WRANGLER_AVAILABLE=1
+if ! $WRANGLER --version >/dev/null 2>&1; then
+  WRANGLER_AVAILABLE=0
+  log_warn "Wrangler not available. Cloudflare steps will be skipped."
+fi
+
+# Resolve names from wrangler.json
+DB_NAME=$(node -e "const c=require('./config/wrangler.json'); console.log((c.d1_databases&&c.d1_databases[0]&&c.d1_databases[0].database_name)||'');")
+if [[ -z "$DB_NAME" ]]; then
+  DB_NAME="${PROJECT_NAME}-db"
+fi
+
+DB_BINDING=$(node -e "const c=require('./config/wrangler.json'); console.log((c.d1_databases&&c.d1_databases[0]&&c.d1_databases[0].binding)||'DB');")
+
+R2_BUCKET=$(node -e "const c=require('./config/wrangler.json'); console.log((c.r2_buckets&&c.r2_buckets[0]&&c.r2_buckets[0].bucket_name)||'');")
+if [[ -z "$R2_BUCKET" ]]; then
+  R2_BUCKET="${PROJECT_NAME}-storage"
+fi
+
+KV_NAME="${PROJECT_NAME}-sessions"
+
+# Read existing IDs from wrangler.json (if present)
+D1_ID=$(node -e "const c=require('./config/wrangler.json'); console.log((c.d1_databases&&c.d1_databases[0]&&c.d1_databases[0].database_id)||'');")
+KV_ID=$(node -e "const c=require('./config/wrangler.json'); console.log((c.kv_namespaces&&c.kv_namespaces[0]&&c.kv_namespaces[0].id)||'');")
+if [[ "$D1_ID" == "TO_BE_PROVISIONED" ]]; then D1_ID=""; fi
+if [[ "$KV_ID" == "TO_BE_PROVISIONED" ]]; then KV_ID=""; fi
+
+if [[ "$SKIP_PROVISION" -eq 0 && "$WRANGLER_AVAILABLE" -eq 1 ]]; then
+  if $WRANGLER whoami >/dev/null 2>&1; then
+    log_info "Provisioning Cloudflare resources (D1, KV, R2)..."
+
+    # D1
+    D1_LIST=$($WRANGLER d1 list --json || echo '[]')
+    D1_ID=$(node -e "const fs=require('fs'); const list=JSON.parse(fs.readFileSync(0,'utf8')||'[]'); const name=process.env.DB_NAME; const item=list.find(x=>x.name===name); console.log(item?.uuid||item?.id||'');" <<< "$D1_LIST")
+    if [[ -z "$D1_ID" ]]; then
+      D1_CREATE=$($WRANGLER d1 create "$DB_NAME" --json)
+      D1_ID=$(node -e "const obj=JSON.parse(process.env.JSON||'{}'); console.log(obj.uuid||obj.id||'');" JSON="$D1_CREATE")
+    fi
+
+    # KV
+    KV_LIST=$($WRANGLER kv namespace list --json || echo '[]')
+    KV_ID=$(node -e "const fs=require('fs'); const list=JSON.parse(fs.readFileSync(0,'utf8')||'[]'); const name=process.env.KV_NAME; const item=list.find(x=>x.title===name||x.name===name); console.log(item?.id||'');" <<< "$KV_LIST")
+    if [[ -z "$KV_ID" ]]; then
+      KV_CREATE=$($WRANGLER kv namespace create "$KV_NAME" --json)
+      KV_ID=$(node -e "const obj=JSON.parse(process.env.JSON||'{}'); console.log(obj.id||'');" JSON="$KV_CREATE")
+    fi
+
+    # R2
+    R2_LIST=$($WRANGLER r2 bucket list --json || echo '[]')
+    R2_EXISTS=$(node -e "const fs=require('fs'); const list=JSON.parse(fs.readFileSync(0,'utf8')||'[]'); const name=process.env.R2_NAME; const item=list.find(x=>x.name===name); console.log(item? 'yes':'no');" R2_NAME="$R2_BUCKET" <<< "$R2_LIST")
+    if [[ "$R2_EXISTS" != "yes" ]]; then
+      $WRANGLER r2 bucket create "$R2_BUCKET" >/dev/null
     fi
+
+    log_ok "Cloudflare resources ready."
+  else
+    log_warn "Wrangler not logged in. Skipping remote provisioning."
+  fi
+fi
+
+# If no remote IDs, generate local-only IDs for dev
+if [[ -z "$D1_ID" ]]; then
+  D1_ID=$(node -e "console.log(require('crypto').randomUUID())")
+  log_warn "Using local D1 id: $D1_ID"
+fi
+
+if [[ -z "$KV_ID" ]]; then
+  KV_ID=$(node -e "console.log(require('crypto').randomUUID())")
+  log_warn "Using local KV id: $KV_ID"
 fi

+# Update wrangler.json with IDs
+D1_ID="$D1_ID" KV_ID="$KV_ID" PROJECT_NAME="$PROJECT_NAME" node - <<'NODE'
+const fs = require('fs');
+const path = 'config/wrangler.json';
+const data = JSON.parse(fs.readFileSync(path, 'utf8'));
+
+const d1Id = process.env.D1_ID || '';
+const kvId = process.env.KV_ID || '';
+
+if (Array.isArray(data.d1_databases) && data.d1_databases[0]) {
+  data.d1_databases[0].database_id = d1Id;
+}
+
+if (Array.isArray(data.kv_namespaces) && data.kv_namespaces[0]) {
+  data.kv_namespaces[0].id = kvId;
+}
+
+fs.writeFileSync(path, JSON.stringify(data, null, 2));
+NODE
+
 # Generate Cloudflare types
[57 lines removed; content not shown in this view]
+log_info "Generating Cloudflare types..."
+pnpm cf-typegen >/dev/null 2>&1 || log_warn "Skipping cf-typegen (wrangler not configured)"
+
+# Ensure local D1 exists and update drizzle config
+log_info "Preparing local D1 database..."
+if [[ "$WRANGLER_AVAILABLE" -eq 1 ]]; then
+  $WRANGLER_CONFIG d1 execute "$DB_NAME" --local --command "SELECT 1;" >/dev/null 2>&1 || log_warn "Local D1 init failed (continuing)."
+fi
+
+D1_ID_FROM_CONFIG=$(node -e "const c=require('./config/wrangler.json'); console.log((c.d1_databases&&c.d1_databases[0]&&c.d1_databases[0].database_id)||'');")
+if [[ -n "$D1_ID_FROM_CONFIG" ]]; then
+  LOCAL_D1_DIR=".wrangler/state/v3/d1/miniflare-D1DatabaseObject"
+  LOCAL_DB_PATH="${LOCAL_D1_DIR}/${D1_ID_FROM_CONFIG}.sqlite"
+  DRIZZLE_DB_URL="../${LOCAL_DB_PATH}"
+
+  mkdir -p "$LOCAL_D1_DIR"
+  touch "$LOCAL_DB_PATH"
+
+  if [[ -f config/drizzle.config.ts ]]; then
+    DRIZZLE_DB_URL="$DRIZZLE_DB_URL" node - <<'NODE'
+const fs = require('fs');
+const path = 'config/drizzle.config.ts';
+const nextUrl = process.env.DRIZZLE_DB_URL;
+let content = fs.readFileSync(path, 'utf8');
+const pattern = /url:\s*['"][^'"]*\.sqlite['"]/;
+if (pattern.test(content)) {
+  content = content.replace(pattern, `url: '${nextUrl}'`);
+} else {
+  // Fallback: append url if not found
+  content = content.replace('dbCredentials: {', `dbCredentials: {\n url: '${nextUrl}',`);
+}
+fs.writeFileSync(path, content);
+NODE
+    log_ok "Updated config/drizzle.config.ts with local DB path."
+  else
+    log_warn "config/drizzle.config.ts not found. Skipping DB config update."
+  fi
+else
+  log_warn "D1 id not found in wrangler.json. Skipping drizzle config update."
+fi
+
+# Push schema (no migrations) and seed
+DB_PUSH_OK=0
+SEED_OK=0
+
+log_info "Pushing schema to local D1..."
+if pnpm db:push >/tmp/bhono-db-push.log 2>&1; then
+  DB_PUSH_OK=1
+else
+  log_warn "Schema push failed."
+  tail -n 20 /tmp/bhono-db-push.log || true
+fi
+
+if [[ "$SKIP_SEED" -eq 0 ]]; then
+  log_info "Generating seed data..."
+  if pnpm db:seed >/tmp/bhono-seed-generate.log 2>&1; then
+    if [[ -f seed.sql ]]; then
+      if [[ "$WRANGLER_AVAILABLE" -eq 1 ]]; then
+        log_info "Seeding local D1..."
+        if $WRANGLER_CONFIG d1 execute "$DB_NAME" --local --file=seed.sql >/tmp/bhono-seed-apply.log 2>&1; then
+          SEED_OK=1
+        else
+          log_warn "Seed apply failed."
+          tail -n 20 /tmp/bhono-seed-apply.log || true
+        fi
+      else
+        log_warn "Wrangler not available. Skipping seed apply."
+      fi
+    else
+      log_warn "seed.sql not found. Seed generation did not create the file."
+    fi
+  else
+    log_warn "Seed generation failed."
+    tail -n 20 /tmp/bhono-seed-generate.log || true
+  fi
+else
+  log_warn "Skipping seed step (--skip-seed)."
+fi
+
+rm -f /tmp/bhono-db-push.log /tmp/bhono-seed-generate.log /tmp/bhono-seed-apply.log >/dev/null 2>&1 || true
+
+if [[ "$DB_PUSH_OK" -eq 1 && "$SEED_OK" -eq 1 ]]; then
+  log_ok "Local D1 ready with seed data."
+else
+  log_warn "Local D1 setup incomplete. Review warnings above."
+fi
+
+log_info "Seed data is defined in src/server/db/seed.ts (customize as needed)."
+
+if [[ "$SKIP_DEV" -eq 0 ]]; then
+  log_info "Starting dev server..."
+  pnpm dev
+else
+  log_ok "Setup complete. Run 'pnpm dev' to start the server."
+fi