@techstream/quark-create-app 1.5.1 → 1.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -12,7 +12,7 @@ The CLI scaffolds a complete project structure with:
12
12
  - **Next.js** web application
13
13
  - **Prisma** database schema and migrations
14
14
  - **BullMQ** job queues
15
- - **Docker Compose** setup (PostgreSQL, Redis, Mailhog)
15
+ - **Docker Compose** setup (PostgreSQL, Redis, Mailpit)
16
16
  - **JavaScript** monorepo with `pnpm` workspaces
17
17
 
18
18
  ## Quick Setup
@@ -20,17 +20,54 @@ The CLI scaffolds a complete project structure with:
20
20
  ```bash
21
21
  cd my-awesome-app
22
22
  docker compose up -d
23
- pnpm db:generate
24
23
  pnpm db:migrate
25
24
  pnpm dev
26
25
  ```
27
26
 
27
+ ## Commands
28
+
29
+ ```bash
30
+ # Create a new project
31
+ npx @techstream/quark-create-app@latest my-awesome-app
32
+
33
+ # Update Quark core in an existing project
34
+ npx @techstream/quark-create-app update
35
+
36
+ # Check for updates without applying
37
+ npx @techstream/quark-create-app update --check
38
+ ```
39
+
40
+ Aliases:
41
+ - `quark-create-app`
42
+ - `create-quark-app`
43
+ - `quark-update`
44
+
28
45
  ## Common Tasks
29
46
 
30
47
  - **Update Quark packages**: `quark-update` or `pnpm update @techstream/quark-*`
31
48
  - **Check for updates**: `quark-update --check`
32
49
  - **Configure environment**: Edit `.env` file (see `.env.example`)
33
50
 
51
+ ## CLI Testing
52
+
53
+ ```bash
54
+ # Lightweight template checks
55
+ pnpm test
56
+
57
+ # E2E scaffold simulation
58
+ pnpm test:e2e
59
+
60
+ # Full build verification (opt-in)
61
+ QUARK_CLI_BUILD_TEST=1 pnpm test:build
62
+ ```
63
+
64
+ ## Troubleshooting
65
+
66
+ - **pnpm install fails**: Ensure `pnpm` is installed and your Node.js version is 22 or newer.
67
+ - **Prisma generate fails**: Run `pnpm --filter db db:generate` inside the project.
68
+ - **Docker ports conflict**: The CLI auto-selects free ports. Check `.env` for assigned values.
69
+ - **Missing env vars**: Copy `.env.example` to `.env` and fill required values.
70
+
34
71
  ## Support
35
72
 
36
73
  For issues, questions, and discussions:
package/package.json CHANGED
@@ -1,10 +1,11 @@
1
1
  {
2
2
  "name": "@techstream/quark-create-app",
3
- "version": "1.5.1",
3
+ "version": "1.5.3",
4
4
  "type": "module",
5
5
  "bin": {
6
6
  "quark-create-app": "src/index.js",
7
- "create-quark-app": "src/index.js"
7
+ "create-quark-app": "src/index.js",
8
+ "quark-update": "src/index.js"
8
9
  },
9
10
  "files": [
10
11
  "src",
@@ -28,6 +29,7 @@
28
29
  "license": "ISC",
29
30
  "scripts": {
30
31
  "test": "node test-cli.js",
32
+ "test:build": "node test-build.js",
31
33
  "test:e2e": "node test-e2e.js",
32
34
  "test:integration": "node test-integration.js",
33
35
  "test:all": "node test-all.js"
package/src/index.js CHANGED
@@ -264,6 +264,54 @@ program
264
264
  const targetDir = validateProjectName(projectName);
265
265
  const scope = projectName.toLowerCase().replace(/[^a-z0-9-]/g, "");
266
266
 
267
+ // Clean up orphaned Docker volumes from a previous project with the same name.
268
+ // Docker Compose names volumes as "<project>_postgres_data", "<project>_redis_data".
269
+ // These persist even if the project directory is manually deleted, causing
270
+ // authentication failures when the new project generates different credentials.
271
+ // We also need to stop any running containers that reference these volumes.
272
+ try {
273
+ const volumePrefix = `${projectName}_`;
274
+ const { stdout } = await execa("docker", [
275
+ "volume",
276
+ "ls",
277
+ "--filter",
278
+ `name=${volumePrefix}`,
279
+ "--format",
280
+ "{{.Name}}",
281
+ ]);
282
+ const orphanedVolumes = stdout
283
+ .split("\n")
284
+ .filter((v) => v.startsWith(volumePrefix));
285
+ if (orphanedVolumes.length > 0) {
286
+ // Stop and remove any containers using these volumes first
287
+ const { stdout: containerOut } = await execa("docker", [
288
+ "ps",
289
+ "-a",
290
+ "--filter",
291
+ `name=${projectName}`,
292
+ "--format",
293
+ "{{.ID}}",
294
+ ]);
295
+ const containers = containerOut.split("\n").filter(Boolean);
296
+ if (containers.length > 0) {
297
+ await execa("docker", ["rm", "-f", ...containers]);
298
+ }
299
+ // Remove the Docker network if it exists
300
+ try {
301
+ await execa("docker", ["network", "rm", `${projectName}_default`]);
302
+ } catch {
303
+ // Network may not exist — fine
304
+ }
305
+ // Now remove the orphaned volumes
306
+ for (const vol of orphanedVolumes) {
307
+ await execa("docker", ["volume", "rm", "-f", vol]);
308
+ }
309
+ console.log(chalk.green(" ✓ Cleaned up orphaned Docker volumes"));
310
+ }
311
+ } catch {
312
+ // Docker not available — fine
313
+ }
314
+
267
315
  // Check if directory already exists
268
316
  if (await fs.pathExists(targetDir)) {
269
317
  const { overwrite } = await prompts({
@@ -278,13 +326,13 @@ program
278
326
  process.exit(1);
279
327
  }
280
328
 
281
- // Clean up Docker resources (volumes hold old credentials)
329
+ // Stop any running Docker containers for this project
282
330
  try {
283
- await execa("docker", ["compose", "down", "-v"], {
331
+ await execa("docker", ["compose", "down"], {
284
332
  cwd: targetDir,
285
333
  stdio: "ignore",
286
334
  });
287
- console.log(chalk.green(" ✓ Cleaned up Docker volumes"));
335
+ console.log(chalk.green(" ✓ Stopped existing Docker containers"));
288
336
  } catch {
289
337
  // No docker-compose file or Docker not running — fine
290
338
  }
@@ -444,12 +492,12 @@ REDIS_PORT=6379
444
492
  # Optional: Set REDIS_URL to override the dynamic construction above
445
493
  # REDIS_URL="redis://localhost:6379"
446
494
 
447
- # --- Mailhog Configuration ---
448
- MAILHOG_HOST=localhost
449
- MAILHOG_SMTP_PORT=1025
450
- MAILHOG_UI_PORT=8025
451
- # Optional: Set MAILHOG_SMTP_URL to override the dynamic construction above
452
- # MAILHOG_SMTP_URL="smtp://localhost:1025"
495
+ # --- Mail Configuration (Mailpit in development) ---
496
+ MAIL_HOST=localhost
497
+ MAIL_SMTP_PORT=1025
498
+ MAIL_UI_PORT=8025
499
+ # Optional: Set MAIL_SMTP_URL to override the dynamic construction above
500
+ # MAIL_SMTP_URL="smtp://localhost:1025"
453
501
 
454
502
  # --- Application URL ---
455
503
  # In development, APP_URL is derived automatically from PORT — no need to set it.
@@ -544,9 +592,9 @@ REDIS_HOST=localhost
544
592
  REDIS_PORT=${redisPort}
545
593
 
546
594
  # --- Mail Configuration ---
547
- MAILHOG_HOST=localhost
548
- MAILHOG_SMTP_PORT=${mailSmtpPort}
549
- MAILHOG_UI_PORT=${mailUiPort}
595
+ MAIL_HOST=localhost
596
+ MAIL_SMTP_PORT=${mailSmtpPort}
597
+ MAIL_UI_PORT=${mailUiPort}
550
598
 
551
599
  # --- NextAuth Configuration ---
552
600
  NEXTAUTH_SECRET=${nextAuthSecret}
@@ -581,6 +629,45 @@ STORAGE_PROVIDER=local
581
629
  );
582
630
  console.log(chalk.green(` ✓ .quark-link.json`));
583
631
 
632
+ // Step 10b: Generate project-context skill with actual values
633
+ console.log(chalk.cyan("\n 🤖 Generating project-context skill..."));
634
+ const skillPath = path.join(
635
+ targetDir,
636
+ ".github",
637
+ "skills",
638
+ "project-context",
639
+ "SKILL.md",
640
+ );
641
+ if (await fs.pathExists(skillPath)) {
642
+ let skillContent = await fs.readFile(skillPath, "utf-8");
643
+
644
+ // Build optional packages section
645
+ const optionalLines = features
646
+ .map((f) => {
647
+ const labels = {
648
+ ui: "Shared UI components",
649
+ jobs: "Job queue definitions",
650
+ };
651
+ return `│ ├── ${f}/ # ${labels[f] || f}`;
652
+ })
653
+ .join("\n");
654
+ const optionalBlock = optionalLines ? `${optionalLines}\n` : "";
655
+
656
+ skillContent = skillContent
657
+ .replace(/__QUARK_SCOPE__/g, scope)
658
+ .replace(/__QUARK_PROJECT_NAME__/g, projectName)
659
+ .replace(
660
+ /__QUARK_SCAFFOLD_DATE__/g,
661
+ new Date().toISOString().split("T")[0],
662
+ )
663
+ .replace(/__QUARK_OPTIONAL_PACKAGES__/g, optionalBlock);
664
+
665
+ await fs.writeFile(skillPath, skillContent);
666
+ console.log(
667
+ chalk.green(` ✓ .github/skills/project-context/SKILL.md`),
668
+ );
669
+ }
670
+
584
671
  // Step 11: Initialize git repository
585
672
  console.log(chalk.cyan("\n 📝 Initializing git repository..."));
586
673
  const gitInitialized = await initializeGit(targetDir);
@@ -637,7 +724,7 @@ STORAGE_PROVIDER=local
637
724
  console.log(chalk.cyan("Next steps:"));
638
725
  console.log(chalk.white(` 1. cd ${projectName}`));
639
726
  console.log(chalk.white(` 2. docker compose up -d`));
640
- console.log(chalk.white(` 3. pnpm --filter db db:push`));
727
+ console.log(chalk.white(` 3. pnpm db:migrate`));
641
728
  console.log(chalk.white(` 4. pnpm dev\n`));
642
729
 
643
730
  console.log(chalk.cyan("Important:"));
@@ -647,7 +734,7 @@ STORAGE_PROVIDER=local
647
734
  ),
648
735
  );
649
736
  console.log(
650
- chalk.white(` • Use 'quark-update' to upgrade Quark packages\n`),
737
+ chalk.white(` • Or run: npx @techstream/quark-create-app update\n`),
651
738
  );
652
739
 
653
740
  console.log(chalk.cyan("Learn more:"));
@@ -0,0 +1,7 @@
1
+ <skills>
2
+ <skill>
3
+ <name>project-context</name>
4
+ <description>Project-specific context and conventions. This skill evolves with your project — update it as your architecture grows.</description>
5
+ <file>.github/skills/project-context/SKILL.md</file>
6
+ </skill>
7
+ </skills>
@@ -0,0 +1,106 @@
1
+ ---
2
+ name: project-context
3
+ description: Project-specific context and conventions. This skill evolves with your project — update it as your architecture grows.
4
+ ---
5
+
6
+ # Project Context
7
+
8
+ ## Overview
9
+
10
+ This is a Quark-based full-stack JavaScript application.
11
+
12
+ | Property | Value |
13
+ |---|---|
14
+ | **Scope** | `@__QUARK_SCOPE__` |
15
+ | **Framework** | Quark (scaffolded from `@techstream/quark-create-app`) |
16
+ | **Scaffolded** | __QUARK_SCAFFOLD_DATE__ |
17
+
18
+ ## Project Structure
19
+
20
+ ```
21
+ __QUARK_PROJECT_NAME__/
22
+ ├── apps/
23
+ │ ├── web/ # Next.js (App Router, Server Actions)
24
+ │ └── worker/ # BullMQ background worker
25
+ ├── packages/
26
+ │ ├── db/ # Prisma schema, client, queries
27
+ │ ├── config/ # Environment validation & shared config
28
+ __QUARK_OPTIONAL_PACKAGES__├── docker-compose.yml
29
+ ├── .env # Local environment (git-ignored)
30
+ └── .env.example # Template for environment variables
31
+ ```
32
+
33
+ ## Tech Stack
34
+
35
+ | Layer | Technology |
36
+ |---|---|
37
+ | Runtime | Node.js 24, ES Modules |
38
+ | Package manager | pnpm (workspaces) |
39
+ | Monorepo | Turborepo |
40
+ | Web | Next.js 16 (App Router) |
41
+ | Database | PostgreSQL 16 + Prisma 7 |
42
+ | Queue | BullMQ + Redis 7 |
43
+ | Auth | NextAuth v5 |
44
+ | Validation | Zod 4 |
45
+ | UI | Tailwind CSS + Shadcn |
46
+ | Email | Nodemailer |
47
+ | Linting | Biome |
48
+ | Testing | Node.js built-in test runner |
49
+
50
+ ## Coding Conventions
51
+
52
+ - **ESM only** — use `import`/`export`, never `require`.
53
+ - **No TypeScript** — plain `.js` and `.jsx` files.
54
+ - **Imports:** Use `@techstream/quark-core` for published utilities. Use `@__QUARK_SCOPE__/*` for local packages (db, config, ui, jobs).
55
+ - **Tests:** Co-located `*.test.js` files, run with `node --test`.
56
+ - **Validation:** Zod schemas for all Server Actions and API routes.
57
+ - **Errors:** Use `AppError` / `ValidationError` from `@techstream/quark-core/errors`.
58
+ - **Database models:** Always include `createdAt`/`updatedAt`.
59
+ - **Environment:** All env vars validated in `packages/config/src/validate-env.js`.
60
+
61
+ ## Common Commands
62
+
63
+ ```bash
64
+ pnpm dev # Start all apps in dev mode
65
+ pnpm build # Build everything
66
+ pnpm test # Run all tests
67
+ pnpm lint # Lint with Biome
68
+ pnpm db:generate # Regenerate Prisma client
69
+ pnpm db:push # Push schema changes
70
+ pnpm db:migrate # Run database migrations
71
+ docker compose up -d # Start infrastructure
72
+ ```
73
+
74
+ ## Key Files to Know
75
+
76
+ - `packages/db/prisma/schema.prisma` — Database schema (edit this to add models)
77
+ - `packages/db/src/queries.js` — Database query functions
78
+ - `packages/config/src/validate-env.js` — Environment variable validation
79
+ - `apps/web/src/app/` — Next.js App Router pages and API routes
80
+ - `apps/web/src/lib/auth.js` — Authentication configuration
81
+ - `apps/worker/src/handlers/` — Background job handlers
82
+
83
+ ## Updating Quark Core
84
+
85
+ ```bash
86
+ npx @techstream/quark-create-app update # Update core infrastructure
87
+ pnpm update @techstream/quark-core # Or update directly
88
+ ```
89
+
90
+ ---
91
+
92
+ ## Maintaining This Skill
93
+
94
+ > **Important:** When you make changes that affect this project's architecture,
95
+ > conventions, or structure — such as adding new packages, models, API patterns,
96
+ > environment variables, deployment targets, or team conventions — **update this
97
+ > skill file** to reflect those changes. This ensures future AI interactions
98
+ > always have accurate, up-to-date context.
99
+ >
100
+ > Examples of when to update this file:
101
+ > - Adding a new Prisma model or database table
102
+ > - Introducing a new API route pattern or middleware
103
+ > - Adding or removing a workspace package
104
+ > - Changing deployment infrastructure or CI/CD steps
105
+ > - Establishing new coding conventions or architectural decisions
106
+ > - Adding third-party integrations or services
@@ -6,15 +6,19 @@ A modern, scalable monorepo built with Quark.
6
6
 
7
7
  ```bash
8
8
  pnpm install
9
+ docker compose up -d
10
+ pnpm db:migrate
9
11
  pnpm dev
10
12
  ```
11
13
 
14
+ Open http://localhost:3000
15
+
12
16
  ## Services
13
17
 
14
18
  - **Docker**: `docker compose up -d`
15
- - **Database**: PostgreSQL on port 5432
16
- - **Cache**: Redis on port 6379
17
- - **Email**: Mailhog UI on port 8025
19
+ - **Database**: PostgreSQL
20
+ - **Cache**: Redis
21
+ - **Email**: Mailpit
18
22
 
19
23
  ## Development
20
24
 
@@ -29,6 +33,16 @@ pnpm test
29
33
  pnpm lint
30
34
  ```
31
35
 
36
+ ## Database
37
+
38
+ | Task | Command |
39
+ |------|--------|
40
+ | Run migrations | `pnpm db:migrate` |
41
+ | Push schema (no migration) | `pnpm db:push` |
42
+ | Generate Prisma client | `pnpm db:generate` |
43
+ | Seed database | `pnpm db:seed` |
44
+ | Open Prisma Studio | `pnpm db:studio` |
45
+
32
46
  ## Structure
33
47
 
34
48
  - `apps/` - Applications (web, worker, etc.)
@@ -0,0 +1,10 @@
1
+ {
2
+ "compilerOptions": {
3
+ "baseUrl": ".",
4
+ "paths": {
5
+ "@/*": ["./src/*"]
6
+ }
7
+ },
8
+ "include": ["next.env.d.ts", "**/*.js", "**/*.jsx"],
9
+ "exclude": ["node_modules"]
10
+ }
@@ -21,7 +21,8 @@
21
21
  "next-auth": "5.0.0-beta.30",
22
22
  "pg": "^8.18.0",
23
23
  "react": "19.2.0",
24
- "react-dom": "19.2.0"
24
+ "react-dom": "19.2.0",
25
+ "zod": "^4.3.6"
25
26
  },
26
27
  "devDependencies": {
27
28
  "@techstream/quark-config": "workspace:*",
@@ -2,13 +2,14 @@ import {
2
2
  createQueue,
3
3
  hashPassword,
4
4
  validateBody,
5
+ withCsrfProtection,
5
6
  } from "@techstream/quark-core";
6
7
  import { user, userRegisterSchema } from "@techstream/quark-db";
7
8
  import { JOB_NAMES, JOB_QUEUES } from "@techstream/quark-jobs";
8
9
  import { NextResponse } from "next/server";
9
10
  import { handleError } from "../../error-handler";
10
11
 
11
- export async function POST(request) {
12
+ export const POST = withCsrfProtection(async (request) => {
12
13
  try {
13
14
  const data = await validateBody(request, userRegisterSchema);
14
15
 
@@ -52,4 +53,4 @@ export async function POST(request) {
52
53
  } catch (error) {
53
54
  return handleError(error);
54
55
  }
55
- }
56
+ });
@@ -9,6 +9,7 @@ import {
9
9
  generateStorageKey,
10
10
  parseMultipart,
11
11
  validateFile,
12
+ withCsrfProtection,
12
13
  } from "@techstream/quark-core";
13
14
  import { file } from "@techstream/quark-db";
14
15
  import { NextResponse } from "next/server";
@@ -26,7 +27,7 @@ const paginationSchema = z.object({
26
27
  * Upload one or more files via multipart/form-data.
27
28
  * Requires authentication.
28
29
  */
29
- export async function POST(request) {
30
+ export const POST = withCsrfProtection(async (request) => {
30
31
  try {
31
32
  const session = await requireAuth();
32
33
 
@@ -94,7 +95,7 @@ export async function POST(request) {
94
95
  } catch (error) {
95
96
  return handleError(error);
96
97
  }
97
- }
98
+ });
98
99
 
99
100
  /**
100
101
  * GET /api/files
@@ -1,4 +1,8 @@
1
- import { validateBody, withCsrfProtection } from "@techstream/quark-core";
1
+ import {
2
+ createQueryBuilder,
3
+ validateBody,
4
+ withCsrfProtection,
5
+ } from "@techstream/quark-core";
2
6
  import { post, postCreateSchema } from "@techstream/quark-db";
3
7
  import { NextResponse } from "next/server";
4
8
  import { z } from "zod";
@@ -10,16 +14,65 @@ const paginationSchema = z.object({
10
14
  limit: z.coerce.number().int().min(1).max(100).default(10),
11
15
  });
12
16
 
17
+ const querySchema = paginationSchema.extend({
18
+ search: z.string().optional(),
19
+ status: z.enum(["draft", "published"]).optional(),
20
+ authorId: z.string().optional(),
21
+ sort: z.enum(["createdAt", "updatedAt", "title"]).optional(),
22
+ order: z.enum(["asc", "desc"]).default("desc"),
23
+ });
24
+
13
25
  export async function GET(request) {
14
26
  try {
15
27
  const { searchParams } = new URL(request.url);
16
- const { page, limit } = paginationSchema.parse({
17
- page: searchParams.get("page") ?? undefined,
18
- limit: searchParams.get("limit") ?? undefined,
19
- });
28
+ const { page, limit, search, status, authorId, sort, order } =
29
+ querySchema.parse({
30
+ page: searchParams.get("page") ?? undefined,
31
+ limit: searchParams.get("limit") ?? undefined,
32
+ search: searchParams.get("search") ?? undefined,
33
+ status: searchParams.get("status") ?? undefined,
34
+ authorId: searchParams.get("authorId") ?? undefined,
35
+ sort: searchParams.get("sort") ?? undefined,
36
+ order: searchParams.get("order") ?? undefined,
37
+ });
38
+
20
39
  const skip = (page - 1) * limit;
21
40
 
22
- const posts = await post.findAll({ skip, take: limit });
41
+ // Build query with filters, search, and sort
42
+ const qb = createQueryBuilder({
43
+ filterableFields: ["published", "authorId"],
44
+ searchFields: ["title", "content"],
45
+ sortableFields: ["createdAt", "updatedAt", "title"],
46
+ });
47
+
48
+ // Apply filters
49
+ if (status === "published") {
50
+ qb.filter("published", "eq", true);
51
+ } else if (status === "draft") {
52
+ qb.filter("published", "eq", false);
53
+ }
54
+
55
+ if (authorId) {
56
+ qb.filter("authorId", "eq", authorId);
57
+ }
58
+
59
+ // Apply search
60
+ if (search) {
61
+ qb.search(search);
62
+ }
63
+
64
+ // Apply sort
65
+ if (sort) {
66
+ qb.sort(sort, order);
67
+ }
68
+
69
+ const posts = await post.findAll({
70
+ skip,
71
+ take: limit,
72
+ where: qb.toWhere(),
73
+ orderBy: qb.toOrderBy(),
74
+ });
75
+
23
76
  return NextResponse.json(posts);
24
77
  } catch (error) {
25
78
  return handleError(error);
@@ -2,6 +2,7 @@
2
2
  "name": "@techstream/quark-worker",
3
3
  "version": "1.0.0",
4
4
  "type": "module",
5
+ "private": true,
5
6
  "description": "",
6
7
  "main": "index.js",
7
8
  "scripts": {
@@ -56,6 +56,20 @@ function createQueueWorker(queueName) {
56
56
  );
57
57
  });
58
58
 
59
+ queueWorker.on("stalled", (jobId) => {
60
+ logger.warn(`Job ${jobId} in queue "${queueName}" has stalled`, {
61
+ queueName,
62
+ jobId,
63
+ });
64
+ });
65
+
66
+ queueWorker.on("error", (error) => {
67
+ logger.error(`Worker error in queue "${queueName}"`, {
68
+ error: error.message,
69
+ queueName,
70
+ });
71
+ });
72
+
59
73
  logger.info(
60
74
  `Queue "${queueName}" worker started (concurrency: ${queueWorker.opts.concurrency})`,
61
75
  );
@@ -11,6 +11,11 @@ services:
11
11
  POSTGRES_DB: ${POSTGRES_DB}
12
12
  volumes:
13
13
  - postgres_data:/var/lib/postgresql/data
14
+ healthcheck:
15
+ test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-quark_user}"]
16
+ interval: 5s
17
+ timeout: 5s
18
+ retries: 5
14
19
 
15
20
  # --- 2. Redis Cache & Job Queue ---
16
21
  redis:
@@ -21,6 +26,11 @@ services:
21
26
  command: redis-server --appendonly yes
22
27
  volumes:
23
28
  - redis_data:/data
29
+ healthcheck:
30
+ test: ["CMD", "redis-cli", "ping"]
31
+ interval: 5s
32
+ timeout: 5s
33
+ retries: 5
24
34
 
25
35
  # --- 3. Mailpit (Local SMTP Server) ---
26
36
  mailpit:
@@ -28,9 +38,14 @@ services:
28
38
  restart: always
29
39
  ports:
30
40
  # SMTP port (used by application to send mail)
31
- - "${MAILHOG_SMTP_PORT:-1025}:1025"
41
+ - "${MAIL_SMTP_PORT:-1025}:1025"
32
42
  # Web UI port (to view sent emails)
33
- - "${MAILHOG_UI_PORT:-8025}:8025"
43
+ - "${MAIL_UI_PORT:-8025}:8025"
44
+ healthcheck:
45
+ test: ["CMD", "wget", "--spider", "-q", "http://localhost:8025"]
46
+ interval: 10s
47
+ timeout: 5s
48
+ retries: 3
34
49
 
35
50
  volumes:
36
51
  postgres_data:
@@ -11,7 +11,11 @@
11
11
  "test": "turbo run test",
12
12
  "docker:up": "docker compose up -d",
13
13
  "docker:down": "docker compose down",
14
- "db:generate": "turbo run db:generate"
14
+ "db:generate": "turbo run db:generate",
15
+ "db:migrate": "turbo run db:migrate",
16
+ "db:push": "turbo run db:push",
17
+ "db:seed": "turbo run db:seed",
18
+ "db:studio": "turbo run db:studio"
15
19
  },
16
20
  "keywords": [],
17
21
  "author": "",
@@ -23,11 +23,11 @@
23
23
  "devDependencies": {
24
24
  "@techstream/quark-config": "workspace:*",
25
25
  "bcryptjs": "^3.0.3",
26
- "prisma": "^7.3.0"
26
+ "prisma": "^7.4.0"
27
27
  },
28
28
  "dependencies": {
29
- "@prisma/adapter-pg": "^7.3.0",
30
- "@prisma/client": "^7.3.0",
29
+ "@prisma/adapter-pg": "^7.4.0",
30
+ "@prisma/client": "^7.4.0",
31
31
  "dotenv": "^17.2.4",
32
32
  "pg": "^8.18.0",
33
33
  "zod": "^4.3.6"
@@ -43,6 +43,8 @@ CREATE TABLE "Account" (
43
43
  "scope" TEXT,
44
44
  "id_token" TEXT,
45
45
  "session_state" TEXT,
46
+ "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
47
+ "updatedAt" TIMESTAMP(3) NOT NULL,
46
48
 
47
49
  CONSTRAINT "Account_pkey" PRIMARY KEY ("id")
48
50
  );
@@ -86,6 +88,22 @@ CREATE TABLE "Job" (
86
88
  CONSTRAINT "Job_pkey" PRIMARY KEY ("id")
87
89
  );
88
90
 
91
+ -- CreateTable
92
+ CREATE TABLE "File" (
93
+ "id" TEXT NOT NULL,
94
+ "filename" TEXT NOT NULL,
95
+ "originalName" TEXT NOT NULL,
96
+ "mimeType" TEXT NOT NULL,
97
+ "size" INTEGER NOT NULL,
98
+ "storageKey" TEXT NOT NULL,
99
+ "storageProvider" TEXT NOT NULL DEFAULT 'local',
100
+ "uploadedById" TEXT,
101
+ "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
102
+ "updatedAt" TIMESTAMP(3) NOT NULL,
103
+
104
+ CONSTRAINT "File_pkey" PRIMARY KEY ("id")
105
+ );
106
+
89
107
  -- CreateTable
90
108
  CREATE TABLE "AuditLog" (
91
109
  "id" TEXT NOT NULL,
@@ -130,6 +148,9 @@ CREATE UNIQUE INDEX "Session_sessionToken_key" ON "Session"("sessionToken");
130
148
  -- CreateIndex
131
149
  CREATE INDEX "Session_userId_idx" ON "Session"("userId");
132
150
 
151
+ -- CreateIndex
152
+ CREATE INDEX "Session_expires_idx" ON "Session"("expires");
153
+
133
154
  -- CreateIndex
134
155
  CREATE UNIQUE INDEX "VerificationToken_token_key" ON "VerificationToken"("token");
135
156
 
@@ -139,6 +160,9 @@ CREATE INDEX "VerificationToken_token_idx" ON "VerificationToken"("token");
139
160
  -- CreateIndex
140
161
  CREATE UNIQUE INDEX "VerificationToken_identifier_token_key" ON "VerificationToken"("identifier", "token");
141
162
 
163
+ -- CreateIndex
164
+ CREATE INDEX "VerificationToken_expires_idx" ON "VerificationToken"("expires");
165
+
142
166
  -- CreateIndex
143
167
  CREATE INDEX "Job_queue_idx" ON "Job"("queue");
144
168
 
@@ -148,9 +172,24 @@ CREATE INDEX "Job_status_idx" ON "Job"("status");
148
172
  -- CreateIndex
149
173
  CREATE INDEX "Job_runAt_idx" ON "Job"("runAt");
150
174
 
175
+ -- CreateIndex
176
+ CREATE INDEX "Job_status_runAt_idx" ON "Job"("status", "runAt");
177
+
151
178
  -- CreateIndex
152
179
  CREATE INDEX "Job_createdAt_idx" ON "Job"("createdAt");
153
180
 
181
+ -- CreateIndex
182
+ CREATE UNIQUE INDEX "File_storageKey_key" ON "File"("storageKey");
183
+
184
+ -- CreateIndex
185
+ CREATE INDEX "File_uploadedById_idx" ON "File"("uploadedById");
186
+
187
+ -- CreateIndex
188
+ CREATE INDEX "File_mimeType_idx" ON "File"("mimeType");
189
+
190
+ -- CreateIndex
191
+ CREATE INDEX "File_createdAt_idx" ON "File"("createdAt");
192
+
154
193
  -- CreateIndex
155
194
  CREATE INDEX "AuditLog_userId_idx" ON "AuditLog"("userId");
156
195
 
@@ -172,5 +211,8 @@ ALTER TABLE "Account" ADD CONSTRAINT "Account_userId_fkey" FOREIGN KEY ("userId"
172
211
  -- AddForeignKey
173
212
  ALTER TABLE "Session" ADD CONSTRAINT "Session_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
174
213
 
214
+ -- AddForeignKey
215
+ ALTER TABLE "File" ADD CONSTRAINT "File_uploadedById_fkey" FOREIGN KEY ("uploadedById") REFERENCES "User"("id") ON DELETE SET NULL ON UPDATE CASCADE;
216
+
175
217
  -- AddForeignKey
176
218
  ALTER TABLE "AuditLog" ADD CONSTRAINT "AuditLog_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -28,6 +28,7 @@ model User {
28
28
  accounts Account[]
29
29
  sessions Session[]
30
30
  auditLogs AuditLog[]
31
+ files File[]
31
32
 
32
33
  @@index([email])
33
34
  @@index([createdAt])
@@ -127,6 +128,25 @@ enum JobStatus {
127
128
  CANCELLED
128
129
  }
129
130
 
131
+ // File Model
132
+ model File {
133
+ id String @id @default(cuid())
134
+ filename String
135
+ originalName String
136
+ mimeType String
137
+ size Int
138
+ storageKey String @unique
139
+ storageProvider String @default("local")
140
+ uploadedById String?
141
+ uploadedBy User? @relation(fields: [uploadedById], references: [id], onDelete: SetNull)
142
+ createdAt DateTime @default(now())
143
+ updatedAt DateTime @updatedAt
144
+
145
+ @@index([uploadedById])
146
+ @@index([mimeType])
147
+ @@index([createdAt])
148
+ }
149
+
130
150
  // Audit Log Model
131
151
  model AuditLog {
132
152
  id String @id @default(cuid())
@@ -81,12 +81,13 @@ export const post = {
81
81
  });
82
82
  },
83
83
  findAll: (options = {}) => {
84
- const { skip = 0, take = 10 } = options;
84
+ const { skip = 0, take = 10, where, orderBy } = options;
85
85
  return prisma.post.findMany({
86
+ where,
86
87
  skip,
87
88
  take,
88
89
  include: AUTHOR_SAFE_INCLUDE,
89
- orderBy: { createdAt: "desc" },
90
+ orderBy: orderBy || { createdAt: "desc" },
90
91
  });
91
92
  },
92
93
  findPublished: (options = {}) => {
@@ -230,6 +231,61 @@ export const verificationToken = {
230
231
  },
231
232
  };
232
233
 
234
+ // File queries
235
+ export const file = {
236
+ create: (data) => {
237
+ return prisma.file.create({ data });
238
+ },
239
+ findById: (id) => {
240
+ return prisma.file.findUnique({
241
+ where: { id },
242
+ include: {
243
+ uploadedBy: { select: { id: true, email: true, name: true } },
244
+ },
245
+ });
246
+ },
247
+ findByStorageKey: (storageKey) => {
248
+ return prisma.file.findUnique({ where: { storageKey } });
249
+ },
250
+ findByUploader: (uploadedById, options = {}) => {
251
+ const { skip = 0, take = 50 } = options;
252
+ return prisma.file.findMany({
253
+ where: { uploadedById },
254
+ skip,
255
+ take,
256
+ orderBy: { createdAt: "desc" },
257
+ });
258
+ },
259
+ findOrphaned: (options = {}) => {
260
+ const { take = 100 } = options;
261
+ return prisma.file.findMany({
262
+ where: { uploadedById: null },
263
+ take,
264
+ orderBy: { createdAt: "asc" },
265
+ });
266
+ },
267
+ findOlderThan: (date, options = {}) => {
268
+ const { take = 100 } = options;
269
+ return prisma.file.findMany({
270
+ where: {
271
+ uploadedById: null,
272
+ createdAt: { lt: date },
273
+ },
274
+ take,
275
+ orderBy: { createdAt: "asc" },
276
+ });
277
+ },
278
+ delete: (id) => {
279
+ return prisma.file.delete({ where: { id } });
280
+ },
281
+ deleteMany: (ids) => {
282
+ return prisma.file.deleteMany({ where: { id: { in: ids } } });
283
+ },
284
+ count: (where = {}) => {
285
+ return prisma.file.count({ where });
286
+ },
287
+ };
288
+
233
289
  // AuditLog queries
234
290
  export const auditLog = {
235
291
  findAll: (options = {}) => {
@@ -34,3 +34,9 @@ export const postUpdateSchema = z.object({
34
34
  content: z.string().optional(),
35
35
  published: z.boolean().optional(),
36
36
  });
37
+
38
+ export const fileUploadSchema = z.object({
39
+ filename: z.string().min(1, "Filename is required"),
40
+ mimeType: z.string().min(1, "MIME type is required"),
41
+ size: z.number().int().positive("File size must be positive"),
42
+ });
@@ -2,7 +2,7 @@
2
2
  "$schema": "https://turbo.build/schema.json",
3
3
  "tasks": {
4
4
  "build": {
5
- "dependsOn": ["^build"],
5
+ "dependsOn": ["^build", "db:generate"],
6
6
  "outputs": ["dist/**", ".next/**"],
7
7
  "env": ["NODE_ENV"]
8
8
  },
@@ -17,6 +17,19 @@
17
17
  "cache": false,
18
18
  "outputs": ["node_modules/.prisma/client"]
19
19
  },
20
+ "db:migrate": {
21
+ "cache": false
22
+ },
23
+ "db:push": {
24
+ "cache": false
25
+ },
26
+ "db:seed": {
27
+ "cache": false
28
+ },
29
+ "db:studio": {
30
+ "cache": false,
31
+ "persistent": true
32
+ },
20
33
  "dev": {
21
34
  "cache": false,
22
35
  "persistent": true,
@@ -29,9 +42,9 @@
29
42
  "POSTGRES_DB",
30
43
  "REDIS_HOST",
31
44
  "REDIS_PORT",
32
- "MAILHOG_HOST",
33
- "MAILHOG_SMTP_PORT",
34
- "MAILHOG_UI_PORT",
45
+ "MAIL_HOST",
46
+ "MAIL_SMTP_PORT",
47
+ "MAIL_UI_PORT",
35
48
  "NEXTAUTH_SECRET",
36
49
  "APP_URL",
37
50
  "WORKER_CONCURRENCY",
@@ -14,7 +14,7 @@ export const config = {
14
14
  },
15
15
  email: {
16
16
  from: process.env.EMAIL_FROM || "noreply@myquarkapp.com",
17
- provider: process.env.EMAIL_PROVIDER || "mailhog",
17
+ provider: process.env.EMAIL_PROVIDER || "smtp",
18
18
  },
19
19
  };
20
20
 
@@ -22,11 +22,11 @@ const envSchema = {
22
22
  REDIS_HOST: { required: false, description: "Redis host" },
23
23
  REDIS_PORT: { required: false, description: "Redis port" },
24
24
 
25
- // Mailhog
26
- MAILHOG_SMTP_URL: { required: false, description: "Mailhog SMTP URL" },
27
- MAILHOG_HOST: { required: false, description: "Mailhog host" },
28
- MAILHOG_SMTP_PORT: { required: false, description: "Mailhog SMTP port" },
29
- MAILHOG_UI_PORT: { required: false, description: "Mailhog UI port" },
25
+ // Mail (local SMTP server)
26
+ MAIL_SMTP_URL: { required: false, description: "Mail SMTP URL" },
27
+ MAIL_HOST: { required: false, description: "Mail host" },
28
+ MAIL_SMTP_PORT: { required: false, description: "Mail SMTP port" },
29
+ MAIL_UI_PORT: { required: false, description: "Mail UI port" },
30
30
 
31
31
  // NextAuth
32
32
  NEXTAUTH_SECRET: {