@postxl/generators 1.15.1 → 1.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/dist/backend-excel-io/excel-io.generator.d.ts +2 -1
  2. package/dist/backend-excel-io/excel-io.generator.js +2 -0
  3. package/dist/backend-excel-io/excel-io.generator.js.map +1 -1
  4. package/dist/backend-excel-io/template/excel-io.controller.ts +24 -54
  5. package/dist/backend-upload/index.d.ts +3 -0
  6. package/dist/backend-upload/index.js +40 -0
  7. package/dist/backend-upload/index.js.map +1 -0
  8. package/dist/backend-upload/template/src/index.ts +13 -0
  9. package/dist/backend-upload/template/src/upload.config.ts +15 -0
  10. package/dist/backend-upload/template/src/upload.controller.ts +53 -0
  11. package/dist/backend-upload/template/src/upload.guard.ts +39 -0
  12. package/dist/backend-upload/template/src/upload.module.ts +26 -0
  13. package/dist/backend-upload/template/src/upload.service.ts +333 -0
  14. package/dist/backend-upload/template/src/upload.types.ts +37 -0
  15. package/dist/backend-upload/template/src/uploaded-file.decorator.ts +12 -0
  16. package/dist/backend-upload/template/tsconfig.lib.json +10 -0
  17. package/dist/backend-upload/upload.generator.d.ts +16 -0
  18. package/dist/backend-upload/upload.generator.js +107 -0
  19. package/dist/backend-upload/upload.generator.js.map +1 -0
  20. package/dist/base/template/.claude/commands/README.md +65 -0
  21. package/dist/base/template/.claude/settings.json +3 -1
  22. package/dist/base/template/.github/.copilot-prompts.json +22 -0
  23. package/dist/e2e/e2e.generator.js +43 -1
  24. package/dist/e2e/e2e.generator.js.map +1 -1
  25. package/dist/e2e/template/.claude/commands/prepare-e2e-tests.md +251 -0
  26. package/dist/e2e/template/.claude/commands/run-e2e-tests.md +221 -0
  27. package/dist/e2e/template/scripts/e2e.sh +398 -0
  28. package/dist/frontend-forms/generators/model/forms.generator.js +191 -0
  29. package/dist/frontend-forms/generators/model/forms.generator.js.map +1 -1
  30. package/dist/frontend-tables/generators/model-table.generator.js +16 -2
  31. package/dist/frontend-tables/generators/model-table.generator.js.map +1 -1
  32. package/dist/generators.js +2 -0
  33. package/dist/generators.js.map +1 -1
  34. package/dist/index.d.ts +1 -0
  35. package/dist/index.js +5 -2
  36. package/dist/index.js.map +1 -1
  37. package/package.json +4 -4
@@ -0,0 +1,333 @@
1
+ import { DispatcherService } from '@actions/dispatcher.service'
2
+ import { BadRequestException, Injectable, NotFoundException, PayloadTooLargeException } from '@nestjs/common'
3
+ import { S3Service } from '@s3/s3.service'
4
+ import type { FileId, User } from '@types'
5
+ import { ViewService } from '@view/view.service'
6
+
7
+ import type { MultipartFile } from '@fastify/multipart'
8
+ import { mkdir, readFile, writeFile } from 'node:fs/promises'
9
+ import path from 'node:path'
10
+ import { Readable } from 'node:stream'
11
+
12
+ import { UploadConfig } from './upload.config'
13
+ import {
14
+ DEFAULT_MAX_SIZE_BYTES,
15
+ type StoredUploadRecord,
16
+ type UploadGuardOptions,
17
+ type UploadedFileDataPayload,
18
+ type UploadStorageMode,
19
+ } from './upload.types'
20
+
21
/**
 * Matching rule for one coarse file-type category: a file is accepted when its
 * extension is listed, its exact mimetype is listed, or its mimetype starts
 * with one of the prefixes (see validateConstraints in UploadService).
 */
type FileTypeRule = {
  extensions: string[]
  mimeTypes: string[]
  mimePrefixes: string[]
}

// Lookup table backing the `allowedFileTypes` guard option. Keys mirror the
// UploadAllowedFileType union declared in upload.types.
const FILE_TYPE_RULES: Record<string, FileTypeRule> = {
  image: {
    extensions: ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.svg'],
    mimeTypes: [],
    // Any image/* mimetype is acceptable regardless of extension.
    mimePrefixes: ['image/'],
  },
  excel: {
    extensions: ['.xlsx', '.xls'],
    mimeTypes: [
      'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
      'application/vnd.ms-excel',
      // Some browsers/clients send spreadsheets as a generic binary stream.
      'application/octet-stream',
    ],
    mimePrefixes: [],
  },
  json: {
    extensions: ['.json'],
    mimeTypes: ['application/json', 'text/json'],
    mimePrefixes: [],
  },
  csv: {
    extensions: ['.csv'],
    // NOTE(review): 'application/vnd.ms-excel' appears here too — some clients
    // label CSV exports with the Excel mimetype; confirm this overlap with
    // the 'excel' category is intentional.
    mimeTypes: ['text/csv', 'application/csv', 'application/vnd.ms-excel'],
    mimePrefixes: [],
  },
  pdf: {
    extensions: ['.pdf'],
    mimeTypes: ['application/pdf'],
    mimePrefixes: [],
  },
  text: {
    extensions: ['.txt', '.md'],
    mimeTypes: ['text/plain', 'text/markdown'],
    // Any text/* mimetype passes for the 'text' category.
    mimePrefixes: ['text/'],
  },
}
63
+
64
@Injectable()
export class UploadService {
  // Binaries stored with mode 'none' live only in process memory, keyed by
  // file id. NOTE(review): entries are never evicted, so long-running
  // processes accumulate buffers in dry-run mode — confirm this is acceptable.
  private readonly inMemoryFiles = new Map<FileId, Buffer>()

  constructor(
    private readonly uploadConfig: UploadConfig,
    private readonly dispatcherService: DispatcherService,
    private readonly viewService: ViewService,
    private readonly s3Service: S3Service,
  ) {}

  /**
   * Validates, buffers, persists, and records an incoming multipart upload.
   *
   * Flow: validate extension/mimetype constraints → buffer the stream under a
   * size cap → create a file record via the dispatcher → persist the binary →
   * update the record with its storage location. If persistence or the update
   * fails, the created record is deleted (best effort) and the error rethrown.
   *
   * @throws BadRequestException when constraints fail or the file is empty
   * @throws PayloadTooLargeException when the stream exceeds the size cap
   */
  public async processUpload({
    multipartFile,
    user,
    options,
  }: {
    multipartFile: MultipartFile
    user: User
    options?: UploadGuardOptions
  }): Promise<UploadedFileDataPayload> {
    // Precedence for the size cap: per-route option → configured value → 30 MB default.
    const maxBytes = options?.maxFileSizeBytes ?? this.uploadConfig.values.maxSizeBytes ?? DEFAULT_MAX_SIZE_BYTES
    const filename = multipartFile.filename || 'upload.bin'
    const mimetype = multipartFile.mimetype || 'application/octet-stream'
    const extension = path.extname(filename).toLowerCase()

    this.validateConstraints({ extension, mimetype, options })

    const buffer = await this.toBufferWithLimit(multipartFile.file, maxBytes)
    if (buffer.length === 0) {
      throw new BadRequestException('Uploaded file is empty')
    }

    const storageMode = this.resolveStorageMode(options)
    // Create the record first (with an empty url) so a file id exists to key
    // the stored binary; the url is filled in after storage succeeds.
    const createdFile = await this.dispatcherService.dispatch({
      action: {
        scope: 'file',
        type: 'create',
        payload: {
          name: filename,
          mimetype,
          size: buffer.length,
          url: '',
        },
      },
      user,
    })

    try {
      const stored = await this.storeUpload({ fileId: createdFile.id, filename, extension, buffer, storageMode })

      const updatedFile = await this.dispatcherService.dispatch({
        action: {
          scope: 'file',
          type: 'update',
          payload: {
            id: createdFile.id,
            name: filename,
            mimetype,
            size: buffer.length,
            url: stored.location,
          },
        },
        user,
      })

      return {
        file: updatedFile,
        buffer,
        filename,
        mimetype,
        size: buffer.length,
        fields: multipartFile.fields,
      }
    } catch (error) {
      // Roll back the half-created record so storage failures don't leave
      // orphan file entries, then surface the original error.
      await this.deleteFailedUploadRecord({ fileId: createdFile.id, user })
      throw error
    }
  }

  /**
   * Resolves a stored file's binary content for the given user.
   * @throws NotFoundException when the record does not exist (or is not visible to the user)
   */
  public async getStoredFileBuffer({ fileId, user }: { fileId: FileId; user: User }): Promise<Buffer> {
    const file = await this.viewService.files.get({ id: fileId, user })
    if (!file) {
      throw new NotFoundException(`File ${fileId} not found`)
    }

    return this.readStoredFile({ fileId, location: file.url })
  }

  /**
   * Returns the file record (metadata only, no binary) for the given user.
   * @throws NotFoundException when the record does not exist
   */
  public async getStoredFileRecord({ fileId, user }: { fileId: FileId; user: User }) {
    const file = await this.viewService.files.get({ id: fileId, user })
    if (!file) {
      throw new NotFoundException(`File ${fileId} not found`)
    }

    return file
  }

  /** Reads a binary directly from a known storage location, skipping the record lookup. */
  public async getBufferFromFileRecord({ fileId, location }: { fileId: FileId; location: string }): Promise<Buffer> {
    return this.readStoredFile({ fileId, location })
  }

  /** Exposes the root user from the view layer (used for system-initiated operations). */
  public getRootUser(): User {
    return this.viewService.users.data.rootUser
  }

  // Dry-run config forces in-memory storage; otherwise a per-route option may
  // override the configured default storage backend.
  private resolveStorageMode(options?: UploadGuardOptions): UploadStorageMode {
    if (this.uploadConfig.values.dryRun) {
      return 'none'
    }

    return options?.storageMode ?? this.uploadConfig.values.storage
  }

  /**
   * Persists the buffered upload according to the storage mode and returns a
   * URI-style locator: memory://<id>, file://<path>, or s3://<key>.
   */
  private async storeUpload({
    fileId,
    filename,
    extension,
    buffer,
    storageMode,
  }: {
    fileId: FileId
    filename: string
    extension: string
    buffer: Buffer
    storageMode: UploadStorageMode
  }): Promise<StoredUploadRecord> {
    if (storageMode === 'none') {
      this.inMemoryFiles.set(fileId, buffer)
      return {
        fileId,
        location: `memory://${fileId}`,
        storageMode,
      }
    }

    if (storageMode === 'local') {
      // Strip anything outside [A-Za-z0-9._-] so the client-supplied filename
      // cannot inject path separators or shell-unsafe characters.
      const sanitizedName = filename.replaceAll(/[^A-Za-z0-9._-]/g, '_')
      const storageDir = path.resolve(this.uploadConfig.values.localDirectory)
      // Prefixing with the file id keeps concurrent uploads of equal names distinct.
      const targetPath = path.join(storageDir, `${fileId}-${sanitizedName}`)

      await mkdir(storageDir, { recursive: true })
      await writeFile(targetPath, buffer)

      return {
        fileId,
        location: `file://${targetPath}`,
        storageMode,
      }
    }

    // s3 mode: extension is passed without its leading dot, or omitted entirely.
    const key = await this.s3Service.send(
      Readable.from(buffer),
      String(fileId),
      extension ? extension.slice(1) : undefined,
    )

    return {
      fileId,
      location: `s3://${key}`,
      storageMode,
    }
  }

  /**
   * Dereferences a storage locator produced by storeUpload.
   * @throws NotFoundException when the memory entry or S3 object is gone
   * @throws BadRequestException for an unrecognized locator scheme
   */
  private async readStoredFile({ fileId, location }: { fileId: FileId; location: string }): Promise<Buffer> {
    if (location.startsWith('memory://')) {
      const cached = this.inMemoryFiles.get(fileId)
      if (!cached) {
        throw new NotFoundException(`No in-memory binary found for ${fileId}`)
      }
      return cached
    }

    if (location.startsWith('file://')) {
      const filePath = location.slice('file://'.length)
      return readFile(filePath)
    }

    if (location.startsWith('s3://')) {
      const key = location.slice('s3://'.length)
      const body = await this.s3Service.getFile(key)
      if (!body) {
        throw new NotFoundException(`Stored object ${key} not found in S3`)
      }
      return Buffer.from(body)
    }

    throw new BadRequestException(`Unsupported upload location format: ${location}`)
  }

  /**
   * Enforces the guard options in order: exact mimetypes, then extensions,
   * then coarse file-type categories. Each check only applies when its option
   * is present and non-empty; with no options, everything passes.
   */
  private validateConstraints({
    extension,
    mimetype,
    options,
  }: {
    extension: string
    mimetype: string
    options?: UploadGuardOptions
  }): void {
    if (options?.allowedMimeTypes && options.allowedMimeTypes.length > 0) {
      if (!options.allowedMimeTypes.includes(mimetype)) {
        throw new BadRequestException(`Unsupported mimetype: ${mimetype}`)
      }
    }

    if (options?.allowedFileExtensions && options.allowedFileExtensions.length > 0) {
      // Extension was lowercased by the caller; lowercase the allow-list too
      // so the comparison is case-insensitive.
      const allowedExtensions = options.allowedFileExtensions.map((ext) => ext.toLowerCase())
      if (!allowedExtensions.includes(extension)) {
        throw new BadRequestException(`Unsupported file extension: ${extension || '(none)'}`)
      }
    }

    if (options?.allowedFileTypes && options.allowedFileTypes.length > 0) {
      // A category matches via extension, exact mimetype, or mimetype prefix.
      const allowed = options.allowedFileTypes.some((type) => {
        const rule = FILE_TYPE_RULES[type]
        return (
          rule.extensions.includes(extension) ||
          rule.mimeTypes.includes(mimetype) ||
          rule.mimePrefixes.some((prefix) => mimetype.startsWith(prefix))
        )
      })

      if (!allowed) {
        throw new BadRequestException(
          `File type is not allowed for mimetype ${mimetype} and extension ${extension || '(none)'}`,
        )
      }
    }
  }

  /**
   * Drains the multipart stream into a single Buffer, aborting as soon as the
   * running total exceeds maxBytes (so an oversized upload is rejected without
   * being fully read into memory).
   * @throws PayloadTooLargeException when the cap is exceeded
   */
  private async toBufferWithLimit(stream: AsyncIterable<unknown>, maxBytes: number): Promise<Buffer> {
    const chunks: Buffer[] = []
    let size = 0

    for await (const chunk of stream) {
      const buffer = toBuffer(chunk)
      size += buffer.length
      if (size > maxBytes) {
        // NOTE(review): Math.floor reports "0 MB" for sub-megabyte caps — a
        // cosmetic quirk of the error message only.
        throw new PayloadTooLargeException(`Uploaded file exceeds ${Math.floor(maxBytes / (1024 * 1024))} MB limit`)
      }
      chunks.push(buffer)
    }

    return Buffer.concat(chunks)
  }

  // Compensating action for processUpload: delete the record created before
  // storage failed. Errors are swallowed deliberately — rollback is best effort.
  private async deleteFailedUploadRecord({ fileId, user }: { fileId: FileId; user: User }): Promise<void> {
    try {
      await this.dispatcherService.dispatch({
        action: {
          scope: 'file',
          type: 'delete',
          payload: fileId,
        },
        user,
      })
    } catch {
      // Best-effort rollback to avoid orphan file records when storage fails.
    }
  }
}
324
+
325
+ function toBuffer(chunk: unknown): Buffer {
326
+ if (Buffer.isBuffer(chunk)) {
327
+ return chunk
328
+ }
329
+ if (chunk instanceof Uint8Array) {
330
+ return Buffer.from(chunk)
331
+ }
332
+ return Buffer.from(String(chunk))
333
+ }
@@ -0,0 +1,37 @@
1
import type { MultipartFile } from '@fastify/multipart'
import type { FastifyRequestWithViewer } from '@authentication/auth.guard'

import type { FileId, FileViewModel } from '@types'

/** Default per-file upload cap (30 MB) used when neither options nor config provide one. */
export const DEFAULT_MAX_SIZE_BYTES = 30 * 1024 * 1024

/** Where upload binaries are persisted: in process memory, on local disk, or in S3. */
export type UploadStorageMode = 'none' | 'local' | 's3'

/** Coarse file-type categories, each resolvable to extension/mimetype rules. */
export type UploadAllowedFileType = 'image' | 'excel' | 'json' | 'csv' | 'pdf' | 'text'

/** Per-route overrides for upload validation and storage behavior. */
export type UploadGuardOptions = {
  /** Overrides the configured/default maximum size, in bytes. */
  maxFileSizeBytes?: number
  /** Exact mimetypes accepted; absent or empty means no mimetype restriction. */
  allowedMimeTypes?: string[]
  /** Extensions accepted, including the dot; compared case-insensitively. */
  allowedFileExtensions?: string[]
  /** Coarse categories accepted (matched by extension, mimetype, or mimetype prefix). */
  allowedFileTypes?: UploadAllowedFileType[]
  /** Forces a storage mode for this route; ignored when dry-run mode is configured. */
  storageMode?: UploadStorageMode
}

/** Result of a processed upload: the persisted record plus the raw payload data. */
export type UploadedFileDataPayload = {
  file: FileViewModel
  buffer: Buffer
  filename: string
  mimetype: string
  size: number
  /** Non-file multipart form fields submitted alongside the binary. */
  fields: MultipartFile['fields']
}

/** Authenticated request that may carry the payload attached by the upload guard. */
export type UploadRequest = FastifyRequestWithViewer & {
  uploadedFileData?: UploadedFileDataPayload
}

/** Handle to a stored binary: its record id, locator, and storage mode. */
export type StoredUploadRecord = {
  fileId: FileId
  /** URI-style locator: memory://<id>, file://<path>, or s3://<key>. */
  location: string
  storageMode: UploadStorageMode
}
@@ -0,0 +1,12 @@
1
+ import { createParamDecorator, ExecutionContext } from '@nestjs/common'
2
+
3
+ import type { UploadRequest } from './upload.types'
4
+
5
+ export const UploadedFileData = createParamDecorator((_data: unknown, ctx: ExecutionContext) => {
6
+ const req = ctx.switchToHttp().getRequest<UploadRequest>()
7
+ if (!req.uploadedFileData) {
8
+ throw new Error('Decorator @UploadedFileData must be used with @UploadGuard')
9
+ }
10
+
11
+ return req.uploadedFileData
12
+ })
@@ -0,0 +1,10 @@
1
+ {
2
+ "extends": "../../tsconfig.json",
3
+ "compilerOptions": {
4
+ "declaration": true,
5
+ "declarationMap": true,
6
+ "outDir": "../../dist/libs/upload"
7
+ },
8
+ "include": ["src/**/*"],
9
+ "exclude": ["node_modules", "dist", "test", "**/*spec.ts"]
10
+ }
@@ -0,0 +1,16 @@
1
import * as Generator from '@postxl/generator';
import { WithActions } from '../backend-actions';
import { WithBackend } from '../backend-core';
import { WithView } from '../backend-view';
import { WithTypes } from '../types';
/** Context capabilities this generator requires from upstream generators. */
type ContextRequirements = WithTypes<WithActions<WithView<WithBackend<Generator.Context>>>>;
/** Context shape after this generator has registered its additions. */
export type ContextResult = WithUpload<ContextRequirements>;
/** Extends a generator context with the upload module handle. */
export type WithUpload<Context> = Generator.ExtendContext<Context, {
    upload: UploadContext;
}>;
type UploadContext = {
    /** Importable reference to the generated UploadModule class. */
    module: Generator.ImportableClass;
};
/** Branded interface id of the backend-upload generator. */
export declare const generatorId: string & import("zod").$brand<"PXL.GeneratorInterfaceId">;
/** Generator that scaffolds the backend upload library into /backend/libs/upload. */
export declare const generator: Generator.GeneratorInterface;
export {};
@@ -0,0 +1,107 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.generator = exports.generatorId = void 0;
37
+ const path = __importStar(require("node:path"));
38
+ const Generator = __importStar(require("@postxl/generator"));
39
+ const generator_1 = require("@postxl/generator");
40
+ const backend_actions_1 = require("../backend-actions");
41
+ const backend_core_1 = require("../backend-core");
42
+ const backend_s3_1 = require("../backend-s3");
43
+ const backend_view_1 = require("../backend-view");
44
+ const types_1 = require("../types");
45
+ exports.generatorId = Generator.toGeneratorInterfaceId('backend-upload');
46
+ exports.generator = {
47
+ id: exports.generatorId,
48
+ requires: [
49
+ backend_core_1.backendGeneratorId,
50
+ backend_actions_1.backendActionsGeneratorId,
51
+ backend_view_1.backendViewGeneratorId,
52
+ backend_s3_1.backendS3GeneratorId,
53
+ types_1.typesGeneratorId,
54
+ ],
55
+ register: (context) => {
56
+ const module = {
57
+ name: Generator.toClassName('UploadModule'),
58
+ location: Generator.toBackendModuleLocation('@upload/upload.module'),
59
+ };
60
+ const uploadModule = {
61
+ name: Generator.toBackendModuleName('upload'),
62
+ moduleClass: module,
63
+ apiModuleRegistration: {
64
+ code: (0, generator_1.ts)('UploadModule.forRoot(config.upload)'),
65
+ },
66
+ envConfig: {
67
+ dotEnvExample: `
68
+ # Upload storage mode: none | local | s3
69
+ UPLOAD_STORAGE=none
70
+ # Path for local uploads if UPLOAD_STORAGE=local
71
+ UPLOAD_LOCAL_DIRECTORY="./tmp/uploads"
72
+ # If true, upload binaries are not persisted to local disk/S3
73
+ UPLOAD_DRY_RUN=true
74
+ # Default max upload size in bytes (30 MB)
75
+ UPLOAD_MAX_SIZE_BYTES=31457280`,
76
+ decoder: (0, generator_1.ts)(`
77
+ UPLOAD_STORAGE: z.enum(['none', 'local', 's3']).optional().default('none'),
78
+ UPLOAD_LOCAL_DIRECTORY: z.string().optional().default('./tmp/uploads'),
79
+ UPLOAD_DRY_RUN: zEnvBoolean.optional().default(true),
80
+ UPLOAD_MAX_SIZE_BYTES: z.coerce.number().int().positive().optional().default(30 * 1024 * 1024),
81
+ `),
82
+ transformer: (0, generator_1.ts)(`
83
+ upload: {
84
+ storage: val.UPLOAD_STORAGE,
85
+ localDirectory: val.UPLOAD_LOCAL_DIRECTORY,
86
+ dryRun: val.UPLOAD_DRY_RUN,
87
+ maxSizeBytes: val.UPLOAD_MAX_SIZE_BYTES,
88
+ }`),
89
+ },
90
+ };
91
+ context.backend.modules.push(uploadModule);
92
+ context.backend.packageJson.dependencies.push({ packageName: '@fastify/multipart', version: '9.2.1' });
93
+ return {
94
+ ...context,
95
+ upload: { module },
96
+ };
97
+ },
98
+ generate: async (context) => {
99
+ const vfs = new Generator.VirtualFileSystem();
100
+ await vfs.loadFolder({
101
+ diskPath: path.resolve(__dirname, './template'),
102
+ });
103
+ context.vfs.insertFromVfs({ targetPath: '/backend/libs/upload', vfs });
104
+ return context;
105
+ },
106
+ };
107
+ //# sourceMappingURL=upload.generator.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"upload.generator.js","sourceRoot":"","sources":["../../src/backend-upload/upload.generator.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,gDAAiC;AAEjC,6DAA8C;AAC9C,iDAAsC;AAEtC,wDAA2E;AAC3E,kDAA+E;AAC/E,8CAAoD;AACpD,kDAAkE;AAClE,oCAAsD;AAUzC,QAAA,WAAW,GAAG,SAAS,CAAC,sBAAsB,CAAC,gBAAgB,CAAC,CAAA;AAEhE,QAAA,SAAS,GAAiC;IACrD,EAAE,EAAE,mBAAW;IACf,QAAQ,EAAE;QACR,iCAAkB;QAClB,2CAAyB;QACzB,qCAAsB;QACtB,iCAAoB;QACpB,wBAAgB;KACjB;IAED,QAAQ,EAAE,CAAsC,OAAgB,EAAiB,EAAE;QACjF,MAAM,MAAM,GAA8B;YACxC,IAAI,EAAE,SAAS,CAAC,WAAW,CAAC,cAAc,CAAC;YAC3C,QAAQ,EAAE,SAAS,CAAC,uBAAuB,CAAC,uBAAuB,CAAC;SACrE,CAAA;QAED,MAAM,YAAY,GAAiB;YACjC,IAAI,EAAE,SAAS,CAAC,mBAAmB,CAAC,QAAQ,CAAC;YAC7C,WAAW,EAAE,MAAM;YACnB,qBAAqB,EAAE;gBACrB,IAAI,EAAE,IAAA,cAAE,EAAC,qCAAqC,CAAC;aAChD;YACD,SAAS,EAAE;gBACT,aAAa,EAAE;;;;;;;;yCAQkB;gBACjC,OAAO,EAAE,IAAA,cAAE,EAAC;;;;;SAKX,CAAC;gBACF,WAAW,EAAE,IAAA,cAAE,EAAC;;;;;;YAMZ,CAAC;aACN;SACF,CAAA;QAED,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;QAC1C,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,YAAY,CAAC,IAAI,CAAC,EAAE,WAAW,EAAE,oBAAoB,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAA;QAEtG,OAAO;YACL,GAAG,OAAO;YACV,MAAM,EAAE,EAAE,MAAM,EAAE;SACnB,CAAA;IACH,CAAC;IAED,QAAQ,EAAE,KAAK,EAAiC,OAAgB,EAAoB,EAAE;QACpF,MAAM,GAAG,GAAG,IAAI,SAAS,CAAC,iBAAiB,EAAE,CAAA;QAE7C,MAAM,GAAG,CAAC,UAAU,CAAC;YACnB,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,YAAY,CAAC;SAChD,CAAC,CAAA;QAEF,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE,UAAU,EAAE,sBAAsB,EAAE,GAAG,EAAE,CAAC,CAAA;QACtE,OAAO,OAAO,CAAA;IAChB,CAAC;CACF,CAAA"}
@@ -0,0 +1,65 @@
1
+ # Custom Commands for Claude Code
2
+
3
+ This directory contains custom command documentation that Claude Code can reference when assisting with development tasks.
4
+
5
+ ## Available Commands
6
+
7
+ ### 1. Run E2E Tests (`run-e2e-tests.md`)
8
+ Comprehensive instructions for running end-to-end tests using Playwright.
9
+
10
+ **Trigger phrases:**
11
+ - "run e2e tests"
12
+ - "run end-to-end tests"
13
+ - "execute e2e test suite"
14
+
15
+ ### 2. Fix CI (`fix-ci.md`)
16
+ Guidance for troubleshooting and fixing CI/CD pipeline issues.
17
+
18
+ **Trigger phrases:**
19
+ - "fix ci"
20
+ - "help with continuous integration"
21
+
22
+ ### 3. Browser Observe (`browser-observe.md`)
23
+ Instructions for browser-based testing and observation workflows.
24
+
25
+ **Trigger phrases:**
26
+ - "observe browser"
27
+ - "start browser observation"
28
+
29
+ ### 4. Prepare E2E Tests (`prepare-e2e-tests.md`)
30
+ Analyzes code changes, determines needed E2E test coverage, writes test specs, and runs them to verify they pass.
31
+
32
+ **Trigger phrases:**
33
+ - "prepare e2e tests"
34
+ - "write e2e tests for my changes"
35
+ - "add e2e test coverage"
36
+
37
+ ## How It Works
38
+
39
+ When you ask Claude to perform one of these tasks, it will:
40
+ 1. Automatically detect the request based on your natural language
41
+ 2. Reference the appropriate command file
42
+ 3. Follow the detailed instructions in that file
43
+ 4. Execute the necessary steps to complete the task
44
+
45
+ ## Adding New Commands
46
+
47
+ To add a new custom command:
48
+
49
+ 1. Create a new markdown file in this directory with a descriptive name (e.g., `deploy-staging.md`)
50
+ 2. Write clear, detailed instructions including:
51
+ - Overview of the task
52
+ - Prerequisites
53
+ - Step-by-step instructions
54
+ - Common issues and troubleshooting
55
+ - Examples
56
+ 3. Update this README to list the new command
57
+ 4. Commit all files to version control
58
+
59
+ ## Best Practices
60
+
61
+ - **Be specific**: Include exact commands, file paths, and configuration details
62
+ - **Include context**: Explain why certain steps are necessary
63
+ - **Add troubleshooting**: Document common errors and solutions
64
+ - **Use examples**: Show concrete examples of usage
65
+ - **Keep updated**: Review and update commands as the project evolves
@@ -4,6 +4,8 @@
4
4
  "Read CLAUDE.md for project-specific guidance before making changes",
5
5
  "This is a PostXL-generated project - see the main PostXL monorepo CLAUDE.md for full documentation",
6
6
  "Always run type checks after code changes: pnpm run test:types",
7
- "Use @postxl/ui-components for all UI elements - never create custom generic UI primitives"
7
+ "Use @postxl/ui-components for all UI elements - never create custom generic UI primitives",
8
+ "When asked to 'run e2e tests', 'run end-to-end tests', or 'execute e2e': ALWAYS read the detailed instructions from .claude/commands/run-e2e-tests.md first, then run the automated script at scripts/e2e.sh from the project root",
9
+ "When significant frontend or backend changes are made (new pages, routes, forms, tables, or API endpoints), proactively suggest running /prepare-e2e-tests to ensure E2E test coverage"
8
10
  ]
9
11
  }
@@ -0,0 +1,22 @@
1
+ {
2
+ "prompts": [
3
+ {
4
+ "name": "Run E2E Tests",
5
+ "description": "Run end-to-end tests using Playwright",
6
+ "path": "../.claude/commands/run-e2e-tests.md",
7
+ "tags": ["testing", "e2e", "playwright"]
8
+ },
9
+ {
10
+ "name": "Fix CI",
11
+ "description": "Help fix CI/CD pipeline issues",
12
+ "path": "../.claude/commands/fix-ci.md",
13
+ "tags": ["ci", "devops"]
14
+ },
15
+ {
16
+ "name": "Browser Observe",
17
+ "description": "Browser testing and observation workflows",
18
+ "path": "../.claude/commands/browser-observe.md",
19
+ "tags": ["testing", "browser"]
20
+ }
21
+ ]
22
+ }
@@ -54,7 +54,22 @@ exports.generator = {
54
54
  },
55
55
  generate: async (context) => {
56
56
  const vfs = new Generator.VirtualFileSystem();
57
- const templateContext = { schema: context.schema };
57
+ const { projectType, slug } = context.schema;
58
+ const isWorkspace = projectType === 'workspace';
59
+ // Template context with project-type-aware values
60
+ const templateContext = {
61
+ schema: context.schema,
62
+ // For e2e.sh
63
+ monorepoRootExpr: isWorkspace ? '"$(cd "$PROJECT_DIR/../.." && pwd)"' : '"$PROJECT_DIR"',
64
+ dockerWorkDir: isWorkspace ? `/pxl/projects/${slug}/e2e` : '/pxl/e2e',
65
+ nodeShimsCleanup: isWorkspace
66
+ ? 'rm -f /pxl/node_modules/.bin/node /pxl/projects/*/node_modules/.bin/node 2>/dev/null'
67
+ : 'rm -f /pxl/node_modules/.bin/node 2>/dev/null',
68
+ // For run-e2e-tests.md
69
+ cdProjectDir: isWorkspace ? `cd projects/${slug}\n` : '',
70
+ backendPath: isWorkspace ? `projects/${slug}/backend` : 'backend',
71
+ frontendPath: isWorkspace ? `projects/${slug}/frontend` : 'frontend',
72
+ };
58
73
  // Load e2e folder, excluding files that need template substitution
59
74
  await vfs.loadFolder({
60
75
  diskPath: path.resolve(__dirname, './template/e2e'),
@@ -70,6 +85,33 @@ exports.generator = {
70
85
  throw new Error(`Failed to generate package.json: ${packageJsonContent.unwrapErr().message}`);
71
86
  }
72
87
  vfs.write(`/e2e/${PACKAGE_JSON_FILENAME}`, packageJsonContent.unwrap());
88
+ // Generate e2e.sh with project-type-aware paths
89
+ const e2eShContent = await Generator.generateFromTemplate({
90
+ file: path.resolve(__dirname, './template/scripts/e2e.sh'),
91
+ context: templateContext,
92
+ });
93
+ if (e2eShContent.isErr()) {
94
+ throw new Error(`Failed to generate e2e.sh: ${e2eShContent.unwrapErr().message}`);
95
+ }
96
+ vfs.write('/scripts/e2e.sh', e2eShContent.unwrap());
97
+ // Generate run-e2e-tests.md with project-type-aware paths
98
+ const runE2eTestsMdContent = await Generator.generateFromTemplate({
99
+ file: path.resolve(__dirname, './template/.claude/commands/run-e2e-tests.md'),
100
+ context: templateContext,
101
+ });
102
+ if (runE2eTestsMdContent.isErr()) {
103
+ throw new Error(`Failed to generate run-e2e-tests.md: ${runE2eTestsMdContent.unwrapErr().message}`);
104
+ }
105
+ vfs.write('/.claude/commands/run-e2e-tests.md', runE2eTestsMdContent.unwrap());
106
+ // Generate prepare-e2e-tests.md with project-type-aware paths
107
+ const prepareE2eTestsMdContent = await Generator.generateFromTemplate({
108
+ file: path.resolve(__dirname, './template/.claude/commands/prepare-e2e-tests.md'),
109
+ context: templateContext,
110
+ });
111
+ if (prepareE2eTestsMdContent.isErr()) {
112
+ throw new Error(`Failed to generate prepare-e2e-tests.md: ${prepareE2eTestsMdContent.unwrapErr().message}`);
113
+ }
114
+ vfs.write('/.claude/commands/prepare-e2e-tests.md', prepareE2eTestsMdContent.unwrap());
73
115
  // write dynamic files
74
116
  vfs.write('/e2e/support/model-test-ids.ts', (0, model_test_id_generator_1.generateModelTestIds)(context.e2e));
75
117
  vfs.write('/scripts/docker.sh', (0, docker_sh_generator_1.generateDockerSh)(context));