alepha 0.11.10 → 0.11.11
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/README.md +6 -0
- package/dist/index.cjs +1692 -408
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +1622 -338
- package/dist/index.js.map +1 -1
- package/package.json +12 -6
- package/src/assets/tsconfigJson.ts +1 -0
- package/src/commands/BiomeCommands.ts +5 -24
- package/src/commands/CoreCommands.ts +41 -138
- package/src/commands/DrizzleCommands.ts +18 -193
- package/src/commands/VerifyCommands.ts +2 -6
- package/src/commands/ViteCommands.ts +24 -57
- package/src/services/ProjectUtils.ts +508 -0
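Most of the growth in `dist/index.js` comes from the newly bundled `alepha.file` module (FileDetector, FileSystemProvider, NodeFileSystemProvider) and the new ProjectUtils CLI service shown in the diff below. As a quick orientation before the raw diff, here is a minimal sketch of that file API, based only on the JSDoc examples visible in the bundled output; the `@alepha/file` import path and the `alepha` container instance are assumptions, not confirmed by this diff.

```typescript
// Sketch assuming an existing Alepha container named `alepha`
// and that the bundled services are exported from "@alepha/file".
import { createReadStream } from "node:fs";
import { FileDetector, NodeFileSystemProvider } from "@alepha/file";

const detector = alepha.inject(FileDetector);
const files = alepha.inject(NodeFileSystemProvider);

// Content type by extension, then verification against magic bytes.
const mimeType = detector.getContentType("image.png"); // "image/png"
const result = await detector.detectFileType(createReadStream("image.png"), "image.png");
console.log(result.mimeType, result.verified);

// FileLike creation and basic file-system helpers.
const greeting = files.createFile({ text: "Hello, world!", name: "greeting.txt" });
await files.mkdir("/tmp/mydir", { recursive: true });
await files.writeFile("/tmp/mydir/greeting.txt", await greeting.text());
```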
package/dist/index.js
CHANGED
@@ -5,15 +5,16 @@ import { AsyncLocalStorage } from "node:async_hooks";
 import cluster from "node:cluster";
 import { cpus } from "node:os";
 import * as fs from "node:fs/promises";
-import { access, cp, glob, mkdir, readFile, rm, writeFile } from "node:fs/promises";
+import { access, copyFile, cp, glob, mkdir, readFile, readdir, rename, rm, stat, writeFile } from "node:fs/promises";
 import { join } from "node:path";
 import { stdin, stdout } from "node:process";
 import { createInterface } from "node:readline/promises";
 import { exec, spawn } from "node:child_process";
-import { Readable } from "node:stream";
-import { pipeline } from "stream/promises";
+import { PassThrough, Readable } from "node:stream";
+import { pipeline } from "node:stream/promises";
+import { createReadStream, readFileSync } from "node:fs";
+import { fileURLToPath } from "node:url";
 import * as tar from "tar";
-import { readFileSync } from "node:fs";
 import { tsImport } from "tsx/esm/api";
 
 //#region rolldown:runtime
@@ -14796,7 +14797,8 @@ var CliProvider = class {
 run: runner.run,
 ask: this.asker.ask,
 fs,
-glob
+glob,
+root: process.cwd()
 };
 await command.options.handler(args);
 if (command.options.summary !== false) runner.summary();
@@ -15007,42 +15009,6 @@ const AlephaCommand = $module({
 ]
 });
 
-//#endregion
-//#region src/assets/biomeJson.ts
-const biomeJson = `
-{
-"$schema": "https://biomejs.dev/schemas/latest/schema.json",
-"vcs": {
-"enabled": true,
-"clientKind": "git"
-},
-"files": {
-"ignoreUnknown": true,
-"includes": ["**", "!node_modules", "!dist"]
-},
-"formatter": {
-"enabled": true,
-"indentStyle": "space"
-},
-"linter": {
-"enabled": true,
-"rules": {
-"recommended": true
-},
-"domains": {
-"react": "recommended"
-}
-},
-"assist": {
-"actions": {
-"source": {
-"organizeImports": "on"
-}
-}
-}
-}
-`.trim();
-
 //#endregion
 //#region src/services/ProcessRunner.ts
 /**
@@ -15111,18 +15077,1567 @@ var ProcessRunner = class {
 }
 };
 
+//#endregion
+//#region ../file/src/providers/FileSystemProvider.ts
+/**
+* FileSystem interface providing utilities for working with files.
+*/
+var FileSystemProvider = class {};
+
+//#endregion
+//#region ../file/src/services/FileDetector.ts
+/**
+* Service for detecting file types and getting content types.
+*
+* @example
+* ```typescript
+* const detector = alepha.inject(FileDetector);
+*
+* // Get content type from filename
+* const mimeType = detector.getContentType("image.png"); // "image/png"
+*
+* // Detect file type by magic bytes
+* const stream = createReadStream('image.png');
+* const result = await detector.detectFileType(stream, 'image.png');
+* console.log(result.mimeType); // 'image/png'
+* console.log(result.verified); // true if magic bytes match
+* ```
+*/
+var FileDetector = class FileDetector {
+/**
+* Magic byte signatures for common file formats.
+* Each signature is represented as an array of bytes or null (wildcard).
+*/
+static MAGIC_BYTES = {
+png: [{
+signature: [
+137,
+80,
+78,
+71,
+13,
+10,
+26,
+10
+],
+mimeType: "image/png"
+}],
+jpg: [
+{
+signature: [
+255,
+216,
+255,
+224
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+225
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+226
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+227
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+232
+],
+mimeType: "image/jpeg"
+}
+],
+jpeg: [
+{
+signature: [
+255,
+216,
+255,
+224
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+225
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+226
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+227
+],
+mimeType: "image/jpeg"
+},
+{
+signature: [
+255,
+216,
+255,
+232
+],
+mimeType: "image/jpeg"
+}
+],
+gif: [{
+signature: [
+71,
+73,
+70,
+56,
+55,
+97
+],
+mimeType: "image/gif"
+}, {
+signature: [
+71,
+73,
+70,
+56,
+57,
+97
+],
+mimeType: "image/gif"
+}],
+webp: [{
+signature: [
+82,
+73,
+70,
+70,
+null,
+null,
+null,
+null,
+87,
+69,
+66,
+80
+],
+mimeType: "image/webp"
+}],
+bmp: [{
+signature: [66, 77],
+mimeType: "image/bmp"
+}],
+ico: [{
+signature: [
+0,
+0,
+1,
+0
+],
+mimeType: "image/x-icon"
+}],
+tiff: [{
+signature: [
+73,
+73,
+42,
+0
+],
+mimeType: "image/tiff"
+}, {
+signature: [
+77,
+77,
+0,
+42
+],
+mimeType: "image/tiff"
+}],
+tif: [{
+signature: [
+73,
+73,
+42,
+0
+],
+mimeType: "image/tiff"
+}, {
+signature: [
+77,
+77,
+0,
+42
+],
+mimeType: "image/tiff"
+}],
+pdf: [{
+signature: [
+37,
+80,
+68,
+70,
+45
+],
+mimeType: "application/pdf"
+}],
+zip: [
+{
+signature: [
+80,
+75,
+3,
+4
+],
+mimeType: "application/zip"
+},
+{
+signature: [
+80,
+75,
+5,
+6
+],
+mimeType: "application/zip"
+},
+{
+signature: [
+80,
+75,
+7,
+8
+],
+mimeType: "application/zip"
+}
+],
+rar: [{
+signature: [
+82,
+97,
+114,
+33,
+26,
+7
+],
+mimeType: "application/vnd.rar"
+}],
+"7z": [{
+signature: [
+55,
+122,
+188,
+175,
+39,
+28
+],
+mimeType: "application/x-7z-compressed"
+}],
+tar: [{
+signature: [
+117,
+115,
+116,
+97,
+114
+],
+mimeType: "application/x-tar"
+}],
+gz: [{
+signature: [31, 139],
+mimeType: "application/gzip"
+}],
+tgz: [{
+signature: [31, 139],
+mimeType: "application/gzip"
+}],
+mp3: [
+{
+signature: [255, 251],
+mimeType: "audio/mpeg"
+},
+{
+signature: [255, 243],
+mimeType: "audio/mpeg"
+},
+{
+signature: [255, 242],
+mimeType: "audio/mpeg"
+},
+{
+signature: [
+73,
+68,
+51
+],
+mimeType: "audio/mpeg"
+}
+],
+wav: [{
+signature: [
+82,
+73,
+70,
+70,
+null,
+null,
+null,
+null,
+87,
+65,
+86,
+69
+],
+mimeType: "audio/wav"
+}],
+ogg: [{
+signature: [
+79,
+103,
+103,
+83
+],
+mimeType: "audio/ogg"
+}],
+flac: [{
+signature: [
+102,
+76,
+97,
+67
+],
+mimeType: "audio/flac"
+}],
+mp4: [
+{
+signature: [
+null,
+null,
+null,
+null,
+102,
+116,
+121,
+112
+],
+mimeType: "video/mp4"
+},
+{
+signature: [
+null,
+null,
+null,
+null,
+102,
+116,
+121,
+112,
+105,
+115,
+111,
+109
+],
+mimeType: "video/mp4"
+},
+{
+signature: [
+null,
+null,
+null,
+null,
+102,
+116,
+121,
+112,
+109,
+112,
+52,
+50
+],
+mimeType: "video/mp4"
+}
+],
+webm: [{
+signature: [
+26,
+69,
+223,
+163
+],
+mimeType: "video/webm"
+}],
+avi: [{
+signature: [
+82,
+73,
+70,
+70,
+null,
+null,
+null,
+null,
+65,
+86,
+73,
+32
+],
+mimeType: "video/x-msvideo"
+}],
+mov: [{
+signature: [
+null,
+null,
+null,
+null,
+102,
+116,
+121,
+112,
+113,
+116,
+32,
+32
+],
+mimeType: "video/quicktime"
+}],
+mkv: [{
+signature: [
+26,
+69,
+223,
+163
+],
+mimeType: "video/x-matroska"
+}],
+docx: [{
+signature: [
+80,
+75,
+3,
+4
+],
+mimeType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+}],
+xlsx: [{
+signature: [
+80,
+75,
+3,
+4
+],
+mimeType: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+}],
+pptx: [{
+signature: [
+80,
+75,
+3,
+4
+],
+mimeType: "application/vnd.openxmlformats-officedocument.presentationml.presentation"
+}],
+doc: [{
+signature: [
+208,
+207,
+17,
+224,
+161,
+177,
+26,
+225
+],
+mimeType: "application/msword"
+}],
+xls: [{
+signature: [
+208,
+207,
+17,
+224,
+161,
+177,
+26,
+225
+],
+mimeType: "application/vnd.ms-excel"
+}],
+ppt: [{
+signature: [
+208,
+207,
+17,
+224,
+161,
+177,
+26,
+225
+],
+mimeType: "application/vnd.ms-powerpoint"
+}]
+};
+/**
+* All possible format signatures for checking against actual file content
+*/
+static ALL_SIGNATURES = Object.entries(FileDetector.MAGIC_BYTES).flatMap(([ext, signatures]) => signatures.map((sig) => ({
+ext,
+...sig
+})));
+/**
+* MIME type map for file extensions.
+*
+* Can be used to get the content type of file based on its extension.
+* Feel free to add more mime types in your project!
+*/
+static mimeMap = {
+json: "application/json",
+txt: "text/plain",
+html: "text/html",
+htm: "text/html",
+xml: "application/xml",
+csv: "text/csv",
+pdf: "application/pdf",
+md: "text/markdown",
+markdown: "text/markdown",
+rtf: "application/rtf",
+css: "text/css",
+js: "application/javascript",
+mjs: "application/javascript",
+ts: "application/typescript",
+jsx: "text/jsx",
+tsx: "text/tsx",
+zip: "application/zip",
+rar: "application/vnd.rar",
+"7z": "application/x-7z-compressed",
+tar: "application/x-tar",
+gz: "application/gzip",
+tgz: "application/gzip",
+png: "image/png",
+jpg: "image/jpeg",
+jpeg: "image/jpeg",
+gif: "image/gif",
+webp: "image/webp",
+svg: "image/svg+xml",
+bmp: "image/bmp",
+ico: "image/x-icon",
+tiff: "image/tiff",
+tif: "image/tiff",
+mp3: "audio/mpeg",
+wav: "audio/wav",
+ogg: "audio/ogg",
+m4a: "audio/mp4",
+aac: "audio/aac",
+flac: "audio/flac",
+mp4: "video/mp4",
+webm: "video/webm",
+avi: "video/x-msvideo",
+mov: "video/quicktime",
+wmv: "video/x-ms-wmv",
+flv: "video/x-flv",
+mkv: "video/x-matroska",
+doc: "application/msword",
+docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+xls: "application/vnd.ms-excel",
+xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+ppt: "application/vnd.ms-powerpoint",
+pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
+woff: "font/woff",
+woff2: "font/woff2",
+ttf: "font/ttf",
+otf: "font/otf",
+eot: "application/vnd.ms-fontobject"
+};
+/**
+* Returns the content type of file based on its filename.
+*
+* @param filename - The filename to check
+* @returns The MIME type
+*
+* @example
+* ```typescript
+* const detector = alepha.inject(FileDetector);
+* const mimeType = detector.getContentType("image.png"); // "image/png"
+* ```
+*/
+getContentType(filename) {
+const ext = filename.toLowerCase().split(".").pop() || "";
+return FileDetector.mimeMap[ext] || "application/octet-stream";
+}
+/**
+* Detects the file type by checking magic bytes against the stream content.
+*
+* @param stream - The readable stream to check
+* @param filename - The filename (used to get the extension)
+* @returns File type information including MIME type, extension, and verification status
+*
+* @example
+* ```typescript
+* const detector = alepha.inject(FileDetector);
+* const stream = createReadStream('image.png');
+* const result = await detector.detectFileType(stream, 'image.png');
+* console.log(result.mimeType); // 'image/png'
+* console.log(result.verified); // true if magic bytes match
+* ```
+*/
+async detectFileType(stream, filename) {
+const expectedMimeType = this.getContentType(filename);
+const lastDotIndex = filename.lastIndexOf(".");
+const ext = lastDotIndex > 0 ? filename.substring(lastDotIndex + 1).toLowerCase() : "";
+const { buffer, stream: newStream } = await this.peekBytes(stream, 16);
+const expectedSignatures = FileDetector.MAGIC_BYTES[ext];
+if (expectedSignatures) {
+for (const { signature, mimeType } of expectedSignatures) if (this.matchesSignature(buffer, signature)) return {
+mimeType,
+extension: ext,
+verified: true,
+stream: newStream
+};
+}
+for (const { ext: detectedExt, signature, mimeType } of FileDetector.ALL_SIGNATURES) if (detectedExt !== ext && this.matchesSignature(buffer, signature)) return {
+mimeType,
+extension: detectedExt,
+verified: true,
+stream: newStream
+};
+return {
+mimeType: expectedMimeType,
+extension: ext,
+verified: false,
+stream: newStream
+};
+}
+/**
+* Reads all bytes from a stream and returns the first N bytes along with a new stream containing all data.
+* This approach reads the entire stream upfront to avoid complex async handling issues.
+*
+* @protected
+*/
+async peekBytes(stream, numBytes) {
+const chunks = [];
+for await (const chunk of stream) chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
+const allData = Buffer.concat(chunks);
+return {
+buffer: allData.subarray(0, numBytes),
+stream: Readable.from(allData)
+};
+}
+/**
+* Checks if a buffer matches a magic byte signature.
+*
+* @protected
+*/
+matchesSignature(buffer, signature) {
+if (buffer.length < signature.length) return false;
+for (let i = 0; i < signature.length; i++) if (signature[i] !== null && buffer[i] !== signature[i]) return false;
+return true;
+}
+};
+
+//#endregion
+//#region ../file/src/providers/NodeFileSystemProvider.ts
+/**
+* Node.js implementation of FileSystem interface.
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // Create from URL
+* const file1 = fs.createFile({ url: "file:///path/to/file.png" });
+*
+* // Create from Buffer
+* const file2 = fs.createFile({ buffer: Buffer.from("hello"), name: "hello.txt" });
+*
+* // Create from text
+* const file3 = fs.createFile({ text: "Hello, world!", name: "greeting.txt" });
+*
+* // File operations
+* await fs.mkdir("/tmp/mydir", { recursive: true });
+* await fs.cp("/src/file.txt", "/dest/file.txt");
+* await fs.mv("/old/path.txt", "/new/path.txt");
+* const files = await fs.ls("/tmp");
+* await fs.rm("/tmp/file.txt");
+* ```
+*/
+var NodeFileSystemProvider = class {
+detector = $inject(FileDetector);
+/**
+* Creates a FileLike object from various sources.
+*
+* @param options - Options for creating the file
+* @returns A FileLike object
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // From URL
+* const file1 = fs.createFile({ url: "https://example.com/image.png" });
+*
+* // From Buffer
+* const file2 = fs.createFile({
+* buffer: Buffer.from("hello"),
+* name: "hello.txt",
+* type: "text/plain"
+* });
+*
+* // From text
+* const file3 = fs.createFile({ text: "Hello!", name: "greeting.txt" });
+*
+* // From stream with detection
+* const stream = createReadStream("/path/to/file.png");
+* const file4 = fs.createFile({ stream, name: "image.png" });
+* ```
+*/
+createFile(options) {
+if ("url" in options) return this.createFileFromUrl(options.url, {
+type: options.type,
+name: options.name
+});
+if ("file" in options) return this.createFileFromWebFile(options.file, {
+type: options.type,
+name: options.name,
+size: options.size
+});
+if ("buffer" in options) return this.createFileFromBuffer(options.buffer, {
+type: options.type,
+name: options.name
+});
+if ("arrayBuffer" in options) return this.createFileFromBuffer(Buffer.from(options.arrayBuffer), {
+type: options.type,
+name: options.name
+});
+if ("text" in options) return this.createFileFromBuffer(Buffer.from(options.text, "utf-8"), {
+type: options.type || "text/plain",
+name: options.name || "file.txt"
+});
+if ("stream" in options) return this.createFileFromStream(options.stream, {
+type: options.type,
+name: options.name,
+size: options.size
+});
+throw new AlephaError("Invalid createFile options: no valid source provided");
+}
+/**
+* Removes a file or directory.
+*
+* @param path - The path to remove
+* @param options - Remove options
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // Remove a file
+* await fs.rm("/tmp/file.txt");
+*
+* // Remove a directory recursively
+* await fs.rm("/tmp/mydir", { recursive: true });
+*
+* // Remove with force (no error if doesn't exist)
+* await fs.rm("/tmp/maybe-exists.txt", { force: true });
+* ```
+*/
+async rm(path, options) {
+await rm(path, options);
+}
+/**
+* Copies a file or directory.
+*
+* @param src - Source path
+* @param dest - Destination path
+* @param options - Copy options
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // Copy a file
+* await fs.cp("/src/file.txt", "/dest/file.txt");
+*
+* // Copy a directory recursively
+* await fs.cp("/src/dir", "/dest/dir", { recursive: true });
+*
+* // Copy with force (overwrite existing)
+* await fs.cp("/src/file.txt", "/dest/file.txt", { force: true });
+* ```
+*/
+async cp(src, dest, options) {
+if ((await stat(src)).isDirectory()) {
+if (!options?.recursive) throw new Error(`Cannot copy directory without recursive option: ${src}`);
+await cp(src, dest, {
+recursive: true,
+force: options?.force ?? false
+});
+} else await copyFile(src, dest);
+}
+/**
+* Moves/renames a file or directory.
+*
+* @param src - Source path
+* @param dest - Destination path
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // Move/rename a file
+* await fs.mv("/old/path.txt", "/new/path.txt");
+*
+* // Move a directory
+* await fs.mv("/old/dir", "/new/dir");
+* ```
+*/
+async mv(src, dest) {
+await rename(src, dest);
+}
+/**
+* Creates a directory.
+*
+* @param path - The directory path to create
+* @param options - Mkdir options
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // Create a directory
+* await fs.mkdir("/tmp/mydir");
+*
+* // Create nested directories
+* await fs.mkdir("/tmp/path/to/dir", { recursive: true });
+*
+* // Create with specific permissions
+* await fs.mkdir("/tmp/mydir", { mode: 0o755 });
+* ```
+*/
+async mkdir(path, options) {
+await mkdir(path, options);
+}
+/**
+* Lists files in a directory.
+*
+* @param path - The directory path to list
+* @param options - List options
+* @returns Array of filenames
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // List files in a directory
+* const files = await fs.ls("/tmp");
+* console.log(files); // ["file1.txt", "file2.txt", "subdir"]
+*
+* // List with hidden files
+* const allFiles = await fs.ls("/tmp", { hidden: true });
+*
+* // List recursively
+* const allFilesRecursive = await fs.ls("/tmp", { recursive: true });
+* ```
+*/
+async ls(path, options) {
+const entries = await readdir(path);
+const filteredEntries = options?.hidden ? entries : entries.filter((e) => !e.startsWith("."));
+if (options?.recursive) {
+const allFiles = [];
+for (const entry of filteredEntries) {
+const fullPath = join(path, entry);
+if ((await stat(fullPath)).isDirectory()) {
+allFiles.push(entry);
+const subFiles = await this.ls(fullPath, options);
+allFiles.push(...subFiles.map((f) => join(entry, f)));
+} else allFiles.push(entry);
+}
+return allFiles;
+}
+return filteredEntries;
+}
+/**
+* Checks if a file or directory exists.
+*
+* @param path - The path to check
+* @returns True if the path exists, false otherwise
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* if (await fs.exists("/tmp/file.txt")) {
+* console.log("File exists");
+* }
+* ```
+*/
+async exists(path) {
+try {
+await access(path);
+return true;
+} catch {
+return false;
+}
+}
+/**
+* Reads the content of a file.
+*
+* @param path - The file path to read
+* @returns The file content as a Buffer
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* const buffer = await fs.readFile("/tmp/file.txt");
+* console.log(buffer.toString("utf-8"));
+* ```
+*/
+async readFile(path) {
+return await readFile(path);
+}
+/**
+* Writes data to a file.
+*
+* @param path - The file path to write to
+* @param data - The data to write (Buffer or string)
+*
+* @example
+* ```typescript
+* const fs = alepha.inject(NodeFileSystemProvider);
+*
+* // Write string
+* await fs.writeFile("/tmp/file.txt", "Hello, world!");
+*
+* // Write Buffer
+* await fs.writeFile("/tmp/file.bin", Buffer.from([0x01, 0x02, 0x03]));
+* ```
+*/
+async writeFile(path, data) {
+await writeFile(path, data);
+}
+/**
+* Creates a FileLike object from a Web File.
+*
+* @protected
+*/
+createFileFromWebFile(source, options = {}) {
+const name = options.name ?? source.name;
+return {
+name,
+type: options.type ?? (source.type || this.detector.getContentType(name)),
+size: options.size ?? source.size ?? 0,
+lastModified: source.lastModified || Date.now(),
+stream: () => source.stream(),
+arrayBuffer: async () => {
+return await source.arrayBuffer();
+},
+text: async () => {
+return await source.text();
+}
+};
+}
+/**
+* Creates a FileLike object from a Buffer.
+*
+* @protected
+*/
+createFileFromBuffer(source, options = {}) {
+const name = options.name ?? "file";
+return {
+name,
+type: options.type ?? this.detector.getContentType(options.name ?? name),
+size: source.byteLength,
+lastModified: Date.now(),
+stream: () => Readable.from(source),
+arrayBuffer: async () => {
+return this.bufferToArrayBuffer(source);
+},
+text: async () => {
+return source.toString("utf-8");
+}
+};
+}
+/**
+* Creates a FileLike object from a stream.
+*
+* @protected
+*/
+createFileFromStream(source, options = {}) {
+let buffer = null;
+return {
+name: options.name ?? "file",
+type: options.type ?? this.detector.getContentType(options.name ?? "file"),
+size: options.size ?? 0,
+lastModified: Date.now(),
+stream: () => source,
+_buffer: null,
+arrayBuffer: async () => {
+buffer ??= await this.streamToBuffer(source);
+return this.bufferToArrayBuffer(buffer);
+},
+text: async () => {
+buffer ??= await this.streamToBuffer(source);
+return buffer.toString("utf-8");
+}
+};
+}
+/**
+* Creates a FileLike object from a URL.
+*
+* @protected
+*/
+createFileFromUrl(url, options = {}) {
+const parsedUrl = new URL(url);
+const filename = options.name || parsedUrl.pathname.split("/").pop() || "file";
+let buffer = null;
+return {
+name: filename,
+type: options.type ?? this.detector.getContentType(filename),
+size: 0,
+lastModified: Date.now(),
+stream: () => this.createStreamFromUrl(url),
+arrayBuffer: async () => {
+buffer ??= await this.loadFromUrl(url);
+return this.bufferToArrayBuffer(buffer);
+},
+text: async () => {
+buffer ??= await this.loadFromUrl(url);
+return buffer.toString("utf-8");
+},
+filepath: url
+};
+}
+/**
+* Gets a streaming response from a URL.
+*
+* @protected
+*/
+getStreamingResponse(url) {
+const stream = new PassThrough();
+fetch(url).then((res) => Readable.fromWeb(res.body).pipe(stream)).catch((err) => stream.destroy(err));
+return stream;
+}
+/**
+* Loads data from a URL.
+*
+* @protected
+*/
+async loadFromUrl(url) {
+const parsedUrl = new URL(url);
+if (parsedUrl.protocol === "file:") return await readFile(fileURLToPath(url));
+else if (parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:") {
+const response = await fetch(url);
+if (!response.ok) throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
+const arrayBuffer = await response.arrayBuffer();
+return Buffer.from(arrayBuffer);
+} else throw new Error(`Unsupported protocol: ${parsedUrl.protocol}`);
+}
+/**
+* Creates a stream from a URL.
+*
+* @protected
+*/
+createStreamFromUrl(url) {
+const parsedUrl = new URL(url);
+if (parsedUrl.protocol === "file:") return createReadStream(fileURLToPath(url));
+else if (parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:") return this.getStreamingResponse(url);
+else throw new AlephaError(`Unsupported protocol: ${parsedUrl.protocol}`);
+}
+/**
+* Converts a stream-like object to a Buffer.
+*
+* @protected
+*/
+async streamToBuffer(streamLike) {
+const stream = streamLike instanceof Readable ? streamLike : Readable.fromWeb(streamLike);
+return new Promise((resolve, reject) => {
+const buffer = [];
+stream.on("data", (chunk) => buffer.push(Buffer.from(chunk)));
+stream.on("end", () => resolve(Buffer.concat(buffer)));
+stream.on("error", (err) => reject(new AlephaError("Error converting stream", { cause: err })));
+});
+}
+/**
+* Converts a Node.js Buffer to an ArrayBuffer.
+*
+* @protected
+*/
+bufferToArrayBuffer(buffer) {
+return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
+}
+};
+
+//#endregion
+//#region ../file/src/index.ts
+/**
+* Provides file system capabilities for Alepha applications with support for multiple file sources and operations.
+*
+* The file module enables working with files from various sources (URLs, buffers, streams) and provides
+* utilities for file type detection, content type determination, and common file system operations.
+*
+* @see {@link FileDetector}
+* @see {@link FileSystemProvider}
+* @see {@link NodeFileSystemProvider}
+* @module alepha.file
+*/
+const AlephaFile = $module({
+name: "alepha.file",
+descriptors: [],
+services: [
+FileDetector,
+FileSystemProvider,
+NodeFileSystemProvider
+],
+register: (alepha$1) => alepha$1.with({
+optional: true,
+provide: FileSystemProvider,
+use: NodeFileSystemProvider
+})
+});
+
+//#endregion
+//#region src/assets/biomeJson.ts
+const biomeJson = `
+{
+"$schema": "https://biomejs.dev/schemas/latest/schema.json",
+"vcs": {
+"enabled": true,
+"clientKind": "git"
+},
+"files": {
+"ignoreUnknown": true,
+"includes": ["**", "!node_modules", "!dist"]
+},
+"formatter": {
+"enabled": true,
+"indentStyle": "space"
+},
+"linter": {
+"enabled": true,
+"rules": {
+"recommended": true
+},
+"domains": {
+"react": "recommended"
+}
+},
+"assist": {
+"actions": {
+"source": {
+"organizeImports": "on"
+}
+}
+}
+}
+`.trim();
+
+//#endregion
+//#region src/assets/tsconfigJson.ts
+const tsconfigJson = `
+{
+"compilerOptions": {
+"module": "nodenext",
+"target": "esnext",
+"strict": true,
+"jsx": "react-jsx",
+"verbatimModuleSyntax": true,
+"isolatedModules": true,
+"moduleDetection": "force",
+"skipLibCheck": true,
+"resolveJsonModule": true,
+"noEmit": true,
+"allowImportingTsExtensions": true
+},
+"exclude": ["node_modules", "dist"]
+}
+`.trim();
+
+//#endregion
+//#region src/assets/viteConfigTs.ts
+const viteConfigTs = (serverEntry) => `
+import { viteAlepha } from "@alepha/vite";
+
+export default {
+plugins: [
+viteAlepha(${serverEntry ? `{ serverEntry: "${serverEntry}" }` : ""}),
+],
+test: {
+globals: true,
+},
+};
+`.trim();
+
+//#endregion
+//#region src/version.ts
+const packageJson = JSON.parse(readFileSync(new URL("../package.json", import.meta.url), "utf-8"));
+const version = packageJson.version;
+
+//#endregion
//#region src/services/ProjectUtils.ts
|
|
16304
|
+
/**
|
|
16305
|
+
* Utility service for common project operations used by CLI commands.
|
|
16306
|
+
*
|
|
16307
|
+
* This service provides helper methods for:
|
|
16308
|
+
* - Project configuration file management (tsconfig.json, package.json, etc.)
|
|
16309
|
+
* - Package manager setup (Yarn, npm, pnpm)
|
|
16310
|
+
* - Sample project downloading
|
|
16311
|
+
* - Drizzle ORM/Kit utilities
|
|
16312
|
+
* - Alepha instance loading
|
|
16313
|
+
*/
|
|
16314
|
+
var ProjectUtils = class {
|
|
16315
|
+
log = $logger();
|
|
16316
|
+
runner = $inject(ProcessRunner);
|
|
16317
|
+
fs = $inject(FileSystemProvider);
|
|
16318
|
+
/**
|
|
16319
|
+
* Ensure Yarn is configured in the project directory.
|
|
16320
|
+
*
|
|
16321
|
+
* Creates a .yarnrc.yml file with node-modules linker if it doesn't exist.
|
|
16322
|
+
*
|
|
16323
|
+
* @param root - The root directory of the project
|
|
16324
|
+
*/
|
|
16325
|
+
async ensureYarn(root) {
|
|
16326
|
+
const yarnrcPath = join(root, ".yarnrc.yml");
|
|
16327
|
+
try {
|
|
16328
|
+
await access(yarnrcPath);
|
|
16329
|
+
} catch {
|
|
16330
|
+
await writeFile(yarnrcPath, "nodeLinker: node-modules");
|
|
16331
|
+
}
|
|
16332
|
+
const npmLockPath = join(root, "package-lock.json");
|
|
16333
|
+
const pnpmLockPath = join(root, "pnpm-lock.yaml");
|
|
16334
|
+
await this.fs.rm(npmLockPath, { force: true });
|
|
16335
|
+
await this.fs.rm(pnpmLockPath, { force: true });
|
|
16336
|
+
}
|
|
16337
|
+
/**
|
|
16338
|
+
* Generate package.json content with Alepha dependencies.
|
|
16339
|
+
*
|
|
16340
|
+
* @param modes - Configuration for which dependencies to include
|
|
16341
|
+
* @returns Package.json partial with dependencies, devDependencies, and scripts
|
|
16342
|
+
*/
|
|
16343
|
+
generatePackageJsonContent(modes) {
|
|
16344
|
+
const dependencies = {
|
|
16345
|
+
"@alepha/core": `^${version}`,
|
|
16346
|
+
"@alepha/logger": `^${version}`,
|
|
16347
|
+
"@alepha/datetime": `^${version}`
|
|
16348
|
+
};
|
|
16349
|
+
const devDependencies = {
|
|
16350
|
+
alepha: `^${version}`,
|
|
16351
|
+
"@alepha/vite": `^${version}`
|
|
16352
|
+
};
|
|
16353
|
+
if (modes.api) {
|
|
16354
|
+
dependencies["@alepha/server"] = `^${version}`;
|
|
16355
|
+
dependencies["@alepha/server-swagger"] = `^${version}`;
|
|
16356
|
+
dependencies["@alepha/server-multipart"] = `^${version}`;
|
|
16357
|
+
}
|
|
16358
|
+
if (modes.orm) dependencies["@alepha/orm"] = `^${version}`;
|
|
16359
|
+
if (modes.react) {
|
|
16360
|
+
dependencies["@alepha/server"] = `^${version}`;
|
|
16361
|
+
dependencies["@alepha/server-links"] = `^${version}`;
|
|
16362
|
+
dependencies["@alepha/react"] = `^${version}`;
|
|
16363
|
+
dependencies.react = "^19.2.0";
|
|
16364
|
+
devDependencies["@types/react"] = "^19.2.0";
|
|
16365
|
+
}
|
|
16366
|
+
return {
|
|
16367
|
+
dependencies,
|
|
16368
|
+
devDependencies,
|
|
16369
|
+
scripts: {
|
|
16370
|
+
dev: "alepha dev",
|
|
16371
|
+
build: "alepha build"
|
|
16372
|
+
}
|
|
16373
|
+
};
|
|
16374
|
+
}
|
|
16375
|
+
/**
|
|
16376
|
+
* Ensure package.json exists and has correct configuration.
|
|
16377
|
+
*
|
|
16378
|
+
* Creates a new package.json if none exists, or updates an existing one to:
|
|
16379
|
+
* - Set "type": "module"
|
|
16380
|
+
* - Add Alepha dependencies
|
|
16381
|
+
* - Add standard scripts
|
|
16382
|
+
*
|
|
16383
|
+
* @param root - The root directory of the project
|
|
16384
|
+
* @param modes - Configuration for which dependencies to include
|
|
16385
|
+
*/
|
|
16386
|
+
async ensurePackageJson(root, modes) {
|
|
16387
|
+
const packageJsonPath = join(root, "package.json");
|
|
16388
|
+
try {
|
|
16389
|
+
await access(packageJsonPath);
|
|
16390
|
+
} catch (error) {
|
|
16391
|
+
await writeFile(packageJsonPath, JSON.stringify(this.generatePackageJsonContent(modes), null, 2));
|
|
16392
|
+
return;
|
|
16393
|
+
}
|
|
16394
|
+
const content = await readFile(packageJsonPath, "utf8");
|
|
16395
|
+
const packageJson$1 = JSON.parse(content);
|
|
16396
|
+
if (!packageJson$1.type || packageJson$1.type !== "module") packageJson$1.type = "module";
|
|
16397
|
+
const newPackageJson = this.generatePackageJsonContent(modes);
|
|
16398
|
+
packageJson$1.type = "module";
|
|
16399
|
+
packageJson$1.dependencies ??= {};
|
|
16400
|
+
packageJson$1.devDependencies ??= {};
|
|
16401
|
+
packageJson$1.scripts ??= {};
|
|
16402
|
+
Object.assign(packageJson$1.dependencies, newPackageJson.dependencies);
|
|
16403
|
+
Object.assign(packageJson$1.devDependencies, newPackageJson.devDependencies);
|
|
16404
|
+
Object.assign(packageJson$1.scripts, newPackageJson.scripts);
|
|
16405
|
+
await writeFile(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
|
|
16406
|
+
}
|
|
16407
|
+
/**
|
|
16408
|
+
* Ensure package.json exists and is configured as ES module.
|
|
16409
|
+
*
|
|
16410
|
+
* Similar to ensurePackageJson but only validates/sets the "type": "module" field.
|
|
16411
|
+
* Throws an error if no package.json exists.
|
|
16412
|
+
*
|
|
16413
|
+
* @param root - The root directory of the project
|
|
16414
|
+
* @throws {AlephaError} If no package.json is found
|
|
16415
|
+
*/
|
|
16416
|
+
async ensurePackageJsonModule(root) {
|
|
16417
|
+
const packageJsonPath = join(root, "package.json");
|
|
16418
|
+
try {
|
|
16419
|
+
await access(packageJsonPath);
|
|
16420
|
+
} catch (error) {
|
|
16421
|
+
throw new AlephaError("No package.json found in project root. Run 'npx alepha init' to create one.");
|
|
16422
|
+
}
|
|
16423
|
+
const content = await readFile(packageJsonPath, "utf8");
|
|
16424
|
+
const packageJson$1 = JSON.parse(content);
|
|
16425
|
+
if (!packageJson$1.type || packageJson$1.type !== "module") {
|
|
16426
|
+
packageJson$1.type = "module";
|
|
16427
|
+
await writeFile(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
|
|
16428
|
+
}
|
|
16429
|
+
}
|
|
16430
|
+
/**
|
|
16431
|
+
* Ensure tsconfig.json exists in the project.
|
|
16432
|
+
*
|
|
16433
|
+
* Creates a standard Alepha tsconfig.json if none exists.
|
|
16434
|
+
*
|
|
16435
|
+
* @param root - The root directory of the project
|
|
16436
|
+
*/
|
|
16437
|
+
async ensureTsConfig(root) {
|
|
16438
|
+
const tsconfigPath = join(root, "tsconfig.json");
|
|
16439
|
+
try {
|
|
16440
|
+
await access(tsconfigPath);
|
|
16441
|
+
} catch {
|
|
16442
|
+
await writeFile(tsconfigPath, tsconfigJson);
|
|
16443
|
+
}
|
|
16444
|
+
}
|
|
16445
|
+
/**
|
|
16446
|
+
* Download Alepha starter project from GitHub.
|
|
16447
|
+
*
|
|
16448
|
+
* Downloads and extracts the apps/starter directory from the main Alepha repository.
|
|
16449
|
+
*
|
|
16450
|
+
* @param targetDir - The directory where the project should be extracted
|
|
16451
|
+
* @throws {AlephaError} If the download fails
|
|
16452
|
+
*/
|
|
16453
|
+
async downloadSampleProject(targetDir) {
|
|
16454
|
+
const response = await fetch("https://api.github.com/repos/feunard/alepha/tarball/main", { headers: { "User-Agent": "Alepha-CLI" } });
|
|
16455
|
+
if (!response.ok) throw new AlephaError(`Failed to download: ${response.statusText}`);
|
|
16456
|
+
await pipeline(Readable.fromWeb(response.body), tar.extract({
|
|
16457
|
+
cwd: targetDir,
|
|
16458
|
+
strip: 3,
|
|
16459
|
+
filter: (path) => {
|
|
16460
|
+
const parts = path.split("/");
|
|
16461
|
+
return parts.length >= 3 && parts[1] === "apps" && parts[2] === "starter";
|
|
16462
|
+
}
|
|
16463
|
+
}));
|
|
16464
|
+
}
|
|
16465
|
+
/**
|
|
16466
|
+
* Get the path to Biome configuration file.
|
|
16467
|
+
*
|
|
16468
|
+
* Looks for an existing biome.json in the project root, or creates one if it doesn't exist.
|
|
16469
|
+
*
|
|
16470
|
+
* @param maybePath - Optional custom path to biome config
|
|
16471
|
+
* @returns Absolute path to the biome.json config file
|
|
16472
|
+
*/
|
|
16473
|
+
async getBiomeConfigPath(maybePath) {
|
|
16474
|
+
const root = process.cwd();
|
|
16475
|
+
if (maybePath) try {
|
|
16476
|
+
const path = join(root, maybePath);
|
|
16477
|
+
await access(path);
|
|
16478
|
+
return path;
|
|
16479
|
+
} catch {}
|
|
16480
|
+
try {
|
|
16481
|
+
const path = join(root, "biome.json");
|
|
16482
|
+
await access(path);
|
|
16483
|
+
return path;
|
|
16484
|
+
} catch {
|
|
16485
|
+
return await this.runner.writeConfigFile("biome.json", biomeJson);
|
|
16486
|
+
}
|
|
16487
|
+
}
|
|
16488
|
+
/**
|
|
16489
|
+
* Get the path to Vite configuration file.
|
|
16490
|
+
*
|
|
16491
|
+
* Looks for an existing vite.config.ts in the project root, or creates one if it doesn't exist.
|
|
16492
|
+
*
|
|
16493
|
+
* @param root - The root directory of the project (defaults to process.cwd())
|
|
16494
|
+
* @param serverEntry - Optional path to the server entry file to include in the config
|
|
16495
|
+
* @returns Absolute path to the vite.config.ts file
|
|
16496
|
+
*/
|
|
16497
|
+
async getViteConfigPath(root, serverEntry) {
|
|
16498
|
+
try {
|
|
16499
|
+
const viteConfigPath = join(root, "vite.config.ts");
|
|
16500
|
+
await access(viteConfigPath);
|
|
16501
|
+
return viteConfigPath;
|
|
16502
|
+
} catch {
|
|
16503
|
+
return this.runner.writeConfigFile("vite.config.ts", viteConfigTs(serverEntry));
|
|
16504
|
+
}
|
|
16505
|
+
}
|
|
16506
|
+
/**
|
|
16507
|
+
* Load Alepha instance from a server entry file.
|
|
16508
|
+
*
|
|
16509
|
+
* Dynamically imports the server entry file and extracts the Alepha instance.
|
|
16510
|
+
* Skips the automatic start process.
|
|
16511
|
+
*
|
|
16512
|
+
* @param rootDir - The root directory of the project
|
|
16513
|
+
* @param explicitEntry - Optional explicit path to the entry file
|
|
16514
|
+
* @returns Object containing the Alepha instance and the entry file path
|
|
16515
|
+
* @throws {AlephaError} If the Alepha instance cannot be found
|
|
16516
|
+
*/
|
|
16517
|
+
async loadAlephaFromServerEntryFile(rootDir, explicitEntry) {
|
|
16518
|
+
process.env.ALEPHA_SKIP_START = "true";
|
|
16519
|
+
const entry = await boot.getServerEntry(rootDir, explicitEntry);
+const mod = await tsImport(entry, { parentURL: import.meta.url });
+this.log.debug(`Load entry: ${entry}`);
+if (mod.default instanceof Alepha) return {
+alepha: mod.default,
+entry
+};
+const g = global;
+if (g.__alepha) return {
+alepha: g.__alepha,
+entry
+};
+throw new AlephaError(`Could not find Alepha instance in entry file: ${entry}`);
+}
+/**
+* Get DrizzleKitProvider from an Alepha instance.
+*
+* Searches the Alepha registry for the DrizzleKitProvider instance.
+*
+* @param alepha - The Alepha instance to search
+* @returns The DrizzleKitProvider instance
+*/
+getKitFromAlepha(alepha$1) {
+return alepha$1["registry"].values().find((it) => it.instance.constructor.name === "DrizzleKitProvider")?.instance;
+}
+/**
+* Generate JavaScript code for Drizzle entities export.
+*
+* Creates a temporary entities.js file that imports from the entry file
+* and exports database models for Drizzle Kit to process.
+*
+* @param entry - Path to the server entry file
+* @param provider - Name of the database provider
+* @param models - Array of model names to export
+* @returns JavaScript code as a string
+*/
+generateEntitiesJs(entry, provider, models = []) {
+return `
+import "${entry}";
+import { DrizzleKitProvider, Repository } from "@alepha/orm";
+
+const alepha = globalThis.__alepha;
+const kit = alepha.inject(DrizzleKitProvider);
+const provider = alepha.services(Repository).find((it) => it.provider.name === "${provider}").provider;
+const models = kit.getModels(provider);
+
+${models.map((it) => `export const ${it} = models["${it}"];`).join("\n")}
+
+`.trim();
+}
+/**
+* Prepare Drizzle configuration files for a database provider.
+*
+* Creates temporary entities.js and drizzle.config.js files needed
+* for Drizzle Kit commands to run properly.
+*
+* @param options - Configuration options including kit, provider info, and paths
+* @returns Path to the generated drizzle.config.js file
+*/
+async prepareDrizzleConfig(options) {
+const models = Object.keys(options.kit.getModels(options.provider));
+const entitiesJs = this.generateEntitiesJs(options.entry, options.providerName, models);
+const config = {
+schema: await this.runner.writeConfigFile("entities.js", entitiesJs, options.rootDir),
+out: `./migrations/${options.providerName}`,
+dialect: options.dialect,
+dbCredentials: { url: options.providerUrl }
+};
+if (options.providerName === "pglite") config.driver = "pglite";
+const drizzleConfigJs = `export default ${JSON.stringify(config, null, 2)}`;
+return await this.runner.writeConfigFile("drizzle.config.js", drizzleConfigJs, options.rootDir);
+}
+/**
+* Run a drizzle-kit command for all database providers in an Alepha instance.
+*
+* Iterates through all repository providers, prepares Drizzle config for each,
+* and executes the specified drizzle-kit command.
+*
+* @param options - Configuration including command to run, flags, and logging
+*/
+async runDrizzleKitCommand(options) {
+const rootDir = options.root;
+this.log.debug(`Using project root: ${rootDir}`);
+const { alepha: alepha$1, entry } = await this.loadAlephaFromServerEntryFile(rootDir, options.args);
+const kit = this.getKitFromAlepha(alepha$1);
+const repositoryProvider = alepha$1.inject("RepositoryProvider");
+const accepted = /* @__PURE__ */ new Set([]);
+for (const descriptor of repositoryProvider.getRepositories()) {
+const provider = descriptor.provider;
+const providerName = provider.name;
+const dialect = provider.dialect;
+if (accepted.has(providerName)) continue;
+accepted.add(providerName);
+this.log.info("");
+this.log.info(options.logMessage(providerName, dialect));
+const drizzleConfigJsPath = await this.prepareDrizzleConfig({
+kit,
+provider,
+providerName,
+providerUrl: provider.url,
+dialect,
+entry,
+rootDir
+});
+await this.runner.exec(`drizzle-kit ${options.command} --config=${drizzleConfigJsPath}`);
+}
+}
+};
+
 //#endregion
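To make the template inside generateEntitiesJs easier to read than the escaped bundle string, here is an illustrative expansion for a provider registered under the name "main" with models "users" and "posts" (those three names and the entry path are made up for this sketch):

// entities.js, roughly as generateEntitiesJs would emit it for the hypothetical names above
import "./src/server.ts";
import { DrizzleKitProvider, Repository } from "@alepha/orm";

const alepha = globalThis.__alepha;
const kit = alepha.inject(DrizzleKitProvider);
const provider = alepha.services(Repository).find((it) => it.provider.name === "main").provider;
const models = kit.getModels(provider);

export const users = models["users"];
export const posts = models["posts"];

prepareDrizzleConfig then writes a drizzle.config.js whose schema field points at this generated file, with out, dialect and dbCredentials.url taken from the provider (plus driver: "pglite" for the pglite provider), and runDrizzleKitCommand finally shells out to drizzle-kit with --config set to that file, as shown in the diff above.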
 //#region src/commands/BiomeCommands.ts
 var BiomeCommands = class {
 log = $logger();
 runner = $inject(ProcessRunner);
+utils = $inject(ProjectUtils);
 biomeFlags = t.object({ config: t.optional(t.text({ aliases: ["c"] })) });
 format = $command({
 name: "format",
 description: "Format the codebase using Biome",
 flags: this.biomeFlags,
 handler: async ({ flags }) => {
-const configPath = await this.
+const configPath = await this.utils.getBiomeConfigPath(flags.config);
 await this.runner.exec(`biome format --fix --config-path=${configPath}`);
 }
 });
@@ -15131,57 +16646,21 @@ var BiomeCommands = class {
 description: "Run linter across the codebase using Biome",
 flags: this.biomeFlags,
 handler: async ({ flags }) => {
-const configPath = await this.
+const configPath = await this.utils.getBiomeConfigPath(flags.config);
 await this.runner.exec(`biome check --formatter-enabled=false --fix --config-path=${configPath}`);
 }
 });
-async configPath(maybePath) {
-const root = process.cwd();
-if (maybePath) try {
-const path = join(root, maybePath);
-await access(path);
-return path;
-} catch {}
-try {
-const path = join(root, "biome.json");
-await access(path);
-return path;
-} catch {
-return await this.runner.writeConfigFile("biome.json", biomeJson);
-}
-}
 };
 
-//#endregion
-//#region src/assets/tsconfigJson.ts
-const tsconfigJson = `
-{
-"compilerOptions": {
-"module": "nodenext",
-"target": "esnext",
-"strict": true,
-"jsx": "react-jsx",
-"verbatimModuleSyntax": true,
-"isolatedModules": true,
-"moduleDetection": "force",
-"skipLibCheck": true,
-"resolveJsonModule": true,
-"allowImportingTsExtensions": true
-},
-"exclude": ["node_modules", "dist"]
-}
-`.trim();
-
-//#endregion
-//#region src/version.ts
-const packageJson = JSON.parse(readFileSync(new URL("../package.json", import.meta.url), "utf-8"));
-const version = packageJson.version;
-
 //#endregion
 //#region src/commands/CoreCommands.ts
 var CoreCommands = class {
 log = $logger();
 cli = $inject(CliProvider);
+utils = $inject(ProjectUtils);
+/**
+* Called when no command is provided
+*/
 root = $command({
 root: true,
 flags: t.object({ version: t.optional(t.boolean({
@@ -15196,6 +16675,9 @@ var CoreCommands = class {
 this.cli.printHelp();
 }
 });
+/**
+* Create a new Alepha project based on one of the sample projects (for now, only one sample project is available)
+*/
 create = $command({
 name: "create",
 description: "Create a new Alepha project",
@@ -15206,9 +16688,9 @@ var CoreCommands = class {
 pnpm: t.optional(t.boolean({ description: "Use pnpm package manager" }))
 }),
 summary: false,
-handler: async ({ run: run$1, args, flags }) => {
+handler: async ({ run: run$1, args, flags, root }) => {
 const name = args;
-const dest = join(
+const dest = join(root, name);
 try {
 await access(dest);
 this.log.error(`Directory "${name}" already exists. Please choose a different project name.`);
@@ -15224,9 +16706,9 @@ var CoreCommands = class {
 execCmd = "pnpm";
 }
 await mkdir(dest, { recursive: true }).catch(() => null);
-await run$1("Downloading sample project", () => this.downloadSampleProject(dest));
+await run$1("Downloading sample project", () => this.utils.downloadSampleProject(dest));
 if (flags.yarn) {
-await this.ensureYarn(dest);
+await this.utils.ensureYarn(dest);
 await run$1(`cd ${name} && yarn set version stable`, { alias: "Setting Yarn to stable version" });
 }
 await run$1(`cd ${name} && ${installCmd}`, { alias: "Installing dependencies" });
@@ -15239,6 +16721,9 @@ var CoreCommands = class {
 this.log.info("");
 }
 });
+/**
+* Clean the project, removing the "dist" directory
+*/
 clean = $command({
 name: "clean",
 description: "Clean the project",
@@ -15246,104 +16731,30 @@ var CoreCommands = class {
 await run$1.rm("./dist");
 }
 });
+/**
+* Ensure the project has the necessary Alepha configuration files.
+* Add the correct dependencies to package.json and install them.
+*/
 init = $command({
 name: "init",
 description: "Add missing Alepha configuration files to the project",
 flags: t.object({
-yarn: t.boolean({ description: "Use Yarn package manager" }),
-api: t.boolean({ description: "Include Alepha Server dependencies" }),
-react: t.boolean({ description: "Include Alepha React dependencies" })
+yarn: t.optional(t.boolean({ description: "Use Yarn package manager" })),
+api: t.optional(t.boolean({ description: "Include Alepha Server dependencies" })),
+react: t.optional(t.boolean({ description: "Include Alepha React dependencies" })),
+orm: t.optional(t.boolean({ description: "Include Alepha ORM dependencies" }))
 }),
-handler: async ({ run: run$1, flags }) => {
-
-
-
-
-
+handler: async ({ run: run$1, flags, root }) => {
+await run$1("Ensuring Alepha configuration files", async () => {
+await this.utils.ensureTsConfig(root);
+await this.utils.ensurePackageJson(root, flags);
+});
+if (flags.yarn) {
+await this.utils.ensureYarn(root);
+await run$1("yarn install", { alias: "Installing dependencies with Yarn" });
+} else await run$1("npm install", { alias: "Installing dependencies with npm" });
 }
 });
-async ensureYarn(root) {
-const tsconfigPath = join(root, ".yarnrc.yml");
-try {
-await access(tsconfigPath);
-} catch {
-await writeFile(tsconfigPath, "nodeLinker: node-modules");
-}
-}
-generatePackageJsonContent(modes) {
-const dependencies = {
-"@alepha/core": `^${version}`,
-"@alepha/logger": `^${version}`,
-"@alepha/datetime": `^${version}`
-};
-const devDependencies = {
-alepha: `^${version}`,
-"@alepha/vite": `^${version}`
-};
-if (modes.api) {
-dependencies["@alepha/server"] = `^${version}`;
-dependencies["@alepha/server-swagger"] = `^${version}`;
-dependencies["@alepha/server-multipart"] = `^${version}`;
-}
-if (modes.react) {
-dependencies["@alepha/server"] = `^${version}`;
-dependencies["@alepha/server-links"] = `^${version}`;
-dependencies["@alepha/react"] = `^${version}`;
-dependencies.react = "^19.2.0";
-devDependencies["@types/react"] = "^19.0.0";
-}
-return {
-dependencies,
-devDependencies,
-scripts: {
-dev: "alepha dev",
-build: "alepha build"
-}
-};
-}
-async ensurePackageJson(root, modes) {
-const packageJsonPath = join(root, "package.json");
-try {
-await access(packageJsonPath);
-} catch (error) {
-this.log.info("No package.json found. Creating one...");
-await writeFile(packageJsonPath, JSON.stringify(this.generatePackageJsonContent(modes), null, 2));
-return;
-}
-const content = await readFile(packageJsonPath, "utf8");
-const packageJson$1 = JSON.parse(content);
-if (!packageJson$1.type || packageJson$1.type !== "module") packageJson$1.type = "module";
-const newPackageJson = this.generatePackageJsonContent(modes);
-packageJson$1.type = "module";
-packageJson$1.dependencies ??= {};
-packageJson$1.devDependencies ??= {};
-packageJson$1.scripts ??= {};
-Object.assign(packageJson$1.dependencies, newPackageJson.dependencies);
-Object.assign(packageJson$1.devDependencies, newPackageJson.devDependencies);
-Object.assign(packageJson$1.scripts, newPackageJson.scripts);
-await writeFile(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
-}
-async ensureTsConfig(root = process.cwd()) {
-const tsconfigPath = join(root, "tsconfig.json");
-try {
-await access(tsconfigPath);
-} catch {
-this.log.info("Missing tsconfig.json detected. Creating one...");
-await writeFile(tsconfigPath, tsconfigJson);
-}
-}
-async downloadSampleProject(targetDir) {
-const response = await fetch("https://api.github.com/repos/feunard/alepha/tarball/main", { headers: { "User-Agent": "Alepha-CLI" } });
-if (!response.ok) throw new AlephaError(`Failed to download: ${response.statusText}`);
-await pipeline(Readable.fromWeb(response.body), tar.extract({
-cwd: targetDir,
-strip: 3,
-filter: (path) => {
-const parts = path.split("/");
-return parts.length >= 3 && parts[1] === "apps" && parts[2] === "starter";
-}
-}));
-}
 };
 
 //#endregion
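For orientation, the removed generatePackageJsonContent and ensurePackageJson above show what `alepha init` used to merge into package.json. The replacement, ProjectUtils.ensurePackageJson(root, flags), is not shown in this file's hunks, so the exact new output is an assumption; judging from the removed code, a freshly initialised project ends up with roughly this package.json (version ranges follow the CLI's own version, here 0.11.11):

{
  "type": "module",
  "scripts": {
    "dev": "alepha dev",
    "build": "alepha build"
  },
  "dependencies": {
    "@alepha/core": "^0.11.11",
    "@alepha/logger": "^0.11.11",
    "@alepha/datetime": "^0.11.11"
  },
  "devDependencies": {
    "alepha": "^0.11.11",
    "@alepha/vite": "^0.11.11"
  }
}

The api, react and newly added orm flags presumably layer the corresponding @alepha/server*, @alepha/react and ORM packages on top of this base set, mirroring the removed modes.api and modes.react branches.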
@@ -15351,10 +16762,7 @@ var CoreCommands = class {
 var DrizzleCommands = class {
 log = $logger();
 runner = $inject(ProcessRunner);
-
-description: "Project root",
-default: "."
-}) });
+utils = $inject(ProjectUtils);
 /**
 * Check if database migrations are up to date
 *
@@ -15368,15 +16776,14 @@ var DrizzleCommands = class {
 check = $command({
 name: "db:check-migrations",
 description: "Verify database migration files are up to date",
-flags: this.flags,
 args: t.optional(t.text({
 title: "path",
 description: "Path to the Alepha server entry file"
 })),
-handler: async ({
-const rootDir =
+handler: async ({ args, root }) => {
+const rootDir = root;
 this.log.debug(`Using project root: ${rootDir}`);
-const { alepha: alepha$1 } = await this.loadAlephaFromServerEntryFile(rootDir, args);
+const { alepha: alepha$1 } = await this.utils.loadAlephaFromServerEntryFile(rootDir, args);
 const models = [];
 const repositories = alepha$1.descriptors("repository");
 const kit = createRequire(import.meta.url)("drizzle-kit/api");
@@ -15429,14 +16836,13 @@ var DrizzleCommands = class {
 name: "db:generate",
 description: "Generate migration files based on current database schema",
 summary: false,
-flags: this.flags,
 args: t.optional(t.text({
 title: "path",
 description: "Path to the Alepha server entry file"
 })),
-handler: async ({
-await this.runDrizzleKitCommand({
-
+handler: async ({ args, root }) => {
+await this.utils.runDrizzleKitCommand({
+root,
 args,
 command: "generate",
 logMessage: (providerName, dialect) => `Generate '${providerName}' migrations (${dialect}) ...`
@@ -15455,14 +16861,13 @@ var DrizzleCommands = class {
 name: "db:push",
 description: "Push database schema changes directly to the database",
 summary: false,
-flags: this.flags,
 args: t.optional(t.text({
 title: "path",
 description: "Path to the Alepha server entry file"
 })),
-handler: async ({
-await this.runDrizzleKitCommand({
-
+handler: async ({ root, args }) => {
+await this.utils.runDrizzleKitCommand({
+root,
 args,
 command: "push",
 logMessage: (providerName, dialect) => `Push '${providerName}' schema (${dialect}) ...`
@@ -15481,14 +16886,13 @@ var DrizzleCommands = class {
 name: "db:migrate",
 description: "Apply pending database migrations",
 summary: false,
-flags: this.flags,
 args: t.optional(t.text({
 title: "path",
 description: "Path to the Alepha server entry file"
 })),
-handler: async ({
-await this.runDrizzleKitCommand({
-
+handler: async ({ root, args }) => {
+await this.utils.runDrizzleKitCommand({
+root,
 args,
 command: "migrate",
 logMessage: (providerName, dialect) => `Migrate '${providerName}' database (${dialect}) ...`
@@ -15507,108 +16911,25 @@ var DrizzleCommands = class {
 name: "db:studio",
 description: "Launch Drizzle Studio database browser",
 summary: false,
-flags: this.flags,
 args: t.optional(t.text({
 title: "path",
 description: "Path to the Alepha server entry file"
 })),
-handler: async ({
-await this.runDrizzleKitCommand({
-
+handler: async ({ root, args }) => {
+await this.utils.runDrizzleKitCommand({
+root,
 args,
 command: "studio",
 logMessage: (providerName, dialect) => `Launch Studio for '${providerName}' (${dialect}) ...`
 });
 }
 });
-/**
-* Run a drizzle-kit command for all database providers
-*/
-async runDrizzleKitCommand(options) {
-const rootDir = join(process.cwd(), options.flags.root);
-this.log.debug(`Using project root: ${rootDir}`);
-const { alepha: alepha$1, entry } = await this.loadAlephaFromServerEntryFile(rootDir, options.args);
-const kit = this.getKitFromAlepha(alepha$1);
-const repositoryProvider = alepha$1.inject("RepositoryProvider");
-const accepted = /* @__PURE__ */ new Set([]);
-for (const descriptor of repositoryProvider.getRepositories()) {
-const provider = descriptor.provider;
-const providerName = provider.name;
-const dialect = provider.dialect;
-if (accepted.has(providerName)) continue;
-accepted.add(providerName);
-this.log.info("");
-this.log.info(options.logMessage(providerName, dialect));
-const drizzleConfigJsPath = await this.prepareDrizzleConfig({
-kit,
-provider,
-providerName,
-providerUrl: provider.url,
-dialect,
-entry,
-rootDir
-});
-await this.runner.exec(`drizzle-kit ${options.command} --config=${drizzleConfigJsPath}`);
-}
-}
-/**
-* Prepare Drizzle configuration files for a provider
-*/
-async prepareDrizzleConfig(options) {
-const models = Object.keys(options.kit.getModels(options.provider));
-const entitiesJs = this.generateEntitiesJs(options.entry, options.providerName, models);
-const config = {
-schema: await this.runner.writeConfigFile("entities.js", entitiesJs, options.rootDir),
-out: `./migrations/${options.providerName}`,
-dialect: options.dialect,
-dbCredentials: { url: options.providerUrl }
-};
-if (options.providerName === "pglite") config.driver = "pglite";
-const drizzleConfigJs = "export default " + JSON.stringify(config, null, 2);
-return await this.runner.writeConfigFile("drizzle.config.js", drizzleConfigJs, options.rootDir);
-}
-/**
-* Get DrizzleKitProvider from Alepha instance
-*/
-getKitFromAlepha(alepha$1) {
-return alepha$1["registry"].values().find((it) => it.instance.constructor.name === "DrizzleKitProvider")?.instance;
-}
-async loadAlephaFromServerEntryFile(rootDir, explicitEntry) {
-process.env.ALEPHA_SKIP_START = "true";
-const entry = await boot.getServerEntry(rootDir, explicitEntry);
-const mod = await tsImport(entry, { parentURL: import.meta.url });
-this.log.debug(`Load entry: ${entry}`);
-if (mod.default instanceof Alepha) return {
-alepha: mod.default,
-entry
-};
-const g = global;
-if (g.__alepha) return {
-alepha: g.__alepha,
-entry
-};
-throw new AlephaError(`Could not find Alepha instance in entry file: ${entry}`);
-}
-generateEntitiesJs(entry, provider, models = []) {
-return `
-import "${entry}";
-import { DrizzleKitProvider, Repository } from "@alepha/postgres";
-
-const alepha = globalThis.__alepha;
-const kit = alepha.inject(DrizzleKitProvider);
-const provider = alepha.services(Repository).find((it) => it.provider.name === "${provider}").provider;
-const models = kit.getModels(provider);
-
-${models.map((it) => `export const ${it} = models["${it}"];`).join("\n")}
-
-`.trim();
-}
 };
 
 //#endregion
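All of the db:* commands above resolve the application through ProjectUtils.loadAlephaFromServerEntryFile, so the entry file (the optional path argument, or whatever boot.getServerEntry discovers) has to expose the Alepha instance in one of the two shapes the loader checks. A minimal sketch, where createApp stands in for however the project actually constructs its instance (that construction API is not shown in this diff):

// src/server.ts (sketch)
import { createApp } from "./app.ts"; // hypothetical factory

const alepha = createApp();

// Shape 1: a default export, matched by the `mod.default instanceof Alepha` check above.
export default alepha;

// Shape 2: the global fallback the loader also accepts.
globalThis.__alepha = alepha;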
 //#region src/commands/VerifyCommands.ts
 var VerifyCommands = class {
-
+processRunner = $inject(ProcessRunner);
 /**
 * Run a series of verification commands to ensure code quality and correctness.
 *
@@ -15641,32 +16962,17 @@ var VerifyCommands = class {
 name: "typecheck",
 description: "Check TypeScript types across the codebase",
 handler: async () => {
-await this.
+await this.processRunner.exec("tsc --noEmit");
 }
 });
 };
 
-//#endregion
-//#region src/assets/viteConfigTs.ts
-const viteConfigTs = (serverEntry) => `
-import { viteAlepha } from "@alepha/vite";
-
-export default {
-plugins: [
-viteAlepha(${serverEntry ? `{ serverEntry: "${serverEntry}" }` : ""}),
-],
-test: {
-globals: true,
-},
-};
-`.trim();
-
 //#endregion
 //#region src/commands/ViteCommands.ts
 var ViteCommands = class {
 log = $logger();
 runner = $inject(ProcessRunner);
-
+utils = $inject(ProjectUtils);
 run = $command({
 name: "run",
 description: "Run a TypeScript file directly",
@@ -15679,8 +16985,8 @@ var ViteCommands = class {
 title: "path",
 description: "Filepath to run"
 }),
-handler: async ({ args, flags }) => {
-await this.
+handler: async ({ args, flags, root }) => {
+await this.utils.ensureTsConfig(root);
 await this.runner.exec(`tsx ${flags.watch ? "watch " : ""}${args}`);
 }
 });
@@ -15697,18 +17003,19 @@ var ViteCommands = class {
 title: "path",
 description: "Filepath to run"
 })),
-handler: async ({ args }) => {
-
-await this.
-await this.ensurePackageJson(root);
+handler: async ({ args, root }) => {
+await this.utils.ensureTsConfig(root);
+await this.utils.ensurePackageJsonModule(root);
 const entry = await boot.getServerEntry(root, args);
 this.log.trace("Entry file found", { entry });
 try {
 await access(join(root, "index.html"));
 } catch {
 this.log.trace("No index.html found, running entry file with tsx");
+await this.runner.exec(`tsx watch ${entry}`);
+return;
 }
-const configPath = await this.
+const configPath = await this.utils.getViteConfigPath(root, args ? entry : void 0);
 this.log.trace("Vite config found", { configPath });
 await this.runner.exec(`vite -c=${configPath}`);
 }
@@ -15729,15 +17036,15 @@ var ViteCommands = class {
 }),
 handler: async ({ flags, args }) => {
 const root = process.cwd();
-await this.
-await this.
+await this.utils.ensureTsConfig(root);
+await this.utils.ensurePackageJsonModule(root);
 const entry = await boot.getServerEntry(root, args);
 this.log.trace("Entry file found", { entry });
 await rm("dist", {
 recursive: true,
 force: true
 });
-const configPath = await this.
+const configPath = await this.utils.getViteConfigPath(root, args ? entry : void 0);
 const env = {};
 if (flags.stats) env.ALEPHA_BUILD_STATS = "true";
 await this.runner.exec(`vite build -c=${configPath}`, env);
@@ -15746,35 +17053,12 @@ var ViteCommands = class {
 test = $command({
 name: "test",
 description: "Run tests using Vitest",
-handler: async () => {
-await this.
-const configPath = await this.
+handler: async ({ root }) => {
+await this.utils.ensureTsConfig(root);
+const configPath = await this.utils.getViteConfigPath(root);
 await this.runner.exec(`vitest run -c=${configPath}`);
 }
 });
-async configPath(root = process.cwd(), serverEntry) {
-try {
-const viteConfigPath = join(root, "vite.config.ts");
-await access(viteConfigPath);
-return viteConfigPath;
-} catch {
-return this.runner.writeConfigFile("vite.config.ts", viteConfigTs(serverEntry));
-}
-}
-async ensurePackageJson(root = process.cwd()) {
-const packageJsonPath = join(root, "package.json");
-try {
-await access(packageJsonPath);
-} catch (error) {
-throw new AlephaError("No package.json found in project root. Run 'npx alepha init' to create one.");
-}
-const content = await readFile(packageJsonPath, "utf8");
-const packageJson$1 = JSON.parse(content);
-if (!packageJson$1.type || packageJson$1.type !== "module") {
-packageJson$1.type = "module";
-await writeFile(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
-}
-}
 };
 
 //#endregion
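The removed ViteCommands.configPath above preferred an existing vite.config.ts at the project root and only generated one (from the removed viteConfigTs template) when none was found; assuming ProjectUtils.getViteConfigPath keeps that behaviour, a project can still supply its own config for the dev, build and test commands. A sketch mirroring the removed template, with an illustrative serverEntry path (the option was optional in that template):

// vite.config.ts
import { viteAlepha } from "@alepha/vite";

export default {
  plugins: [
    viteAlepha({ serverEntry: "./src/server.ts" }),
  ],
  test: {
    globals: true,
  },
};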
|