alepha 0.11.10 → 0.11.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -0
- package/dist/index.cjs +1692 -408
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +1622 -338
- package/dist/index.js.map +1 -1
- package/package.json +12 -6
- package/src/assets/tsconfigJson.ts +1 -0
- package/src/commands/BiomeCommands.ts +5 -24
- package/src/commands/CoreCommands.ts +41 -138
- package/src/commands/DrizzleCommands.ts +18 -193
- package/src/commands/VerifyCommands.ts +2 -6
- package/src/commands/ViteCommands.ts +24 -57
- package/src/services/ProjectUtils.ts +508 -0
package/dist/index.cjs
CHANGED
|
@@ -45,12 +45,13 @@ let node_process = require("node:process");
|
|
|
45
45
|
let node_readline_promises = require("node:readline/promises");
|
|
46
46
|
let node_child_process = require("node:child_process");
|
|
47
47
|
let node_stream = require("node:stream");
|
|
48
|
-
let
|
|
48
|
+
let node_stream_promises = require("node:stream/promises");
|
|
49
|
+
let node_fs = require("node:fs");
|
|
50
|
+
let node_url = require("node:url");
|
|
49
51
|
let tar = require("tar");
|
|
50
52
|
tar = __toESM(tar);
|
|
51
|
-
let node_fs = require("node:fs");
|
|
52
|
-
let node_module = require("node:module");
|
|
53
53
|
let tsx_esm_api = require("tsx/esm/api");
|
|
54
|
+
let node_module = require("node:module");
|
|
54
55
|
|
|
55
56
|
//#region ../core/src/constants/KIND.ts
|
|
56
57
|
/**
|
|
@@ -14798,7 +14799,8 @@ var CliProvider = class {
|
|
|
14798
14799
|
run: runner.run,
|
|
14799
14800
|
ask: this.asker.ask,
|
|
14800
14801
|
fs: node_fs_promises,
|
|
14801
|
-
glob: node_fs_promises.glob
|
|
14802
|
+
glob: node_fs_promises.glob,
|
|
14803
|
+
root: process.cwd()
|
|
14802
14804
|
};
|
|
14803
14805
|
await command.options.handler(args);
|
|
14804
14806
|
if (command.options.summary !== false) runner.summary();
|
|
@@ -15009,42 +15011,6 @@ const AlephaCommand = $module({
|
|
|
15009
15011
|
]
|
|
15010
15012
|
});
|
|
15011
15013
|
|
|
15012
|
-
//#endregion
|
|
15013
|
-
//#region src/assets/biomeJson.ts
|
|
15014
|
-
const biomeJson = `
|
|
15015
|
-
{
|
|
15016
|
-
"$schema": "https://biomejs.dev/schemas/latest/schema.json",
|
|
15017
|
-
"vcs": {
|
|
15018
|
-
"enabled": true,
|
|
15019
|
-
"clientKind": "git"
|
|
15020
|
-
},
|
|
15021
|
-
"files": {
|
|
15022
|
-
"ignoreUnknown": true,
|
|
15023
|
-
"includes": ["**", "!node_modules", "!dist"]
|
|
15024
|
-
},
|
|
15025
|
-
"formatter": {
|
|
15026
|
-
"enabled": true,
|
|
15027
|
-
"indentStyle": "space"
|
|
15028
|
-
},
|
|
15029
|
-
"linter": {
|
|
15030
|
-
"enabled": true,
|
|
15031
|
-
"rules": {
|
|
15032
|
-
"recommended": true
|
|
15033
|
-
},
|
|
15034
|
-
"domains": {
|
|
15035
|
-
"react": "recommended"
|
|
15036
|
-
}
|
|
15037
|
-
},
|
|
15038
|
-
"assist": {
|
|
15039
|
-
"actions": {
|
|
15040
|
-
"source": {
|
|
15041
|
-
"organizeImports": "on"
|
|
15042
|
-
}
|
|
15043
|
-
}
|
|
15044
|
-
}
|
|
15045
|
-
}
|
|
15046
|
-
`.trim();
|
|
15047
|
-
|
|
15048
15014
|
//#endregion
|
|
15049
15015
|
//#region src/services/ProcessRunner.ts
|
|
15050
15016
|
/**
|
|
@@ -15114,207 +15080,1322 @@ var ProcessRunner = class {
|
|
|
15114
15080
|
};
|
|
15115
15081
|
|
|
15116
15082
|
//#endregion
|
|
15117
|
-
//#region src/
|
|
15118
|
-
|
|
15119
|
-
|
|
15120
|
-
|
|
15121
|
-
|
|
15122
|
-
format = $command({
|
|
15123
|
-
name: "format",
|
|
15124
|
-
description: "Format the codebase using Biome",
|
|
15125
|
-
flags: this.biomeFlags,
|
|
15126
|
-
handler: async ({ flags }) => {
|
|
15127
|
-
const configPath = await this.configPath(flags.config);
|
|
15128
|
-
await this.runner.exec(`biome format --fix --config-path=${configPath}`);
|
|
15129
|
-
}
|
|
15130
|
-
});
|
|
15131
|
-
lint = $command({
|
|
15132
|
-
name: "lint",
|
|
15133
|
-
description: "Run linter across the codebase using Biome",
|
|
15134
|
-
flags: this.biomeFlags,
|
|
15135
|
-
handler: async ({ flags }) => {
|
|
15136
|
-
const configPath = await this.configPath(flags.config);
|
|
15137
|
-
await this.runner.exec(`biome check --formatter-enabled=false --fix --config-path=${configPath}`);
|
|
15138
|
-
}
|
|
15139
|
-
});
|
|
15140
|
-
async configPath(maybePath) {
|
|
15141
|
-
const root = process.cwd();
|
|
15142
|
-
if (maybePath) try {
|
|
15143
|
-
const path = (0, node_path.join)(root, maybePath);
|
|
15144
|
-
await (0, node_fs_promises.access)(path);
|
|
15145
|
-
return path;
|
|
15146
|
-
} catch {}
|
|
15147
|
-
try {
|
|
15148
|
-
const path = (0, node_path.join)(root, "biome.json");
|
|
15149
|
-
await (0, node_fs_promises.access)(path);
|
|
15150
|
-
return path;
|
|
15151
|
-
} catch {
|
|
15152
|
-
return await this.runner.writeConfigFile("biome.json", biomeJson);
|
|
15153
|
-
}
|
|
15154
|
-
}
|
|
15155
|
-
};
|
|
15156
|
-
|
|
15157
|
-
//#endregion
|
|
15158
|
-
//#region src/assets/tsconfigJson.ts
|
|
15159
|
-
const tsconfigJson = `
|
|
15160
|
-
{
|
|
15161
|
-
"compilerOptions": {
|
|
15162
|
-
"module": "nodenext",
|
|
15163
|
-
"target": "esnext",
|
|
15164
|
-
"strict": true,
|
|
15165
|
-
"jsx": "react-jsx",
|
|
15166
|
-
"verbatimModuleSyntax": true,
|
|
15167
|
-
"isolatedModules": true,
|
|
15168
|
-
"moduleDetection": "force",
|
|
15169
|
-
"skipLibCheck": true,
|
|
15170
|
-
"resolveJsonModule": true,
|
|
15171
|
-
"allowImportingTsExtensions": true
|
|
15172
|
-
},
|
|
15173
|
-
"exclude": ["node_modules", "dist"]
|
|
15174
|
-
}
|
|
15175
|
-
`.trim();
|
|
15176
|
-
|
|
15177
|
-
//#endregion
|
|
15178
|
-
//#region src/version.ts
|
|
15179
|
-
const packageJson = JSON.parse((0, node_fs.readFileSync)(new URL("../package.json", require("url").pathToFileURL(__filename).href), "utf-8"));
|
|
15180
|
-
const version = packageJson.version;
|
|
15083
|
+
//#region ../file/src/providers/FileSystemProvider.ts
|
|
15084
|
+
/**
|
|
15085
|
+
* FileSystem interface providing utilities for working with files.
|
|
15086
|
+
*/
|
|
15087
|
+
var FileSystemProvider = class {};
|
|
15181
15088
|
|
|
15182
15089
|
//#endregion
|
|
15183
|
-
//#region src/
|
|
15184
|
-
|
|
15185
|
-
|
|
15186
|
-
|
|
15187
|
-
|
|
15188
|
-
|
|
15189
|
-
|
|
15190
|
-
|
|
15191
|
-
|
|
15192
|
-
|
|
15193
|
-
|
|
15194
|
-
|
|
15195
|
-
|
|
15196
|
-
|
|
15090
|
+
//#region ../file/src/services/FileDetector.ts
|
|
15091
|
+
/**
|
|
15092
|
+
* Service for detecting file types and getting content types.
|
|
15093
|
+
*
|
|
15094
|
+
* @example
|
|
15095
|
+
* ```typescript
|
|
15096
|
+
* const detector = alepha.inject(FileDetector);
|
|
15097
|
+
*
|
|
15098
|
+
* // Get content type from filename
|
|
15099
|
+
* const mimeType = detector.getContentType("image.png"); // "image/png"
|
|
15100
|
+
*
|
|
15101
|
+
* // Detect file type by magic bytes
|
|
15102
|
+
* const stream = createReadStream('image.png');
|
|
15103
|
+
* const result = await detector.detectFileType(stream, 'image.png');
|
|
15104
|
+
* console.log(result.mimeType); // 'image/png'
|
|
15105
|
+
* console.log(result.verified); // true if magic bytes match
|
|
15106
|
+
* ```
|
|
15107
|
+
*/
|
|
15108
|
+
var FileDetector = class FileDetector {
|
|
15109
|
+
/**
|
|
15110
|
+
* Magic byte signatures for common file formats.
|
|
15111
|
+
* Each signature is represented as an array of bytes or null (wildcard).
|
|
15112
|
+
*/
|
|
15113
|
+
static MAGIC_BYTES = {
|
|
15114
|
+
png: [{
|
|
15115
|
+
signature: [
|
|
15116
|
+
137,
|
|
15117
|
+
80,
|
|
15118
|
+
78,
|
|
15119
|
+
71,
|
|
15120
|
+
13,
|
|
15121
|
+
10,
|
|
15122
|
+
26,
|
|
15123
|
+
10
|
|
15124
|
+
],
|
|
15125
|
+
mimeType: "image/png"
|
|
15126
|
+
}],
|
|
15127
|
+
jpg: [
|
|
15128
|
+
{
|
|
15129
|
+
signature: [
|
|
15130
|
+
255,
|
|
15131
|
+
216,
|
|
15132
|
+
255,
|
|
15133
|
+
224
|
|
15134
|
+
],
|
|
15135
|
+
mimeType: "image/jpeg"
|
|
15136
|
+
},
|
|
15137
|
+
{
|
|
15138
|
+
signature: [
|
|
15139
|
+
255,
|
|
15140
|
+
216,
|
|
15141
|
+
255,
|
|
15142
|
+
225
|
|
15143
|
+
],
|
|
15144
|
+
mimeType: "image/jpeg"
|
|
15145
|
+
},
|
|
15146
|
+
{
|
|
15147
|
+
signature: [
|
|
15148
|
+
255,
|
|
15149
|
+
216,
|
|
15150
|
+
255,
|
|
15151
|
+
226
|
|
15152
|
+
],
|
|
15153
|
+
mimeType: "image/jpeg"
|
|
15154
|
+
},
|
|
15155
|
+
{
|
|
15156
|
+
signature: [
|
|
15157
|
+
255,
|
|
15158
|
+
216,
|
|
15159
|
+
255,
|
|
15160
|
+
227
|
|
15161
|
+
],
|
|
15162
|
+
mimeType: "image/jpeg"
|
|
15163
|
+
},
|
|
15164
|
+
{
|
|
15165
|
+
signature: [
|
|
15166
|
+
255,
|
|
15167
|
+
216,
|
|
15168
|
+
255,
|
|
15169
|
+
232
|
|
15170
|
+
],
|
|
15171
|
+
mimeType: "image/jpeg"
|
|
15197
15172
|
}
|
|
15198
|
-
|
|
15199
|
-
|
|
15200
|
-
|
|
15201
|
-
|
|
15202
|
-
|
|
15203
|
-
|
|
15204
|
-
|
|
15205
|
-
|
|
15206
|
-
|
|
15207
|
-
|
|
15208
|
-
|
|
15209
|
-
|
|
15210
|
-
|
|
15211
|
-
|
|
15212
|
-
|
|
15213
|
-
|
|
15214
|
-
|
|
15215
|
-
|
|
15216
|
-
|
|
15217
|
-
|
|
15218
|
-
|
|
15219
|
-
|
|
15220
|
-
|
|
15221
|
-
|
|
15222
|
-
|
|
15223
|
-
|
|
15224
|
-
|
|
15225
|
-
|
|
15226
|
-
|
|
15173
|
+
],
|
|
15174
|
+
jpeg: [
|
|
15175
|
+
{
|
|
15176
|
+
signature: [
|
|
15177
|
+
255,
|
|
15178
|
+
216,
|
|
15179
|
+
255,
|
|
15180
|
+
224
|
|
15181
|
+
],
|
|
15182
|
+
mimeType: "image/jpeg"
|
|
15183
|
+
},
|
|
15184
|
+
{
|
|
15185
|
+
signature: [
|
|
15186
|
+
255,
|
|
15187
|
+
216,
|
|
15188
|
+
255,
|
|
15189
|
+
225
|
|
15190
|
+
],
|
|
15191
|
+
mimeType: "image/jpeg"
|
|
15192
|
+
},
|
|
15193
|
+
{
|
|
15194
|
+
signature: [
|
|
15195
|
+
255,
|
|
15196
|
+
216,
|
|
15197
|
+
255,
|
|
15198
|
+
226
|
|
15199
|
+
],
|
|
15200
|
+
mimeType: "image/jpeg"
|
|
15201
|
+
},
|
|
15202
|
+
{
|
|
15203
|
+
signature: [
|
|
15204
|
+
255,
|
|
15205
|
+
216,
|
|
15206
|
+
255,
|
|
15207
|
+
227
|
|
15208
|
+
],
|
|
15209
|
+
mimeType: "image/jpeg"
|
|
15210
|
+
},
|
|
15211
|
+
{
|
|
15212
|
+
signature: [
|
|
15213
|
+
255,
|
|
15214
|
+
216,
|
|
15215
|
+
255,
|
|
15216
|
+
232
|
|
15217
|
+
],
|
|
15218
|
+
mimeType: "image/jpeg"
|
|
15227
15219
|
}
|
|
15228
|
-
|
|
15229
|
-
|
|
15230
|
-
|
|
15231
|
-
|
|
15232
|
-
|
|
15220
|
+
],
|
|
15221
|
+
gif: [{
|
|
15222
|
+
signature: [
|
|
15223
|
+
71,
|
|
15224
|
+
73,
|
|
15225
|
+
70,
|
|
15226
|
+
56,
|
|
15227
|
+
55,
|
|
15228
|
+
97
|
|
15229
|
+
],
|
|
15230
|
+
mimeType: "image/gif"
|
|
15231
|
+
}, {
|
|
15232
|
+
signature: [
|
|
15233
|
+
71,
|
|
15234
|
+
73,
|
|
15235
|
+
70,
|
|
15236
|
+
56,
|
|
15237
|
+
57,
|
|
15238
|
+
97
|
|
15239
|
+
],
|
|
15240
|
+
mimeType: "image/gif"
|
|
15241
|
+
}],
|
|
15242
|
+
webp: [{
|
|
15243
|
+
signature: [
|
|
15244
|
+
82,
|
|
15245
|
+
73,
|
|
15246
|
+
70,
|
|
15247
|
+
70,
|
|
15248
|
+
null,
|
|
15249
|
+
null,
|
|
15250
|
+
null,
|
|
15251
|
+
null,
|
|
15252
|
+
87,
|
|
15253
|
+
69,
|
|
15254
|
+
66,
|
|
15255
|
+
80
|
|
15256
|
+
],
|
|
15257
|
+
mimeType: "image/webp"
|
|
15258
|
+
}],
|
|
15259
|
+
bmp: [{
|
|
15260
|
+
signature: [66, 77],
|
|
15261
|
+
mimeType: "image/bmp"
|
|
15262
|
+
}],
|
|
15263
|
+
ico: [{
|
|
15264
|
+
signature: [
|
|
15265
|
+
0,
|
|
15266
|
+
0,
|
|
15267
|
+
1,
|
|
15268
|
+
0
|
|
15269
|
+
],
|
|
15270
|
+
mimeType: "image/x-icon"
|
|
15271
|
+
}],
|
|
15272
|
+
tiff: [{
|
|
15273
|
+
signature: [
|
|
15274
|
+
73,
|
|
15275
|
+
73,
|
|
15276
|
+
42,
|
|
15277
|
+
0
|
|
15278
|
+
],
|
|
15279
|
+
mimeType: "image/tiff"
|
|
15280
|
+
}, {
|
|
15281
|
+
signature: [
|
|
15282
|
+
77,
|
|
15283
|
+
77,
|
|
15284
|
+
0,
|
|
15285
|
+
42
|
|
15286
|
+
],
|
|
15287
|
+
mimeType: "image/tiff"
|
|
15288
|
+
}],
|
|
15289
|
+
tif: [{
|
|
15290
|
+
signature: [
|
|
15291
|
+
73,
|
|
15292
|
+
73,
|
|
15293
|
+
42,
|
|
15294
|
+
0
|
|
15295
|
+
],
|
|
15296
|
+
mimeType: "image/tiff"
|
|
15297
|
+
}, {
|
|
15298
|
+
signature: [
|
|
15299
|
+
77,
|
|
15300
|
+
77,
|
|
15301
|
+
0,
|
|
15302
|
+
42
|
|
15303
|
+
],
|
|
15304
|
+
mimeType: "image/tiff"
|
|
15305
|
+
}],
|
|
15306
|
+
pdf: [{
|
|
15307
|
+
signature: [
|
|
15308
|
+
37,
|
|
15309
|
+
80,
|
|
15310
|
+
68,
|
|
15311
|
+
70,
|
|
15312
|
+
45
|
|
15313
|
+
],
|
|
15314
|
+
mimeType: "application/pdf"
|
|
15315
|
+
}],
|
|
15316
|
+
zip: [
|
|
15317
|
+
{
|
|
15318
|
+
signature: [
|
|
15319
|
+
80,
|
|
15320
|
+
75,
|
|
15321
|
+
3,
|
|
15322
|
+
4
|
|
15323
|
+
],
|
|
15324
|
+
mimeType: "application/zip"
|
|
15325
|
+
},
|
|
15326
|
+
{
|
|
15327
|
+
signature: [
|
|
15328
|
+
80,
|
|
15329
|
+
75,
|
|
15330
|
+
5,
|
|
15331
|
+
6
|
|
15332
|
+
],
|
|
15333
|
+
mimeType: "application/zip"
|
|
15334
|
+
},
|
|
15335
|
+
{
|
|
15336
|
+
signature: [
|
|
15337
|
+
80,
|
|
15338
|
+
75,
|
|
15339
|
+
7,
|
|
15340
|
+
8
|
|
15341
|
+
],
|
|
15342
|
+
mimeType: "application/zip"
|
|
15233
15343
|
}
|
|
15234
|
-
|
|
15235
|
-
|
|
15236
|
-
|
|
15237
|
-
|
|
15238
|
-
|
|
15239
|
-
|
|
15240
|
-
|
|
15241
|
-
|
|
15242
|
-
|
|
15243
|
-
|
|
15244
|
-
|
|
15245
|
-
|
|
15246
|
-
|
|
15247
|
-
|
|
15248
|
-
|
|
15249
|
-
|
|
15250
|
-
|
|
15251
|
-
|
|
15252
|
-
|
|
15253
|
-
|
|
15254
|
-
|
|
15255
|
-
|
|
15256
|
-
|
|
15257
|
-
|
|
15258
|
-
|
|
15259
|
-
|
|
15260
|
-
|
|
15261
|
-
|
|
15262
|
-
|
|
15263
|
-
|
|
15264
|
-
|
|
15265
|
-
|
|
15266
|
-
|
|
15267
|
-
|
|
15268
|
-
|
|
15269
|
-
|
|
15270
|
-
|
|
15271
|
-
|
|
15272
|
-
|
|
15273
|
-
|
|
15274
|
-
|
|
15275
|
-
|
|
15276
|
-
|
|
15277
|
-
|
|
15278
|
-
|
|
15279
|
-
|
|
15280
|
-
|
|
15281
|
-
|
|
15282
|
-
|
|
15283
|
-
|
|
15284
|
-
|
|
15285
|
-
|
|
15286
|
-
|
|
15287
|
-
|
|
15288
|
-
|
|
15289
|
-
|
|
15290
|
-
|
|
15291
|
-
|
|
15292
|
-
|
|
15293
|
-
|
|
15294
|
-
|
|
15295
|
-
devDependencies["@types/react"] = "^19.0.0";
|
|
15296
|
-
}
|
|
15297
|
-
return {
|
|
15298
|
-
dependencies,
|
|
15299
|
-
devDependencies,
|
|
15300
|
-
scripts: {
|
|
15301
|
-
dev: "alepha dev",
|
|
15302
|
-
build: "alepha build"
|
|
15344
|
+
],
|
|
15345
|
+
rar: [{
|
|
15346
|
+
signature: [
|
|
15347
|
+
82,
|
|
15348
|
+
97,
|
|
15349
|
+
114,
|
|
15350
|
+
33,
|
|
15351
|
+
26,
|
|
15352
|
+
7
|
|
15353
|
+
],
|
|
15354
|
+
mimeType: "application/vnd.rar"
|
|
15355
|
+
}],
|
|
15356
|
+
"7z": [{
|
|
15357
|
+
signature: [
|
|
15358
|
+
55,
|
|
15359
|
+
122,
|
|
15360
|
+
188,
|
|
15361
|
+
175,
|
|
15362
|
+
39,
|
|
15363
|
+
28
|
|
15364
|
+
],
|
|
15365
|
+
mimeType: "application/x-7z-compressed"
|
|
15366
|
+
}],
|
|
15367
|
+
tar: [{
|
|
15368
|
+
signature: [
|
|
15369
|
+
117,
|
|
15370
|
+
115,
|
|
15371
|
+
116,
|
|
15372
|
+
97,
|
|
15373
|
+
114
|
|
15374
|
+
],
|
|
15375
|
+
mimeType: "application/x-tar"
|
|
15376
|
+
}],
|
|
15377
|
+
gz: [{
|
|
15378
|
+
signature: [31, 139],
|
|
15379
|
+
mimeType: "application/gzip"
|
|
15380
|
+
}],
|
|
15381
|
+
tgz: [{
|
|
15382
|
+
signature: [31, 139],
|
|
15383
|
+
mimeType: "application/gzip"
|
|
15384
|
+
}],
|
|
15385
|
+
mp3: [
|
|
15386
|
+
{
|
|
15387
|
+
signature: [255, 251],
|
|
15388
|
+
mimeType: "audio/mpeg"
|
|
15389
|
+
},
|
|
15390
|
+
{
|
|
15391
|
+
signature: [255, 243],
|
|
15392
|
+
mimeType: "audio/mpeg"
|
|
15393
|
+
},
|
|
15394
|
+
{
|
|
15395
|
+
signature: [255, 242],
|
|
15396
|
+
mimeType: "audio/mpeg"
|
|
15397
|
+
},
|
|
15398
|
+
{
|
|
15399
|
+
signature: [
|
|
15400
|
+
73,
|
|
15401
|
+
68,
|
|
15402
|
+
51
|
|
15403
|
+
],
|
|
15404
|
+
mimeType: "audio/mpeg"
|
|
15303
15405
|
}
|
|
15304
|
-
|
|
15305
|
-
|
|
15306
|
-
|
|
15307
|
-
|
|
15308
|
-
|
|
15309
|
-
|
|
15310
|
-
|
|
15311
|
-
|
|
15312
|
-
|
|
15313
|
-
|
|
15314
|
-
|
|
15315
|
-
|
|
15316
|
-
|
|
15317
|
-
|
|
15406
|
+
],
|
|
15407
|
+
wav: [{
|
|
15408
|
+
signature: [
|
|
15409
|
+
82,
|
|
15410
|
+
73,
|
|
15411
|
+
70,
|
|
15412
|
+
70,
|
|
15413
|
+
null,
|
|
15414
|
+
null,
|
|
15415
|
+
null,
|
|
15416
|
+
null,
|
|
15417
|
+
87,
|
|
15418
|
+
65,
|
|
15419
|
+
86,
|
|
15420
|
+
69
|
|
15421
|
+
],
|
|
15422
|
+
mimeType: "audio/wav"
|
|
15423
|
+
}],
|
|
15424
|
+
ogg: [{
|
|
15425
|
+
signature: [
|
|
15426
|
+
79,
|
|
15427
|
+
103,
|
|
15428
|
+
103,
|
|
15429
|
+
83
|
|
15430
|
+
],
|
|
15431
|
+
mimeType: "audio/ogg"
|
|
15432
|
+
}],
|
|
15433
|
+
flac: [{
|
|
15434
|
+
signature: [
|
|
15435
|
+
102,
|
|
15436
|
+
76,
|
|
15437
|
+
97,
|
|
15438
|
+
67
|
|
15439
|
+
],
|
|
15440
|
+
mimeType: "audio/flac"
|
|
15441
|
+
}],
|
|
15442
|
+
mp4: [
|
|
15443
|
+
{
|
|
15444
|
+
signature: [
|
|
15445
|
+
null,
|
|
15446
|
+
null,
|
|
15447
|
+
null,
|
|
15448
|
+
null,
|
|
15449
|
+
102,
|
|
15450
|
+
116,
|
|
15451
|
+
121,
|
|
15452
|
+
112
|
|
15453
|
+
],
|
|
15454
|
+
mimeType: "video/mp4"
|
|
15455
|
+
},
|
|
15456
|
+
{
|
|
15457
|
+
signature: [
|
|
15458
|
+
null,
|
|
15459
|
+
null,
|
|
15460
|
+
null,
|
|
15461
|
+
null,
|
|
15462
|
+
102,
|
|
15463
|
+
116,
|
|
15464
|
+
121,
|
|
15465
|
+
112,
|
|
15466
|
+
105,
|
|
15467
|
+
115,
|
|
15468
|
+
111,
|
|
15469
|
+
109
|
|
15470
|
+
],
|
|
15471
|
+
mimeType: "video/mp4"
|
|
15472
|
+
},
|
|
15473
|
+
{
|
|
15474
|
+
signature: [
|
|
15475
|
+
null,
|
|
15476
|
+
null,
|
|
15477
|
+
null,
|
|
15478
|
+
null,
|
|
15479
|
+
102,
|
|
15480
|
+
116,
|
|
15481
|
+
121,
|
|
15482
|
+
112,
|
|
15483
|
+
109,
|
|
15484
|
+
112,
|
|
15485
|
+
52,
|
|
15486
|
+
50
|
|
15487
|
+
],
|
|
15488
|
+
mimeType: "video/mp4"
|
|
15489
|
+
}
|
|
15490
|
+
],
|
|
15491
|
+
webm: [{
|
|
15492
|
+
signature: [
|
|
15493
|
+
26,
|
|
15494
|
+
69,
|
|
15495
|
+
223,
|
|
15496
|
+
163
|
|
15497
|
+
],
|
|
15498
|
+
mimeType: "video/webm"
|
|
15499
|
+
}],
|
|
15500
|
+
avi: [{
|
|
15501
|
+
signature: [
|
|
15502
|
+
82,
|
|
15503
|
+
73,
|
|
15504
|
+
70,
|
|
15505
|
+
70,
|
|
15506
|
+
null,
|
|
15507
|
+
null,
|
|
15508
|
+
null,
|
|
15509
|
+
null,
|
|
15510
|
+
65,
|
|
15511
|
+
86,
|
|
15512
|
+
73,
|
|
15513
|
+
32
|
|
15514
|
+
],
|
|
15515
|
+
mimeType: "video/x-msvideo"
|
|
15516
|
+
}],
|
|
15517
|
+
mov: [{
|
|
15518
|
+
signature: [
|
|
15519
|
+
null,
|
|
15520
|
+
null,
|
|
15521
|
+
null,
|
|
15522
|
+
null,
|
|
15523
|
+
102,
|
|
15524
|
+
116,
|
|
15525
|
+
121,
|
|
15526
|
+
112,
|
|
15527
|
+
113,
|
|
15528
|
+
116,
|
|
15529
|
+
32,
|
|
15530
|
+
32
|
|
15531
|
+
],
|
|
15532
|
+
mimeType: "video/quicktime"
|
|
15533
|
+
}],
|
|
15534
|
+
mkv: [{
|
|
15535
|
+
signature: [
|
|
15536
|
+
26,
|
|
15537
|
+
69,
|
|
15538
|
+
223,
|
|
15539
|
+
163
|
|
15540
|
+
],
|
|
15541
|
+
mimeType: "video/x-matroska"
|
|
15542
|
+
}],
|
|
15543
|
+
docx: [{
|
|
15544
|
+
signature: [
|
|
15545
|
+
80,
|
|
15546
|
+
75,
|
|
15547
|
+
3,
|
|
15548
|
+
4
|
|
15549
|
+
],
|
|
15550
|
+
mimeType: "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
|
|
15551
|
+
}],
|
|
15552
|
+
xlsx: [{
|
|
15553
|
+
signature: [
|
|
15554
|
+
80,
|
|
15555
|
+
75,
|
|
15556
|
+
3,
|
|
15557
|
+
4
|
|
15558
|
+
],
|
|
15559
|
+
mimeType: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
|
|
15560
|
+
}],
|
|
15561
|
+
pptx: [{
|
|
15562
|
+
signature: [
|
|
15563
|
+
80,
|
|
15564
|
+
75,
|
|
15565
|
+
3,
|
|
15566
|
+
4
|
|
15567
|
+
],
|
|
15568
|
+
mimeType: "application/vnd.openxmlformats-officedocument.presentationml.presentation"
|
|
15569
|
+
}],
|
|
15570
|
+
doc: [{
|
|
15571
|
+
signature: [
|
|
15572
|
+
208,
|
|
15573
|
+
207,
|
|
15574
|
+
17,
|
|
15575
|
+
224,
|
|
15576
|
+
161,
|
|
15577
|
+
177,
|
|
15578
|
+
26,
|
|
15579
|
+
225
|
|
15580
|
+
],
|
|
15581
|
+
mimeType: "application/msword"
|
|
15582
|
+
}],
|
|
15583
|
+
xls: [{
|
|
15584
|
+
signature: [
|
|
15585
|
+
208,
|
|
15586
|
+
207,
|
|
15587
|
+
17,
|
|
15588
|
+
224,
|
|
15589
|
+
161,
|
|
15590
|
+
177,
|
|
15591
|
+
26,
|
|
15592
|
+
225
|
|
15593
|
+
],
|
|
15594
|
+
mimeType: "application/vnd.ms-excel"
|
|
15595
|
+
}],
|
|
15596
|
+
ppt: [{
|
|
15597
|
+
signature: [
|
|
15598
|
+
208,
|
|
15599
|
+
207,
|
|
15600
|
+
17,
|
|
15601
|
+
224,
|
|
15602
|
+
161,
|
|
15603
|
+
177,
|
|
15604
|
+
26,
|
|
15605
|
+
225
|
|
15606
|
+
],
|
|
15607
|
+
mimeType: "application/vnd.ms-powerpoint"
|
|
15608
|
+
}]
|
|
15609
|
+
};
|
|
15610
|
+
/**
|
|
15611
|
+
* All possible format signatures for checking against actual file content
|
|
15612
|
+
*/
|
|
15613
|
+
static ALL_SIGNATURES = Object.entries(FileDetector.MAGIC_BYTES).flatMap(([ext, signatures]) => signatures.map((sig) => ({
|
|
15614
|
+
ext,
|
|
15615
|
+
...sig
|
|
15616
|
+
})));
|
|
15617
|
+
/**
|
|
15618
|
+
* MIME type map for file extensions.
|
|
15619
|
+
*
|
|
15620
|
+
* Can be used to get the content type of file based on its extension.
|
|
15621
|
+
* Feel free to add more mime types in your project!
|
|
15622
|
+
*/
|
|
15623
|
+
static mimeMap = {
|
|
15624
|
+
json: "application/json",
|
|
15625
|
+
txt: "text/plain",
|
|
15626
|
+
html: "text/html",
|
|
15627
|
+
htm: "text/html",
|
|
15628
|
+
xml: "application/xml",
|
|
15629
|
+
csv: "text/csv",
|
|
15630
|
+
pdf: "application/pdf",
|
|
15631
|
+
md: "text/markdown",
|
|
15632
|
+
markdown: "text/markdown",
|
|
15633
|
+
rtf: "application/rtf",
|
|
15634
|
+
css: "text/css",
|
|
15635
|
+
js: "application/javascript",
|
|
15636
|
+
mjs: "application/javascript",
|
|
15637
|
+
ts: "application/typescript",
|
|
15638
|
+
jsx: "text/jsx",
|
|
15639
|
+
tsx: "text/tsx",
|
|
15640
|
+
zip: "application/zip",
|
|
15641
|
+
rar: "application/vnd.rar",
|
|
15642
|
+
"7z": "application/x-7z-compressed",
|
|
15643
|
+
tar: "application/x-tar",
|
|
15644
|
+
gz: "application/gzip",
|
|
15645
|
+
tgz: "application/gzip",
|
|
15646
|
+
png: "image/png",
|
|
15647
|
+
jpg: "image/jpeg",
|
|
15648
|
+
jpeg: "image/jpeg",
|
|
15649
|
+
gif: "image/gif",
|
|
15650
|
+
webp: "image/webp",
|
|
15651
|
+
svg: "image/svg+xml",
|
|
15652
|
+
bmp: "image/bmp",
|
|
15653
|
+
ico: "image/x-icon",
|
|
15654
|
+
tiff: "image/tiff",
|
|
15655
|
+
tif: "image/tiff",
|
|
15656
|
+
mp3: "audio/mpeg",
|
|
15657
|
+
wav: "audio/wav",
|
|
15658
|
+
ogg: "audio/ogg",
|
|
15659
|
+
m4a: "audio/mp4",
|
|
15660
|
+
aac: "audio/aac",
|
|
15661
|
+
flac: "audio/flac",
|
|
15662
|
+
mp4: "video/mp4",
|
|
15663
|
+
webm: "video/webm",
|
|
15664
|
+
avi: "video/x-msvideo",
|
|
15665
|
+
mov: "video/quicktime",
|
|
15666
|
+
wmv: "video/x-ms-wmv",
|
|
15667
|
+
flv: "video/x-flv",
|
|
15668
|
+
mkv: "video/x-matroska",
|
|
15669
|
+
doc: "application/msword",
|
|
15670
|
+
docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
|
15671
|
+
xls: "application/vnd.ms-excel",
|
|
15672
|
+
xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
|
15673
|
+
ppt: "application/vnd.ms-powerpoint",
|
|
15674
|
+
pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
|
|
15675
|
+
woff: "font/woff",
|
|
15676
|
+
woff2: "font/woff2",
|
|
15677
|
+
ttf: "font/ttf",
|
|
15678
|
+
otf: "font/otf",
|
|
15679
|
+
eot: "application/vnd.ms-fontobject"
|
|
15680
|
+
};
|
|
15681
|
+
/**
|
|
15682
|
+
* Returns the content type of file based on its filename.
|
|
15683
|
+
*
|
|
15684
|
+
* @param filename - The filename to check
|
|
15685
|
+
* @returns The MIME type
|
|
15686
|
+
*
|
|
15687
|
+
* @example
|
|
15688
|
+
* ```typescript
|
|
15689
|
+
* const detector = alepha.inject(FileDetector);
|
|
15690
|
+
* const mimeType = detector.getContentType("image.png"); // "image/png"
|
|
15691
|
+
* ```
|
|
15692
|
+
*/
|
|
15693
|
+
getContentType(filename) {
|
|
15694
|
+
const ext = filename.toLowerCase().split(".").pop() || "";
|
|
15695
|
+
return FileDetector.mimeMap[ext] || "application/octet-stream";
|
|
15696
|
+
}
|
|
15697
|
+
/**
|
|
15698
|
+
* Detects the file type by checking magic bytes against the stream content.
|
|
15699
|
+
*
|
|
15700
|
+
* @param stream - The readable stream to check
|
|
15701
|
+
* @param filename - The filename (used to get the extension)
|
|
15702
|
+
* @returns File type information including MIME type, extension, and verification status
|
|
15703
|
+
*
|
|
15704
|
+
* @example
|
|
15705
|
+
* ```typescript
|
|
15706
|
+
* const detector = alepha.inject(FileDetector);
|
|
15707
|
+
* const stream = createReadStream('image.png');
|
|
15708
|
+
* const result = await detector.detectFileType(stream, 'image.png');
|
|
15709
|
+
* console.log(result.mimeType); // 'image/png'
|
|
15710
|
+
* console.log(result.verified); // true if magic bytes match
|
|
15711
|
+
* ```
|
|
15712
|
+
*/
|
|
15713
|
+
async detectFileType(stream, filename) {
|
|
15714
|
+
const expectedMimeType = this.getContentType(filename);
|
|
15715
|
+
const lastDotIndex = filename.lastIndexOf(".");
|
|
15716
|
+
const ext = lastDotIndex > 0 ? filename.substring(lastDotIndex + 1).toLowerCase() : "";
|
|
15717
|
+
const { buffer, stream: newStream } = await this.peekBytes(stream, 16);
|
|
15718
|
+
const expectedSignatures = FileDetector.MAGIC_BYTES[ext];
|
|
15719
|
+
if (expectedSignatures) {
|
|
15720
|
+
for (const { signature, mimeType } of expectedSignatures) if (this.matchesSignature(buffer, signature)) return {
|
|
15721
|
+
mimeType,
|
|
15722
|
+
extension: ext,
|
|
15723
|
+
verified: true,
|
|
15724
|
+
stream: newStream
|
|
15725
|
+
};
|
|
15726
|
+
}
|
|
15727
|
+
for (const { ext: detectedExt, signature, mimeType } of FileDetector.ALL_SIGNATURES) if (detectedExt !== ext && this.matchesSignature(buffer, signature)) return {
|
|
15728
|
+
mimeType,
|
|
15729
|
+
extension: detectedExt,
|
|
15730
|
+
verified: true,
|
|
15731
|
+
stream: newStream
|
|
15732
|
+
};
|
|
15733
|
+
return {
|
|
15734
|
+
mimeType: expectedMimeType,
|
|
15735
|
+
extension: ext,
|
|
15736
|
+
verified: false,
|
|
15737
|
+
stream: newStream
|
|
15738
|
+
};
|
|
15739
|
+
}
|
|
15740
|
+
/**
|
|
15741
|
+
* Reads all bytes from a stream and returns the first N bytes along with a new stream containing all data.
|
|
15742
|
+
* This approach reads the entire stream upfront to avoid complex async handling issues.
|
|
15743
|
+
*
|
|
15744
|
+
* @protected
|
|
15745
|
+
*/
|
|
15746
|
+
async peekBytes(stream, numBytes) {
|
|
15747
|
+
const chunks = [];
|
|
15748
|
+
for await (const chunk of stream) chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
|
15749
|
+
const allData = Buffer.concat(chunks);
|
|
15750
|
+
return {
|
|
15751
|
+
buffer: allData.subarray(0, numBytes),
|
|
15752
|
+
stream: node_stream.Readable.from(allData)
|
|
15753
|
+
};
|
|
15754
|
+
}
|
|
15755
|
+
/**
|
|
15756
|
+
* Checks if a buffer matches a magic byte signature.
|
|
15757
|
+
*
|
|
15758
|
+
* @protected
|
|
15759
|
+
*/
|
|
15760
|
+
matchesSignature(buffer, signature) {
|
|
15761
|
+
if (buffer.length < signature.length) return false;
|
|
15762
|
+
for (let i = 0; i < signature.length; i++) if (signature[i] !== null && buffer[i] !== signature[i]) return false;
|
|
15763
|
+
return true;
|
|
15764
|
+
}
|
|
15765
|
+
};
|
|
15766
|
+
|
|
15767
|
+
//#endregion
|
|
15768
|
+
//#region ../file/src/providers/NodeFileSystemProvider.ts
|
|
15769
|
+
/**
|
|
15770
|
+
* Node.js implementation of FileSystem interface.
|
|
15771
|
+
*
|
|
15772
|
+
* @example
|
|
15773
|
+
* ```typescript
|
|
15774
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15775
|
+
*
|
|
15776
|
+
* // Create from URL
|
|
15777
|
+
* const file1 = fs.createFile({ url: "file:///path/to/file.png" });
|
|
15778
|
+
*
|
|
15779
|
+
* // Create from Buffer
|
|
15780
|
+
* const file2 = fs.createFile({ buffer: Buffer.from("hello"), name: "hello.txt" });
|
|
15781
|
+
*
|
|
15782
|
+
* // Create from text
|
|
15783
|
+
* const file3 = fs.createFile({ text: "Hello, world!", name: "greeting.txt" });
|
|
15784
|
+
*
|
|
15785
|
+
* // File operations
|
|
15786
|
+
* await fs.mkdir("/tmp/mydir", { recursive: true });
|
|
15787
|
+
* await fs.cp("/src/file.txt", "/dest/file.txt");
|
|
15788
|
+
* await fs.mv("/old/path.txt", "/new/path.txt");
|
|
15789
|
+
* const files = await fs.ls("/tmp");
|
|
15790
|
+
* await fs.rm("/tmp/file.txt");
|
|
15791
|
+
* ```
|
|
15792
|
+
*/
|
|
15793
|
+
var NodeFileSystemProvider = class {
|
|
15794
|
+
detector = $inject(FileDetector);
|
|
15795
|
+
/**
|
|
15796
|
+
* Creates a FileLike object from various sources.
|
|
15797
|
+
*
|
|
15798
|
+
* @param options - Options for creating the file
|
|
15799
|
+
* @returns A FileLike object
|
|
15800
|
+
*
|
|
15801
|
+
* @example
|
|
15802
|
+
* ```typescript
|
|
15803
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15804
|
+
*
|
|
15805
|
+
* // From URL
|
|
15806
|
+
* const file1 = fs.createFile({ url: "https://example.com/image.png" });
|
|
15807
|
+
*
|
|
15808
|
+
* // From Buffer
|
|
15809
|
+
* const file2 = fs.createFile({
|
|
15810
|
+
* buffer: Buffer.from("hello"),
|
|
15811
|
+
* name: "hello.txt",
|
|
15812
|
+
* type: "text/plain"
|
|
15813
|
+
* });
|
|
15814
|
+
*
|
|
15815
|
+
* // From text
|
|
15816
|
+
* const file3 = fs.createFile({ text: "Hello!", name: "greeting.txt" });
|
|
15817
|
+
*
|
|
15818
|
+
* // From stream with detection
|
|
15819
|
+
* const stream = createReadStream("/path/to/file.png");
|
|
15820
|
+
* const file4 = fs.createFile({ stream, name: "image.png" });
|
|
15821
|
+
* ```
|
|
15822
|
+
*/
|
|
15823
|
+
createFile(options) {
|
|
15824
|
+
if ("url" in options) return this.createFileFromUrl(options.url, {
|
|
15825
|
+
type: options.type,
|
|
15826
|
+
name: options.name
|
|
15827
|
+
});
|
|
15828
|
+
if ("file" in options) return this.createFileFromWebFile(options.file, {
|
|
15829
|
+
type: options.type,
|
|
15830
|
+
name: options.name,
|
|
15831
|
+
size: options.size
|
|
15832
|
+
});
|
|
15833
|
+
if ("buffer" in options) return this.createFileFromBuffer(options.buffer, {
|
|
15834
|
+
type: options.type,
|
|
15835
|
+
name: options.name
|
|
15836
|
+
});
|
|
15837
|
+
if ("arrayBuffer" in options) return this.createFileFromBuffer(Buffer.from(options.arrayBuffer), {
|
|
15838
|
+
type: options.type,
|
|
15839
|
+
name: options.name
|
|
15840
|
+
});
|
|
15841
|
+
if ("text" in options) return this.createFileFromBuffer(Buffer.from(options.text, "utf-8"), {
|
|
15842
|
+
type: options.type || "text/plain",
|
|
15843
|
+
name: options.name || "file.txt"
|
|
15844
|
+
});
|
|
15845
|
+
if ("stream" in options) return this.createFileFromStream(options.stream, {
|
|
15846
|
+
type: options.type,
|
|
15847
|
+
name: options.name,
|
|
15848
|
+
size: options.size
|
|
15849
|
+
});
|
|
15850
|
+
throw new AlephaError("Invalid createFile options: no valid source provided");
|
|
15851
|
+
}
|
|
15852
|
+
/**
|
|
15853
|
+
* Removes a file or directory.
|
|
15854
|
+
*
|
|
15855
|
+
* @param path - The path to remove
|
|
15856
|
+
* @param options - Remove options
|
|
15857
|
+
*
|
|
15858
|
+
* @example
|
|
15859
|
+
* ```typescript
|
|
15860
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15861
|
+
*
|
|
15862
|
+
* // Remove a file
|
|
15863
|
+
* await fs.rm("/tmp/file.txt");
|
|
15864
|
+
*
|
|
15865
|
+
* // Remove a directory recursively
|
|
15866
|
+
* await fs.rm("/tmp/mydir", { recursive: true });
|
|
15867
|
+
*
|
|
15868
|
+
* // Remove with force (no error if doesn't exist)
|
|
15869
|
+
* await fs.rm("/tmp/maybe-exists.txt", { force: true });
|
|
15870
|
+
* ```
|
|
15871
|
+
*/
|
|
15872
|
+
async rm(path, options) {
|
|
15873
|
+
await (0, node_fs_promises.rm)(path, options);
|
|
15874
|
+
}
|
|
15875
|
+
/**
|
|
15876
|
+
* Copies a file or directory.
|
|
15877
|
+
*
|
|
15878
|
+
* @param src - Source path
|
|
15879
|
+
* @param dest - Destination path
|
|
15880
|
+
* @param options - Copy options
|
|
15881
|
+
*
|
|
15882
|
+
* @example
|
|
15883
|
+
* ```typescript
|
|
15884
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15885
|
+
*
|
|
15886
|
+
* // Copy a file
|
|
15887
|
+
* await fs.cp("/src/file.txt", "/dest/file.txt");
|
|
15888
|
+
*
|
|
15889
|
+
* // Copy a directory recursively
|
|
15890
|
+
* await fs.cp("/src/dir", "/dest/dir", { recursive: true });
|
|
15891
|
+
*
|
|
15892
|
+
* // Copy with force (overwrite existing)
|
|
15893
|
+
* await fs.cp("/src/file.txt", "/dest/file.txt", { force: true });
|
|
15894
|
+
* ```
|
|
15895
|
+
*/
|
|
15896
|
+
async cp(src, dest, options) {
|
|
15897
|
+
if ((await (0, node_fs_promises.stat)(src)).isDirectory()) {
|
|
15898
|
+
if (!options?.recursive) throw new Error(`Cannot copy directory without recursive option: ${src}`);
|
|
15899
|
+
await (0, node_fs_promises.cp)(src, dest, {
|
|
15900
|
+
recursive: true,
|
|
15901
|
+
force: options?.force ?? false
|
|
15902
|
+
});
|
|
15903
|
+
} else await (0, node_fs_promises.copyFile)(src, dest);
|
|
15904
|
+
}
|
|
15905
|
+
/**
|
|
15906
|
+
* Moves/renames a file or directory.
|
|
15907
|
+
*
|
|
15908
|
+
* @param src - Source path
|
|
15909
|
+
* @param dest - Destination path
|
|
15910
|
+
*
|
|
15911
|
+
* @example
|
|
15912
|
+
* ```typescript
|
|
15913
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15914
|
+
*
|
|
15915
|
+
* // Move/rename a file
|
|
15916
|
+
* await fs.mv("/old/path.txt", "/new/path.txt");
|
|
15917
|
+
*
|
|
15918
|
+
* // Move a directory
|
|
15919
|
+
* await fs.mv("/old/dir", "/new/dir");
|
|
15920
|
+
* ```
|
|
15921
|
+
*/
|
|
15922
|
+
async mv(src, dest) {
|
|
15923
|
+
await (0, node_fs_promises.rename)(src, dest);
|
|
15924
|
+
}
|
|
15925
|
+
/**
|
|
15926
|
+
* Creates a directory.
|
|
15927
|
+
*
|
|
15928
|
+
* @param path - The directory path to create
|
|
15929
|
+
* @param options - Mkdir options
|
|
15930
|
+
*
|
|
15931
|
+
* @example
|
|
15932
|
+
* ```typescript
|
|
15933
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15934
|
+
*
|
|
15935
|
+
* // Create a directory
|
|
15936
|
+
* await fs.mkdir("/tmp/mydir");
|
|
15937
|
+
*
|
|
15938
|
+
* // Create nested directories
|
|
15939
|
+
* await fs.mkdir("/tmp/path/to/dir", { recursive: true });
|
|
15940
|
+
*
|
|
15941
|
+
* // Create with specific permissions
|
|
15942
|
+
* await fs.mkdir("/tmp/mydir", { mode: 0o755 });
|
|
15943
|
+
* ```
|
|
15944
|
+
*/
|
|
15945
|
+
async mkdir(path, options) {
|
|
15946
|
+
await (0, node_fs_promises.mkdir)(path, options);
|
|
15947
|
+
}
|
|
15948
|
+
/**
|
|
15949
|
+
* Lists files in a directory.
|
|
15950
|
+
*
|
|
15951
|
+
* @param path - The directory path to list
|
|
15952
|
+
* @param options - List options
|
|
15953
|
+
* @returns Array of filenames
|
|
15954
|
+
*
|
|
15955
|
+
* @example
|
|
15956
|
+
* ```typescript
|
|
15957
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15958
|
+
*
|
|
15959
|
+
* // List files in a directory
|
|
15960
|
+
* const files = await fs.ls("/tmp");
|
|
15961
|
+
* console.log(files); // ["file1.txt", "file2.txt", "subdir"]
|
|
15962
|
+
*
|
|
15963
|
+
* // List with hidden files
|
|
15964
|
+
* const allFiles = await fs.ls("/tmp", { hidden: true });
|
|
15965
|
+
*
|
|
15966
|
+
* // List recursively
|
|
15967
|
+
* const allFilesRecursive = await fs.ls("/tmp", { recursive: true });
|
|
15968
|
+
* ```
|
|
15969
|
+
*/
|
|
15970
|
+
async ls(path, options) {
|
|
15971
|
+
const entries = await (0, node_fs_promises.readdir)(path);
|
|
15972
|
+
const filteredEntries = options?.hidden ? entries : entries.filter((e) => !e.startsWith("."));
|
|
15973
|
+
if (options?.recursive) {
|
|
15974
|
+
const allFiles = [];
|
|
15975
|
+
for (const entry of filteredEntries) {
|
|
15976
|
+
const fullPath = (0, node_path.join)(path, entry);
|
|
15977
|
+
if ((await (0, node_fs_promises.stat)(fullPath)).isDirectory()) {
|
|
15978
|
+
allFiles.push(entry);
|
|
15979
|
+
const subFiles = await this.ls(fullPath, options);
|
|
15980
|
+
allFiles.push(...subFiles.map((f) => (0, node_path.join)(entry, f)));
|
|
15981
|
+
} else allFiles.push(entry);
|
|
15982
|
+
}
|
|
15983
|
+
return allFiles;
|
|
15984
|
+
}
|
|
15985
|
+
return filteredEntries;
|
|
15986
|
+
}
|
|
15987
|
+
/**
|
|
15988
|
+
* Checks if a file or directory exists.
|
|
15989
|
+
*
|
|
15990
|
+
* @param path - The path to check
|
|
15991
|
+
* @returns True if the path exists, false otherwise
|
|
15992
|
+
*
|
|
15993
|
+
* @example
|
|
15994
|
+
* ```typescript
|
|
15995
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
15996
|
+
*
|
|
15997
|
+
* if (await fs.exists("/tmp/file.txt")) {
|
|
15998
|
+
* console.log("File exists");
|
|
15999
|
+
* }
|
|
16000
|
+
* ```
|
|
16001
|
+
*/
|
|
16002
|
+
async exists(path) {
|
|
16003
|
+
try {
|
|
16004
|
+
await (0, node_fs_promises.access)(path);
|
|
16005
|
+
return true;
|
|
16006
|
+
} catch {
|
|
16007
|
+
return false;
|
|
16008
|
+
}
|
|
16009
|
+
}
|
|
16010
|
+
/**
|
|
16011
|
+
* Reads the content of a file.
|
|
16012
|
+
*
|
|
16013
|
+
* @param path - The file path to read
|
|
16014
|
+
* @returns The file content as a Buffer
|
|
16015
|
+
*
|
|
16016
|
+
* @example
|
|
16017
|
+
* ```typescript
|
|
16018
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
16019
|
+
*
|
|
16020
|
+
* const buffer = await fs.readFile("/tmp/file.txt");
|
|
16021
|
+
* console.log(buffer.toString("utf-8"));
|
|
16022
|
+
* ```
|
|
16023
|
+
*/
|
|
16024
|
+
async readFile(path) {
|
|
16025
|
+
return await (0, node_fs_promises.readFile)(path);
|
|
16026
|
+
}
|
|
16027
|
+
/**
|
|
16028
|
+
* Writes data to a file.
|
|
16029
|
+
*
|
|
16030
|
+
* @param path - The file path to write to
|
|
16031
|
+
* @param data - The data to write (Buffer or string)
|
|
16032
|
+
*
|
|
16033
|
+
* @example
|
|
16034
|
+
* ```typescript
|
|
16035
|
+
* const fs = alepha.inject(NodeFileSystemProvider);
|
|
16036
|
+
*
|
|
16037
|
+
* // Write string
|
|
16038
|
+
* await fs.writeFile("/tmp/file.txt", "Hello, world!");
|
|
16039
|
+
*
|
|
16040
|
+
* // Write Buffer
|
|
16041
|
+
* await fs.writeFile("/tmp/file.bin", Buffer.from([0x01, 0x02, 0x03]));
|
|
16042
|
+
* ```
|
|
16043
|
+
*/
|
|
16044
|
+
async writeFile(path, data) {
|
|
16045
|
+
await (0, node_fs_promises.writeFile)(path, data);
|
|
16046
|
+
}
|
|
16047
|
+
/**
|
|
16048
|
+
* Creates a FileLike object from a Web File.
|
|
16049
|
+
*
|
|
16050
|
+
* @protected
|
|
16051
|
+
*/
|
|
16052
|
+
createFileFromWebFile(source, options = {}) {
|
|
16053
|
+
const name = options.name ?? source.name;
|
|
16054
|
+
return {
|
|
16055
|
+
name,
|
|
16056
|
+
type: options.type ?? (source.type || this.detector.getContentType(name)),
|
|
16057
|
+
size: options.size ?? source.size ?? 0,
|
|
16058
|
+
lastModified: source.lastModified || Date.now(),
|
|
16059
|
+
stream: () => source.stream(),
|
|
16060
|
+
arrayBuffer: async () => {
|
|
16061
|
+
return await source.arrayBuffer();
|
|
16062
|
+
},
|
|
16063
|
+
text: async () => {
|
|
16064
|
+
return await source.text();
|
|
16065
|
+
}
|
|
16066
|
+
};
|
|
16067
|
+
}
|
|
16068
|
+
/**
|
|
16069
|
+
* Creates a FileLike object from a Buffer.
|
|
16070
|
+
*
|
|
16071
|
+
* @protected
|
|
16072
|
+
*/
|
|
16073
|
+
createFileFromBuffer(source, options = {}) {
|
|
16074
|
+
const name = options.name ?? "file";
|
|
16075
|
+
return {
|
|
16076
|
+
name,
|
|
16077
|
+
type: options.type ?? this.detector.getContentType(options.name ?? name),
|
|
16078
|
+
size: source.byteLength,
|
|
16079
|
+
lastModified: Date.now(),
|
|
16080
|
+
stream: () => node_stream.Readable.from(source),
|
|
16081
|
+
arrayBuffer: async () => {
|
|
16082
|
+
return this.bufferToArrayBuffer(source);
|
|
16083
|
+
},
|
|
16084
|
+
text: async () => {
|
|
16085
|
+
return source.toString("utf-8");
|
|
16086
|
+
}
|
|
16087
|
+
};
|
|
16088
|
+
}
|
|
16089
|
+
/**
|
|
16090
|
+
* Creates a FileLike object from a stream.
|
|
16091
|
+
*
|
|
16092
|
+
* @protected
|
|
16093
|
+
*/
|
|
16094
|
+
createFileFromStream(source, options = {}) {
|
|
16095
|
+
let buffer = null;
|
|
16096
|
+
return {
|
|
16097
|
+
name: options.name ?? "file",
|
|
16098
|
+
type: options.type ?? this.detector.getContentType(options.name ?? "file"),
|
|
16099
|
+
size: options.size ?? 0,
|
|
16100
|
+
lastModified: Date.now(),
|
|
16101
|
+
stream: () => source,
|
|
16102
|
+
_buffer: null,
|
|
16103
|
+
arrayBuffer: async () => {
|
|
16104
|
+
buffer ??= await this.streamToBuffer(source);
|
|
16105
|
+
return this.bufferToArrayBuffer(buffer);
|
|
16106
|
+
},
|
|
16107
|
+
text: async () => {
|
|
16108
|
+
buffer ??= await this.streamToBuffer(source);
|
|
16109
|
+
return buffer.toString("utf-8");
|
|
16110
|
+
}
|
|
16111
|
+
};
|
|
16112
|
+
}
|
|
16113
|
+
/**
|
|
16114
|
+
* Creates a FileLike object from a URL.
|
|
16115
|
+
*
|
|
16116
|
+
* @protected
|
|
16117
|
+
*/
|
|
16118
|
+
createFileFromUrl(url, options = {}) {
|
|
16119
|
+
const parsedUrl = new URL(url);
|
|
16120
|
+
const filename = options.name || parsedUrl.pathname.split("/").pop() || "file";
|
|
16121
|
+
let buffer = null;
|
|
16122
|
+
return {
|
|
16123
|
+
name: filename,
|
|
16124
|
+
type: options.type ?? this.detector.getContentType(filename),
|
|
16125
|
+
size: 0,
|
|
16126
|
+
lastModified: Date.now(),
|
|
16127
|
+
stream: () => this.createStreamFromUrl(url),
|
|
16128
|
+
arrayBuffer: async () => {
|
|
16129
|
+
buffer ??= await this.loadFromUrl(url);
|
|
16130
|
+
return this.bufferToArrayBuffer(buffer);
|
|
16131
|
+
},
|
|
16132
|
+
text: async () => {
|
|
16133
|
+
buffer ??= await this.loadFromUrl(url);
|
|
16134
|
+
return buffer.toString("utf-8");
|
|
16135
|
+
},
|
|
16136
|
+
filepath: url
|
|
16137
|
+
};
|
|
16138
|
+
}
|
|
16139
|
+
/**
|
|
16140
|
+
* Gets a streaming response from a URL.
|
|
16141
|
+
*
|
|
16142
|
+
* @protected
|
|
16143
|
+
*/
|
|
16144
|
+
getStreamingResponse(url) {
|
|
16145
|
+
const stream = new node_stream.PassThrough();
|
|
16146
|
+
fetch(url).then((res) => node_stream.Readable.fromWeb(res.body).pipe(stream)).catch((err) => stream.destroy(err));
|
|
16147
|
+
return stream;
|
|
16148
|
+
}
|
|
16149
|
+
/**
|
|
16150
|
+
* Loads data from a URL.
|
|
16151
|
+
*
|
|
16152
|
+
* @protected
|
|
16153
|
+
*/
|
|
16154
|
+
async loadFromUrl(url) {
|
|
16155
|
+
const parsedUrl = new URL(url);
|
|
16156
|
+
if (parsedUrl.protocol === "file:") return await (0, node_fs_promises.readFile)((0, node_url.fileURLToPath)(url));
|
|
16157
|
+
else if (parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:") {
|
|
16158
|
+
const response = await fetch(url);
|
|
16159
|
+
if (!response.ok) throw new Error(`Failed to fetch ${url}: ${response.status} ${response.statusText}`);
|
|
16160
|
+
const arrayBuffer = await response.arrayBuffer();
|
|
16161
|
+
return Buffer.from(arrayBuffer);
|
|
16162
|
+
} else throw new Error(`Unsupported protocol: ${parsedUrl.protocol}`);
|
|
16163
|
+
}
|
|
16164
|
+
/**
|
|
16165
|
+
* Creates a stream from a URL.
|
|
16166
|
+
*
|
|
16167
|
+
* @protected
|
|
16168
|
+
*/
|
|
16169
|
+
createStreamFromUrl(url) {
|
|
16170
|
+
const parsedUrl = new URL(url);
|
|
16171
|
+
if (parsedUrl.protocol === "file:") return (0, node_fs.createReadStream)((0, node_url.fileURLToPath)(url));
|
|
16172
|
+
else if (parsedUrl.protocol === "http:" || parsedUrl.protocol === "https:") return this.getStreamingResponse(url);
|
|
16173
|
+
else throw new AlephaError(`Unsupported protocol: ${parsedUrl.protocol}`);
|
|
16174
|
+
}
|
|
16175
|
+
/**
|
|
16176
|
+
* Converts a stream-like object to a Buffer.
|
|
16177
|
+
*
|
|
16178
|
+
* @protected
|
|
16179
|
+
*/
|
|
16180
|
+
async streamToBuffer(streamLike) {
|
|
16181
|
+
const stream = streamLike instanceof node_stream.Readable ? streamLike : node_stream.Readable.fromWeb(streamLike);
|
|
16182
|
+
return new Promise((resolve, reject) => {
|
|
16183
|
+
const buffer = [];
|
|
16184
|
+
stream.on("data", (chunk) => buffer.push(Buffer.from(chunk)));
|
|
16185
|
+
stream.on("end", () => resolve(Buffer.concat(buffer)));
|
|
16186
|
+
stream.on("error", (err) => reject(new AlephaError("Error converting stream", { cause: err })));
|
|
16187
|
+
});
|
|
16188
|
+
}
|
|
16189
|
+
/**
|
|
16190
|
+
* Converts a Node.js Buffer to an ArrayBuffer.
|
|
16191
|
+
*
|
|
16192
|
+
* @protected
|
|
16193
|
+
*/
|
|
16194
|
+
bufferToArrayBuffer(buffer) {
|
|
16195
|
+
return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
|
|
16196
|
+
}
|
|
16197
|
+
};
|
|
16198
|
+
|
|
16199
|
+
//#endregion
|
|
16200
|
+
//#region ../file/src/index.ts
|
|
16201
|
+
/**
|
|
16202
|
+
* Provides file system capabilities for Alepha applications with support for multiple file sources and operations.
|
|
16203
|
+
*
|
|
16204
|
+
* The file module enables working with files from various sources (URLs, buffers, streams) and provides
|
|
16205
|
+
* utilities for file type detection, content type determination, and common file system operations.
|
|
16206
|
+
*
|
|
16207
|
+
* @see {@link FileDetector}
|
|
16208
|
+
* @see {@link FileSystemProvider}
|
|
16209
|
+
* @see {@link NodeFileSystemProvider}
|
|
16210
|
+
* @module alepha.file
|
|
16211
|
+
*/
|
|
16212
|
+
const AlephaFile = $module({
|
|
16213
|
+
name: "alepha.file",
|
|
16214
|
+
descriptors: [],
|
|
16215
|
+
services: [
|
|
16216
|
+
FileDetector,
|
|
16217
|
+
FileSystemProvider,
|
|
16218
|
+
NodeFileSystemProvider
|
|
16219
|
+
],
|
|
16220
|
+
register: (alepha$1) => alepha$1.with({
|
|
16221
|
+
optional: true,
|
|
16222
|
+
provide: FileSystemProvider,
|
|
16223
|
+
use: NodeFileSystemProvider
|
|
16224
|
+
})
|
|
16225
|
+
});
|
|
16226
|
+
|
|
16227
|
+
//#endregion
|
|
16228
|
+
//#region src/assets/biomeJson.ts
|
|
16229
|
+
const biomeJson = `
|
|
16230
|
+
{
|
|
16231
|
+
"$schema": "https://biomejs.dev/schemas/latest/schema.json",
|
|
16232
|
+
"vcs": {
|
|
16233
|
+
"enabled": true,
|
|
16234
|
+
"clientKind": "git"
|
|
16235
|
+
},
|
|
16236
|
+
"files": {
|
|
16237
|
+
"ignoreUnknown": true,
|
|
16238
|
+
"includes": ["**", "!node_modules", "!dist"]
|
|
16239
|
+
},
|
|
16240
|
+
"formatter": {
|
|
16241
|
+
"enabled": true,
|
|
16242
|
+
"indentStyle": "space"
|
|
16243
|
+
},
|
|
16244
|
+
"linter": {
|
|
16245
|
+
"enabled": true,
|
|
16246
|
+
"rules": {
|
|
16247
|
+
"recommended": true
|
|
16248
|
+
},
|
|
16249
|
+
"domains": {
|
|
16250
|
+
"react": "recommended"
|
|
16251
|
+
}
|
|
16252
|
+
},
|
|
16253
|
+
"assist": {
|
|
16254
|
+
"actions": {
|
|
16255
|
+
"source": {
|
|
16256
|
+
"organizeImports": "on"
|
|
16257
|
+
}
|
|
16258
|
+
}
|
|
16259
|
+
}
|
|
16260
|
+
}
|
|
16261
|
+
`.trim();
|
|
16262
|
+
|
|
16263
|
+
//#endregion
|
|
16264
|
+
//#region src/assets/tsconfigJson.ts
|
|
16265
|
+
const tsconfigJson = `
|
|
16266
|
+
{
|
|
16267
|
+
"compilerOptions": {
|
|
16268
|
+
"module": "nodenext",
|
|
16269
|
+
"target": "esnext",
|
|
16270
|
+
"strict": true,
|
|
16271
|
+
"jsx": "react-jsx",
|
|
16272
|
+
"verbatimModuleSyntax": true,
|
|
16273
|
+
"isolatedModules": true,
|
|
16274
|
+
"moduleDetection": "force",
|
|
16275
|
+
"skipLibCheck": true,
|
|
16276
|
+
"resolveJsonModule": true,
|
|
16277
|
+
"noEmit": true,
|
|
16278
|
+
"allowImportingTsExtensions": true
|
|
16279
|
+
},
|
|
16280
|
+
"exclude": ["node_modules", "dist"]
|
|
16281
|
+
}
|
|
16282
|
+
`.trim();
|
|
16283
|
+
|
|
16284
|
+
//#endregion
|
|
16285
|
+
//#region src/assets/viteConfigTs.ts
|
|
16286
|
+
const viteConfigTs = (serverEntry) => `
|
|
16287
|
+
import { viteAlepha } from "@alepha/vite";
|
|
16288
|
+
|
|
16289
|
+
export default {
|
|
16290
|
+
plugins: [
|
|
16291
|
+
viteAlepha(${serverEntry ? `{ serverEntry: "${serverEntry}" }` : ""}),
|
|
16292
|
+
],
|
|
16293
|
+
test: {
|
|
16294
|
+
globals: true,
|
|
16295
|
+
},
|
|
16296
|
+
};
|
|
16297
|
+
`.trim();
|
|
16298
|
+
|
|
16299
|
+
//#endregion
|
|
16300
|
+
//#region src/version.ts
|
|
16301
|
+
const packageJson = JSON.parse((0, node_fs.readFileSync)(new URL("../package.json", require("url").pathToFileURL(__filename).href), "utf-8"));
|
|
16302
|
+
const version = packageJson.version;
|
|
16303
|
+
|
|
16304
|
+
//#endregion
|
|
16305
|
+
//#region src/services/ProjectUtils.ts
|
|
16306
|
+
/**
|
|
16307
|
+
* Utility service for common project operations used by CLI commands.
|
|
16308
|
+
*
|
|
16309
|
+
* This service provides helper methods for:
|
|
16310
|
+
* - Project configuration file management (tsconfig.json, package.json, etc.)
|
|
16311
|
+
* - Package manager setup (Yarn, npm, pnpm)
|
|
16312
|
+
* - Sample project downloading
|
|
16313
|
+
* - Drizzle ORM/Kit utilities
|
|
16314
|
+
* - Alepha instance loading
|
|
16315
|
+
*/
|
|
16316
|
+
var ProjectUtils = class {
|
|
16317
|
+
log = $logger();
|
|
16318
|
+
runner = $inject(ProcessRunner);
|
|
16319
|
+
fs = $inject(FileSystemProvider);
|
|
16320
|
+
/**
|
|
16321
|
+
* Ensure Yarn is configured in the project directory.
|
|
16322
|
+
*
|
|
16323
|
+
* Creates a .yarnrc.yml file with node-modules linker if it doesn't exist.
|
|
16324
|
+
*
|
|
16325
|
+
* @param root - The root directory of the project
|
|
16326
|
+
*/
|
|
16327
|
+
async ensureYarn(root) {
|
|
16328
|
+
const yarnrcPath = (0, node_path.join)(root, ".yarnrc.yml");
|
|
16329
|
+
try {
|
|
16330
|
+
await (0, node_fs_promises.access)(yarnrcPath);
|
|
16331
|
+
} catch {
|
|
16332
|
+
await (0, node_fs_promises.writeFile)(yarnrcPath, "nodeLinker: node-modules");
|
|
16333
|
+
}
|
|
16334
|
+
const npmLockPath = (0, node_path.join)(root, "package-lock.json");
|
|
16335
|
+
const pnpmLockPath = (0, node_path.join)(root, "pnpm-lock.yaml");
|
|
16336
|
+
await this.fs.rm(npmLockPath, { force: true });
|
|
16337
|
+
await this.fs.rm(pnpmLockPath, { force: true });
|
|
16338
|
+
}
|
|
16339
|
+
/**
|
|
16340
|
+
* Generate package.json content with Alepha dependencies.
|
|
16341
|
+
*
|
|
16342
|
+
* @param modes - Configuration for which dependencies to include
|
|
16343
|
+
* @returns Package.json partial with dependencies, devDependencies, and scripts
|
|
16344
|
+
*/
|
|
16345
|
+
generatePackageJsonContent(modes) {
|
|
16346
|
+
const dependencies = {
|
|
16347
|
+
"@alepha/core": `^${version}`,
|
|
16348
|
+
"@alepha/logger": `^${version}`,
|
|
16349
|
+
"@alepha/datetime": `^${version}`
|
|
16350
|
+
};
|
|
16351
|
+
const devDependencies = {
|
|
16352
|
+
alepha: `^${version}`,
|
|
16353
|
+
"@alepha/vite": `^${version}`
|
|
16354
|
+
};
|
|
16355
|
+
if (modes.api) {
|
|
16356
|
+
dependencies["@alepha/server"] = `^${version}`;
|
|
16357
|
+
dependencies["@alepha/server-swagger"] = `^${version}`;
|
|
16358
|
+
dependencies["@alepha/server-multipart"] = `^${version}`;
|
|
16359
|
+
}
|
|
16360
|
+
if (modes.orm) dependencies["@alepha/orm"] = `^${version}`;
|
|
16361
|
+
if (modes.react) {
|
|
16362
|
+
dependencies["@alepha/server"] = `^${version}`;
|
|
16363
|
+
dependencies["@alepha/server-links"] = `^${version}`;
|
|
16364
|
+
dependencies["@alepha/react"] = `^${version}`;
|
|
16365
|
+
dependencies.react = "^19.2.0";
|
|
16366
|
+
devDependencies["@types/react"] = "^19.2.0";
|
|
16367
|
+
}
|
|
16368
|
+
return {
|
|
16369
|
+
dependencies,
|
|
16370
|
+
devDependencies,
|
|
16371
|
+
scripts: {
|
|
16372
|
+
dev: "alepha dev",
|
|
16373
|
+
build: "alepha build"
|
|
16374
|
+
}
|
|
16375
|
+
};
|
|
16376
|
+
}
|
|
16377
|
+
/**
|
|
16378
|
+
* Ensure package.json exists and has correct configuration.
|
|
16379
|
+
*
|
|
16380
|
+
* Creates a new package.json if none exists, or updates an existing one to:
|
|
16381
|
+
* - Set "type": "module"
|
|
16382
|
+
* - Add Alepha dependencies
|
|
16383
|
+
* - Add standard scripts
|
|
16384
|
+
*
|
|
16385
|
+
* @param root - The root directory of the project
|
|
16386
|
+
* @param modes - Configuration for which dependencies to include
|
|
16387
|
+
*/
|
|
16388
|
+
async ensurePackageJson(root, modes) {
|
|
16389
|
+
const packageJsonPath = (0, node_path.join)(root, "package.json");
|
|
16390
|
+
try {
|
|
16391
|
+
await (0, node_fs_promises.access)(packageJsonPath);
|
|
16392
|
+
} catch (error) {
|
|
16393
|
+
await (0, node_fs_promises.writeFile)(packageJsonPath, JSON.stringify(this.generatePackageJsonContent(modes), null, 2));
|
|
16394
|
+
return;
|
|
16395
|
+
}
|
|
16396
|
+
const content = await (0, node_fs_promises.readFile)(packageJsonPath, "utf8");
|
|
16397
|
+
const packageJson$1 = JSON.parse(content);
|
|
16398
|
+
if (!packageJson$1.type || packageJson$1.type !== "module") packageJson$1.type = "module";
|
|
15318
16399
|
const newPackageJson = this.generatePackageJsonContent(modes);
|
|
15319
16400
|
packageJson$1.type = "module";
|
|
15320
16401
|
packageJson$1.dependencies ??= {};
|
|
@@ -15325,27 +16406,357 @@ var CoreCommands = class {
|
|
|
15325
16406
|
Object.assign(packageJson$1.scripts, newPackageJson.scripts);
|
|
15326
16407
|
await (0, node_fs_promises.writeFile)(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
|
|
15327
16408
|
}
|
|
15328
|
-
|
|
16409
|
+
/**
|
|
16410
|
+
* Ensure package.json exists and is configured as ES module.
|
|
16411
|
+
*
|
|
16412
|
+
* Similar to ensurePackageJson but only validates/sets the "type": "module" field.
|
|
16413
|
+
* Throws an error if no package.json exists.
|
|
16414
|
+
*
|
|
16415
|
+
* @param root - The root directory of the project
|
|
16416
|
+
* @throws {AlephaError} If no package.json is found
|
|
16417
|
+
*/
|
|
16418
|
+
async ensurePackageJsonModule(root) {
|
|
16419
|
+
const packageJsonPath = (0, node_path.join)(root, "package.json");
|
|
16420
|
+
try {
|
|
16421
|
+
await (0, node_fs_promises.access)(packageJsonPath);
|
|
16422
|
+
} catch (error) {
|
|
16423
|
+
throw new AlephaError("No package.json found in project root. Run 'npx alepha init' to create one.");
|
|
16424
|
+
}
|
|
16425
|
+
const content = await (0, node_fs_promises.readFile)(packageJsonPath, "utf8");
|
|
16426
|
+
const packageJson$1 = JSON.parse(content);
|
|
16427
|
+
if (!packageJson$1.type || packageJson$1.type !== "module") {
|
|
16428
|
+
packageJson$1.type = "module";
|
|
16429
|
+
await (0, node_fs_promises.writeFile)(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
|
|
16430
|
+
}
|
|
16431
|
+
}
|
|
16432
|
+
/**
|
|
16433
|
+
* Ensure tsconfig.json exists in the project.
|
|
16434
|
+
*
|
|
16435
|
+
* Creates a standard Alepha tsconfig.json if none exists.
|
|
16436
|
+
*
|
|
16437
|
+
* @param root - The root directory of the project
|
|
16438
|
+
*/
|
|
16439
|
+
async ensureTsConfig(root) {
|
|
15329
16440
|
const tsconfigPath = (0, node_path.join)(root, "tsconfig.json");
|
|
15330
16441
|
try {
|
|
15331
16442
|
await (0, node_fs_promises.access)(tsconfigPath);
|
|
15332
16443
|
} catch {
|
|
15333
|
-
this.log.info("Missing tsconfig.json detected. Creating one...");
|
|
15334
16444
|
await (0, node_fs_promises.writeFile)(tsconfigPath, tsconfigJson);
|
|
15335
16445
|
}
|
|
15336
16446
|
}
|
|
15337
|
-
|
|
15338
|
-
|
|
15339
|
-
|
|
15340
|
-
|
|
15341
|
-
|
|
15342
|
-
|
|
15343
|
-
|
|
15344
|
-
|
|
15345
|
-
|
|
16447
|
+
/**
|
|
16448
|
+
* Download Alepha starter project from GitHub.
|
|
16449
|
+
*
|
|
16450
|
+
* Downloads and extracts the apps/starter directory from the main Alepha repository.
|
|
16451
|
+
*
|
|
16452
|
+
* @param targetDir - The directory where the project should be extracted
|
|
16453
|
+
* @throws {AlephaError} If the download fails
|
|
16454
|
+
*/
|
|
16455
|
+
async downloadSampleProject(targetDir) {
|
|
16456
|
+
const response = await fetch("https://api.github.com/repos/feunard/alepha/tarball/main", { headers: { "User-Agent": "Alepha-CLI" } });
|
|
16457
|
+
if (!response.ok) throw new AlephaError(`Failed to download: ${response.statusText}`);
|
|
16458
|
+
await (0, node_stream_promises.pipeline)(node_stream.Readable.fromWeb(response.body), tar.extract({
|
|
16459
|
+
cwd: targetDir,
|
|
16460
|
+
strip: 3,
|
|
16461
|
+
filter: (path) => {
|
|
16462
|
+
const parts = path.split("/");
|
|
16463
|
+
return parts.length >= 3 && parts[1] === "apps" && parts[2] === "starter";
|
|
16464
|
+
}
|
|
16465
|
+
}));
|
|
16466
|
+
}
+ /**
+ * Get the path to Biome configuration file.
+ *
+ * Looks for an existing biome.json in the project root, or creates one if it doesn't exist.
+ *
+ * @param maybePath - Optional custom path to biome config
+ * @returns Absolute path to the biome.json config file
+ */
+ async getBiomeConfigPath(maybePath) {
+ const root = process.cwd();
+ if (maybePath) try {
+ const path = (0, node_path.join)(root, maybePath);
+ await (0, node_fs_promises.access)(path);
+ return path;
+ } catch {}
+ try {
+ const path = (0, node_path.join)(root, "biome.json");
+ await (0, node_fs_promises.access)(path);
+ return path;
+ } catch {
+ return await this.runner.writeConfigFile("biome.json", biomeJson);
+ }
+ }
+ /**
+ * Get the path to Vite configuration file.
+ *
+ * Looks for an existing vite.config.ts in the project root, or creates one if it doesn't exist.
+ *
+ * @param root - The root directory of the project (defaults to process.cwd())
+ * @param serverEntry - Optional path to the server entry file to include in the config
+ * @returns Absolute path to the vite.config.ts file
+ */
+ async getViteConfigPath(root, serverEntry) {
+ try {
+ const viteConfigPath = (0, node_path.join)(root, "vite.config.ts");
+ await (0, node_fs_promises.access)(viteConfigPath);
+ return viteConfigPath;
+ } catch {
+ return this.runner.writeConfigFile("vite.config.ts", viteConfigTs(serverEntry));
+ }
+ }
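When no vite.config.ts exists, the fallback is generated from the viteConfigTs template. The previous inline version of that template is removed further down in this diff, so the sketch below is based on it and the current template may differ. Roughly, for a server entry of ./src/server.ts it produces:

```js
// Approximation based on the old viteConfigTs template visible below in this diff.
import { viteAlepha } from "@alepha/vite";

export default {
  plugins: [
    viteAlepha({ serverEntry: "./src/server.ts" }), // rendered as viteAlepha() when no serverEntry is passed
  ],
  test: {
    globals: true,
  },
};
```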
+ /**
+ * Load Alepha instance from a server entry file.
+ *
+ * Dynamically imports the server entry file and extracts the Alepha instance.
+ * Skips the automatic start process.
+ *
+ * @param rootDir - The root directory of the project
+ * @param explicitEntry - Optional explicit path to the entry file
+ * @returns Object containing the Alepha instance and the entry file path
+ * @throws {AlephaError} If the Alepha instance cannot be found
+ */
+ async loadAlephaFromServerEntryFile(rootDir, explicitEntry) {
+ process.env.ALEPHA_SKIP_START = "true";
+ const entry = await boot.getServerEntry(rootDir, explicitEntry);
+ const mod = await (0, tsx_esm_api.tsImport)(entry, { parentURL: require("url").pathToFileURL(__filename).href });
+ this.log.debug(`Load entry: ${entry}`);
+ if (mod.default instanceof Alepha) return {
+ alepha: mod.default,
+ entry
+ };
+ const g = global;
+ if (g.__alepha) return {
+ alepha: g.__alepha,
+ entry
+ };
+ throw new AlephaError(`Could not find Alepha instance in entry file: ${entry}`);
+ }
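For this loader to succeed, the server entry only has to expose the application's Alepha instance, either as the module's default export or through the globalThis.__alepha fallback checked above. A hedged sketch (the module layout is hypothetical; the exact way the instance is constructed is project-specific):

```js
// src/server.ts (hypothetical): build the Alepha instance elsewhere and default-export it.
import { alepha } from "./app"; // assumed to create and configure the Alepha instance

export default alepha;
```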
+ /**
+ * Get DrizzleKitProvider from an Alepha instance.
+ *
+ * Searches the Alepha registry for the DrizzleKitProvider instance.
+ *
+ * @param alepha - The Alepha instance to search
+ * @returns The DrizzleKitProvider instance
+ */
+ getKitFromAlepha(alepha$1) {
+ return alepha$1["registry"].values().find((it) => it.instance.constructor.name === "DrizzleKitProvider")?.instance;
+ }
+ /**
+ * Generate JavaScript code for Drizzle entities export.
+ *
+ * Creates a temporary entities.js file that imports from the entry file
+ * and exports database models for Drizzle Kit to process.
+ *
+ * @param entry - Path to the server entry file
+ * @param provider - Name of the database provider
+ * @param models - Array of model names to export
+ * @returns JavaScript code as a string
+ */
+ generateEntitiesJs(entry, provider, models = []) {
+ return `
+ import "${entry}";
+ import { DrizzleKitProvider, Repository } from "@alepha/orm";
+
+ const alepha = globalThis.__alepha;
+ const kit = alepha.inject(DrizzleKitProvider);
+ const provider = alepha.services(Repository).find((it) => it.provider.name === "${provider}").provider;
+ const models = kit.getModels(provider);
+
+ ${models.map((it) => `export const ${it} = models["${it}"];`).join("\n")}
+
+ `.trim();
+ }
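Concretely, for a hypothetical provider named "postgres" with models ["users", "posts"] and an entry of ./src/server.ts, the template above produces (whitespace aside):

```js
import "./src/server.ts";
import { DrizzleKitProvider, Repository } from "@alepha/orm";

const alepha = globalThis.__alepha;
const kit = alepha.inject(DrizzleKitProvider);
const provider = alepha.services(Repository).find((it) => it.provider.name === "postgres").provider;
const models = kit.getModels(provider);

export const users = models["users"];
export const posts = models["posts"];
```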
+ /**
+ * Prepare Drizzle configuration files for a database provider.
+ *
+ * Creates temporary entities.js and drizzle.config.js files needed
+ * for Drizzle Kit commands to run properly.
+ *
+ * @param options - Configuration options including kit, provider info, and paths
+ * @returns Path to the generated drizzle.config.js file
+ */
+ async prepareDrizzleConfig(options) {
+ const models = Object.keys(options.kit.getModels(options.provider));
+ const entitiesJs = this.generateEntitiesJs(options.entry, options.providerName, models);
+ const config = {
+ schema: await this.runner.writeConfigFile("entities.js", entitiesJs, options.rootDir),
+ out: `./migrations/${options.providerName}`,
+ dialect: options.dialect,
+ dbCredentials: { url: options.providerUrl }
+ };
+ if (options.providerName === "pglite") config.driver = "pglite";
+ const drizzleConfigJs = `export default ${JSON.stringify(config, null, 2)}`;
+ return await this.runner.writeConfigFile("drizzle.config.js", drizzleConfigJs, options.rootDir);
+ }
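The resulting drizzle.config.js is a plain serialized object. For a hypothetical provider named "pglite", it would look roughly like the following (the schema path, dialect value, and database URL are placeholders; writeConfigFile decides where the files actually land):

```js
export default {
  "schema": "/path/to/generated/entities.js",
  "out": "./migrations/pglite",
  "dialect": "postgresql",
  "dbCredentials": {
    "url": "file:./data/pglite"
  },
  "driver": "pglite"
}
```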
+ /**
+ * Run a drizzle-kit command for all database providers in an Alepha instance.
+ *
+ * Iterates through all repository providers, prepares Drizzle config for each,
+ * and executes the specified drizzle-kit command.
+ *
+ * @param options - Configuration including command to run, flags, and logging
+ */
+ async runDrizzleKitCommand(options) {
+ const rootDir = options.root;
+ this.log.debug(`Using project root: ${rootDir}`);
+ const { alepha: alepha$1, entry } = await this.loadAlephaFromServerEntryFile(rootDir, options.args);
+ const kit = this.getKitFromAlepha(alepha$1);
+ const repositoryProvider = alepha$1.inject("RepositoryProvider");
+ const accepted = /* @__PURE__ */ new Set([]);
+ for (const descriptor of repositoryProvider.getRepositories()) {
+ const provider = descriptor.provider;
+ const providerName = provider.name;
+ const dialect = provider.dialect;
+ if (accepted.has(providerName)) continue;
+ accepted.add(providerName);
+ this.log.info("");
+ this.log.info(options.logMessage(providerName, dialect));
+ const drizzleConfigJsPath = await this.prepareDrizzleConfig({
+ kit,
+ provider,
+ providerName,
+ providerUrl: provider.url,
+ dialect,
+ entry,
+ rootDir
+ });
+ await this.runner.exec(`drizzle-kit ${options.command} --config=${drizzleConfigJsPath}`);
+ }
+ }
+ };
+
+ //#endregion
+ //#region src/commands/BiomeCommands.ts
+ var BiomeCommands = class {
+ log = $logger();
+ runner = $inject(ProcessRunner);
+ utils = $inject(ProjectUtils);
+ biomeFlags = t.object({ config: t.optional(t.text({ aliases: ["c"] })) });
+ format = $command({
+ name: "format",
+ description: "Format the codebase using Biome",
+ flags: this.biomeFlags,
+ handler: async ({ flags }) => {
+ const configPath = await this.utils.getBiomeConfigPath(flags.config);
+ await this.runner.exec(`biome format --fix --config-path=${configPath}`);
+ }
+ });
+ lint = $command({
+ name: "lint",
+ description: "Run linter across the codebase using Biome",
+ flags: this.biomeFlags,
+ handler: async ({ flags }) => {
+ const configPath = await this.utils.getBiomeConfigPath(flags.config);
+ await this.runner.exec(`biome check --formatter-enabled=false --fix --config-path=${configPath}`);
+ }
+ });
+ };
+
+ //#endregion
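Both Biome commands resolve a config path first and then shell out. With no --config flag and no biome.json in the project, a default config is written and the executed commands come out as below (path illustrative):

```js
const configPath = "/path/to/project/biome.json"; // whatever getBiomeConfigPath() found or created
const formatCmd = `biome format --fix --config-path=${configPath}`;
const lintCmd = `biome check --formatter-enabled=false --fix --config-path=${configPath}`;
```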
+ //#region src/commands/CoreCommands.ts
+ var CoreCommands = class {
+ log = $logger();
+ cli = $inject(CliProvider);
+ utils = $inject(ProjectUtils);
+ /**
+ * Called when no command is provided
+ */
+ root = $command({
+ root: true,
+ flags: t.object({ version: t.optional(t.boolean({
+ description: "Show Alepha CLI version",
+ aliases: ["v"]
+ })) }),
+ handler: async ({ flags }) => {
+ if (flags.version) {
+ this.log.info(version);
+ return;
  }
-
-
+ this.cli.printHelp();
+ }
+ });
+ /**
+ * Create a new Alepha project based on one of the sample projects (for now, only one sample project is available)
+ */
+ create = $command({
+ name: "create",
+ description: "Create a new Alepha project",
+ aliases: ["new"],
+ args: t.text({ title: "name" }),
+ flags: t.object({
+ yarn: t.optional(t.boolean({ description: "Use Yarn package manager" })),
+ pnpm: t.optional(t.boolean({ description: "Use pnpm package manager" }))
+ }),
+ summary: false,
+ handler: async ({ run: run$1, args, flags, root }) => {
+ const name = args;
+ const dest = (0, node_path.join)(root, name);
+ try {
+ await (0, node_fs_promises.access)(dest);
+ this.log.error(`Directory "${name}" already exists. Please choose a different project name.`);
+ return;
+ } catch {}
+ let installCmd = "npm install";
+ let execCmd = "npx";
+ if (flags.yarn) {
+ installCmd = "yarn";
+ execCmd = "yarn";
+ } else if (flags.pnpm) {
+ installCmd = "pnpm install";
+ execCmd = "pnpm";
+ }
+ await (0, node_fs_promises.mkdir)(dest, { recursive: true }).catch(() => null);
+ await run$1("Downloading sample project", () => this.utils.downloadSampleProject(dest));
+ if (flags.yarn) {
+ await this.utils.ensureYarn(dest);
+ await run$1(`cd ${name} && yarn set version stable`, { alias: "Setting Yarn to stable version" });
+ }
+ await run$1(`cd ${name} && ${installCmd}`, { alias: "Installing dependencies" });
+ await run$1(`cd ${name} && npx alepha lint`, { alias: "Linting code" });
+ await run$1(`cd ${name} && npx alepha typecheck`, { alias: "Type checking" });
+ await run$1(`cd ${name} && npx alepha test`, { alias: "Running tests" });
+ await run$1(`cd ${name} && npx alepha build`, { alias: "Building project" });
+ this.log.info("");
+ this.log.info(`$ cd ${name} && ${execCmd} alepha dev`.trim());
+ this.log.info("");
+ }
+ });
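Putting the create handler together: a hypothetical `alepha create my-app --pnpm` run works through the following steps after the starter download (note that the lint/typecheck/test/build steps always go through npx, as in the handler above):

```js
const steps = [
  "cd my-app && pnpm install",
  "cd my-app && npx alepha lint",
  "cd my-app && npx alepha typecheck",
  "cd my-app && npx alepha test",
  "cd my-app && npx alepha build",
];
// Final hint printed for the user: "$ cd my-app && pnpm alepha dev"
```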
+ /**
+ * Clean the project, removing the "dist" directory
+ */
+ clean = $command({
+ name: "clean",
+ description: "Clean the project",
+ handler: async ({ run: run$1 }) => {
+ await run$1.rm("./dist");
+ }
+ });
+ /**
+ * Ensure the project has the necessary Alepha configuration files.
+ * Add the correct dependencies to package.json and install them.
+ */
+ init = $command({
+ name: "init",
+ description: "Add missing Alepha configuration files to the project",
+ flags: t.object({
+ yarn: t.optional(t.boolean({ description: "Use Yarn package manager" })),
+ api: t.optional(t.boolean({ description: "Include Alepha Server dependencies" })),
+ react: t.optional(t.boolean({ description: "Include Alepha React dependencies" })),
+ orm: t.optional(t.boolean({ description: "Include Alepha ORM dependencies" }))
+ }),
+ handler: async ({ run: run$1, flags, root }) => {
+ await run$1("Ensuring Alepha configuration files", async () => {
+ await this.utils.ensureTsConfig(root);
+ await this.utils.ensurePackageJson(root, flags);
+ });
+ if (flags.yarn) {
+ await this.utils.ensureYarn(root);
+ await run$1("yarn install", { alias: "Installing dependencies with Yarn" });
+ } else await run$1("npm install", { alias: "Installing dependencies with npm" });
+ }
+ });
  };

  //#endregion
@@ -15353,10 +16764,7 @@ var CoreCommands = class {
  var DrizzleCommands = class {
  log = $logger();
  runner = $inject(ProcessRunner);
-
- description: "Project root",
- default: "."
- }) });
+ utils = $inject(ProjectUtils);
  /**
  * Check if database migrations are up to date
  *
@@ -15370,15 +16778,14 @@ var DrizzleCommands = class {
  check = $command({
  name: "db:check-migrations",
  description: "Verify database migration files are up to date",
- flags: this.flags,
  args: t.optional(t.text({
  title: "path",
  description: "Path to the Alepha server entry file"
  })),
- handler: async ({
- const rootDir =
+ handler: async ({ args, root }) => {
+ const rootDir = root;
  this.log.debug(`Using project root: ${rootDir}`);
- const { alepha: alepha$1 } = await this.loadAlephaFromServerEntryFile(rootDir, args);
+ const { alepha: alepha$1 } = await this.utils.loadAlephaFromServerEntryFile(rootDir, args);
  const models = [];
  const repositories = alepha$1.descriptors("repository");
  const kit = (0, node_module.createRequire)(require("url").pathToFileURL(__filename).href)("drizzle-kit/api");
@@ -15431,14 +16838,13 @@ var DrizzleCommands = class {
  name: "db:generate",
  description: "Generate migration files based on current database schema",
  summary: false,
- flags: this.flags,
  args: t.optional(t.text({
  title: "path",
  description: "Path to the Alepha server entry file"
  })),
- handler: async ({
- await this.runDrizzleKitCommand({
-
+ handler: async ({ args, root }) => {
+ await this.utils.runDrizzleKitCommand({
+ root,
  args,
  command: "generate",
  logMessage: (providerName, dialect) => `Generate '${providerName}' migrations (${dialect}) ...`
@@ -15457,14 +16863,13 @@ var DrizzleCommands = class {
  name: "db:push",
  description: "Push database schema changes directly to the database",
  summary: false,
- flags: this.flags,
  args: t.optional(t.text({
  title: "path",
  description: "Path to the Alepha server entry file"
  })),
- handler: async ({
- await this.runDrizzleKitCommand({
-
+ handler: async ({ root, args }) => {
+ await this.utils.runDrizzleKitCommand({
+ root,
  args,
  command: "push",
  logMessage: (providerName, dialect) => `Push '${providerName}' schema (${dialect}) ...`
@@ -15483,14 +16888,13 @@ var DrizzleCommands = class {
  name: "db:migrate",
  description: "Apply pending database migrations",
  summary: false,
- flags: this.flags,
  args: t.optional(t.text({
  title: "path",
  description: "Path to the Alepha server entry file"
  })),
- handler: async ({
- await this.runDrizzleKitCommand({
-
+ handler: async ({ root, args }) => {
+ await this.utils.runDrizzleKitCommand({
+ root,
  args,
  command: "migrate",
  logMessage: (providerName, dialect) => `Migrate '${providerName}' database (${dialect}) ...`
@@ -15509,108 +16913,25 @@ var DrizzleCommands = class {
  name: "db:studio",
  description: "Launch Drizzle Studio database browser",
  summary: false,
- flags: this.flags,
  args: t.optional(t.text({
  title: "path",
  description: "Path to the Alepha server entry file"
  })),
- handler: async ({
- await this.runDrizzleKitCommand({
-
+ handler: async ({ root, args }) => {
+ await this.utils.runDrizzleKitCommand({
+ root,
  args,
  command: "studio",
  logMessage: (providerName, dialect) => `Launch Studio for '${providerName}' (${dialect}) ...`
  });
  }
  });
- /**
- * Run a drizzle-kit command for all database providers
- */
- async runDrizzleKitCommand(options) {
- const rootDir = (0, node_path.join)(process.cwd(), options.flags.root);
- this.log.debug(`Using project root: ${rootDir}`);
- const { alepha: alepha$1, entry } = await this.loadAlephaFromServerEntryFile(rootDir, options.args);
- const kit = this.getKitFromAlepha(alepha$1);
- const repositoryProvider = alepha$1.inject("RepositoryProvider");
- const accepted = /* @__PURE__ */ new Set([]);
- for (const descriptor of repositoryProvider.getRepositories()) {
- const provider = descriptor.provider;
- const providerName = provider.name;
- const dialect = provider.dialect;
- if (accepted.has(providerName)) continue;
- accepted.add(providerName);
- this.log.info("");
- this.log.info(options.logMessage(providerName, dialect));
- const drizzleConfigJsPath = await this.prepareDrizzleConfig({
- kit,
- provider,
- providerName,
- providerUrl: provider.url,
- dialect,
- entry,
- rootDir
- });
- await this.runner.exec(`drizzle-kit ${options.command} --config=${drizzleConfigJsPath}`);
- }
- }
- /**
- * Prepare Drizzle configuration files for a provider
- */
- async prepareDrizzleConfig(options) {
- const models = Object.keys(options.kit.getModels(options.provider));
- const entitiesJs = this.generateEntitiesJs(options.entry, options.providerName, models);
- const config = {
- schema: await this.runner.writeConfigFile("entities.js", entitiesJs, options.rootDir),
- out: `./migrations/${options.providerName}`,
- dialect: options.dialect,
- dbCredentials: { url: options.providerUrl }
- };
- if (options.providerName === "pglite") config.driver = "pglite";
- const drizzleConfigJs = "export default " + JSON.stringify(config, null, 2);
- return await this.runner.writeConfigFile("drizzle.config.js", drizzleConfigJs, options.rootDir);
- }
- /**
- * Get DrizzleKitProvider from Alepha instance
- */
- getKitFromAlepha(alepha$1) {
- return alepha$1["registry"].values().find((it) => it.instance.constructor.name === "DrizzleKitProvider")?.instance;
- }
- async loadAlephaFromServerEntryFile(rootDir, explicitEntry) {
- process.env.ALEPHA_SKIP_START = "true";
- const entry = await boot.getServerEntry(rootDir, explicitEntry);
- const mod = await (0, tsx_esm_api.tsImport)(entry, { parentURL: require("url").pathToFileURL(__filename).href });
- this.log.debug(`Load entry: ${entry}`);
- if (mod.default instanceof Alepha) return {
- alepha: mod.default,
- entry
- };
- const g = global;
- if (g.__alepha) return {
- alepha: g.__alepha,
- entry
- };
- throw new AlephaError(`Could not find Alepha instance in entry file: ${entry}`);
- }
- generateEntitiesJs(entry, provider, models = []) {
- return `
- import "${entry}";
- import { DrizzleKitProvider, Repository } from "@alepha/postgres";
-
- const alepha = globalThis.__alepha;
- const kit = alepha.inject(DrizzleKitProvider);
- const provider = alepha.services(Repository).find((it) => it.provider.name === "${provider}").provider;
- const models = kit.getModels(provider);
-
- ${models.map((it) => `export const ${it} = models["${it}"];`).join("\n")}
-
- `.trim();
- }
  };

  //#endregion
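Of the five db:* commands in this region, four are thin wrappers that hand a drizzle-kit subcommand to ProjectUtils.runDrizzleKitCommand, while db:check-migrations loads drizzle-kit/api in-process instead. A condensed view of the delegation (the config path is the drizzle.config.js generated per provider at run time):

```js
// Wrapper command -> drizzle-kit subcommand executed for each repository provider.
const delegated = {
  "db:generate": "generate",
  "db:push": "push",
  "db:migrate": "migrate",
  "db:studio": "studio",
};
// e.g. `alepha db:push ./src/server.ts` ends up running:
//   drizzle-kit push --config=<generated drizzle.config.js>
```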
  //#region src/commands/VerifyCommands.ts
  var VerifyCommands = class {
-
+ processRunner = $inject(ProcessRunner);
  /**
  * Run a series of verification commands to ensure code quality and correctness.
  *
@@ -15643,32 +16964,17 @@ var VerifyCommands = class {
  name: "typecheck",
  description: "Check TypeScript types across the codebase",
  handler: async () => {
- await this.
+ await this.processRunner.exec("tsc --noEmit");
  }
  });
  };

- //#endregion
- //#region src/assets/viteConfigTs.ts
- const viteConfigTs = (serverEntry) => `
- import { viteAlepha } from "@alepha/vite";
-
- export default {
- plugins: [
- viteAlepha(${serverEntry ? `{ serverEntry: "${serverEntry}" }` : ""}),
- ],
- test: {
- globals: true,
- },
- };
- `.trim();
-
  //#endregion
  //#region src/commands/ViteCommands.ts
  var ViteCommands = class {
  log = $logger();
  runner = $inject(ProcessRunner);
-
+ utils = $inject(ProjectUtils);
  run = $command({
  name: "run",
  description: "Run a TypeScript file directly",
@@ -15681,8 +16987,8 @@ var ViteCommands = class {
  title: "path",
  description: "Filepath to run"
  }),
- handler: async ({ args, flags }) => {
- await this.
+ handler: async ({ args, flags, root }) => {
+ await this.utils.ensureTsConfig(root);
  await this.runner.exec(`tsx ${flags.watch ? "watch " : ""}${args}`);
  }
  });
@@ -15699,18 +17005,19 @@
  title: "path",
  description: "Filepath to run"
  })),
- handler: async ({ args }) => {
-
- await this.
- await this.ensurePackageJson(root);
+ handler: async ({ args, root }) => {
+ await this.utils.ensureTsConfig(root);
+ await this.utils.ensurePackageJsonModule(root);
  const entry = await boot.getServerEntry(root, args);
  this.log.trace("Entry file found", { entry });
  try {
  await (0, node_fs_promises.access)((0, node_path.join)(root, "index.html"));
  } catch {
  this.log.trace("No index.html found, running entry file with tsx");
+ await this.runner.exec(`tsx watch ${entry}`);
+ return;
  }
- const configPath = await this.
+ const configPath = await this.utils.getViteConfigPath(root, args ? entry : void 0);
  this.log.trace("Vite config found", { configPath });
  await this.runner.exec(`vite -c=${configPath}`);
  }
@@ -15731,15 +17038,15 @@ var ViteCommands = class {
  }),
  handler: async ({ flags, args }) => {
  const root = process.cwd();
- await this.
- await this.
+ await this.utils.ensureTsConfig(root);
+ await this.utils.ensurePackageJsonModule(root);
  const entry = await boot.getServerEntry(root, args);
  this.log.trace("Entry file found", { entry });
  await (0, node_fs_promises.rm)("dist", {
  recursive: true,
  force: true
  });
- const configPath = await this.
+ const configPath = await this.utils.getViteConfigPath(root, args ? entry : void 0);
  const env = {};
  if (flags.stats) env.ALEPHA_BUILD_STATS = "true";
  await this.runner.exec(`vite build -c=${configPath}`, env);
@@ -15748,35 +17055,12 @@
  test = $command({
  name: "test",
  description: "Run tests using Vitest",
- handler: async () => {
- await this.
- const configPath = await this.
+ handler: async ({ root }) => {
+ await this.utils.ensureTsConfig(root);
+ const configPath = await this.utils.getViteConfigPath(root);
  await this.runner.exec(`vitest run -c=${configPath}`);
  }
  });
- async configPath(root = process.cwd(), serverEntry) {
- try {
- const viteConfigPath = (0, node_path.join)(root, "vite.config.ts");
- await (0, node_fs_promises.access)(viteConfigPath);
- return viteConfigPath;
- } catch {
- return this.runner.writeConfigFile("vite.config.ts", viteConfigTs(serverEntry));
- }
- }
- async ensurePackageJson(root = process.cwd()) {
- const packageJsonPath = (0, node_path.join)(root, "package.json");
- try {
- await (0, node_fs_promises.access)(packageJsonPath);
- } catch (error) {
- throw new AlephaError("No package.json found in project root. Run 'npx alepha init' to create one.");
- }
- const content = await (0, node_fs_promises.readFile)(packageJsonPath, "utf8");
- const packageJson$1 = JSON.parse(content);
- if (!packageJson$1.type || packageJson$1.type !== "module") {
- packageJson$1.type = "module";
- await (0, node_fs_promises.writeFile)(packageJsonPath, JSON.stringify(packageJson$1, null, 2));
- }
- }
  };

  //#endregion
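To summarize the Vite-backed commands in this last region: dev falls back to `tsx watch` when the project has no index.html, and dev/build/test otherwise delegate to Vite/Vitest with a resolved (or generated) vite.config.ts. A condensed sketch, with hasIndexHtml, entry, and configPath standing in for values resolved at run time:

```js
const devCmd = hasIndexHtml ? `vite -c=${configPath}` : `tsx watch ${entry}`;
const buildCmd = `vite build -c=${configPath}`; // run with ALEPHA_BUILD_STATS=true when --stats is set
const testCmd = `vitest run -c=${configPath}`;
```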