@reliverse/dler 2.0.5 → 2.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dist/cmds/build/cmd.js +1 -15
  2. package/dist/cmds/publish/cmd.js +2 -4
  3. package/package.json +16 -12
  4. package/src/cli.ts +0 -8
  5. package/src/cmds/build/cmd.ts +0 -582
  6. package/src/cmds/clean/cmd.ts +0 -166
  7. package/src/cmds/clean/impl.ts +0 -900
  8. package/src/cmds/clean/presets.ts +0 -158
  9. package/src/cmds/clean/types.ts +0 -71
  10. package/src/cmds/init/cmd.ts +0 -68
  11. package/src/cmds/init/impl/config.ts +0 -105
  12. package/src/cmds/init/impl/generators.ts +0 -220
  13. package/src/cmds/init/impl/prompts.ts +0 -137
  14. package/src/cmds/init/impl/types.ts +0 -25
  15. package/src/cmds/init/impl/utils.ts +0 -17
  16. package/src/cmds/init/impl/validators.ts +0 -55
  17. package/src/cmds/integrate/cmd.ts +0 -82
  18. package/src/cmds/integrate/impl.ts +0 -204
  19. package/src/cmds/integrate/integrations/base.ts +0 -69
  20. package/src/cmds/integrate/integrations/nextjs.ts +0 -227
  21. package/src/cmds/integrate/integrations/registry.ts +0 -45
  22. package/src/cmds/integrate/integrations/ultracite.ts +0 -53
  23. package/src/cmds/integrate/types.ts +0 -48
  24. package/src/cmds/integrate/utils/biome.ts +0 -173
  25. package/src/cmds/integrate/utils/context.ts +0 -148
  26. package/src/cmds/integrate/utils/temp.ts +0 -47
  27. package/src/cmds/perf/analysis/bundle.ts +0 -311
  28. package/src/cmds/perf/analysis/filesystem.ts +0 -324
  29. package/src/cmds/perf/analysis/monorepo.ts +0 -439
  30. package/src/cmds/perf/benchmarks/command.ts +0 -230
  31. package/src/cmds/perf/benchmarks/memory.ts +0 -249
  32. package/src/cmds/perf/benchmarks/runner.ts +0 -220
  33. package/src/cmds/perf/cmd.ts +0 -285
  34. package/src/cmds/perf/impl.ts +0 -411
  35. package/src/cmds/perf/reporters/console.ts +0 -331
  36. package/src/cmds/perf/reporters/html.ts +0 -984
  37. package/src/cmds/perf/reporters/json.ts +0 -42
  38. package/src/cmds/perf/types.ts +0 -220
  39. package/src/cmds/perf/utils/cache.ts +0 -234
  40. package/src/cmds/perf/utils/formatter.ts +0 -190
  41. package/src/cmds/perf/utils/stats.ts +0 -153
  42. package/src/cmds/publish/cmd.ts +0 -215
  43. package/src/cmds/shell/cmd.ts +0 -61
  44. package/src/cmds/tsc/cache.ts +0 -237
  45. package/src/cmds/tsc/cmd.ts +0 -139
  46. package/src/cmds/tsc/impl.ts +0 -855
  47. package/src/cmds/tsc/types.ts +0 -66
  48. package/tsconfig.json +0 -9
package/src/cmds/perf/benchmarks/memory.ts
@@ -1,249 +0,0 @@
- // apps/dler/src/cmds/perf/benchmarks/memory.ts
-
- import type { MemoryUsage } from "../types";
-
- export interface MemoryProfile {
-   timestamp: number;
-   memory: MemoryUsage;
-   label?: string;
- }
-
- export interface MemorySnapshot {
-   before: MemoryUsage;
-   after: MemoryUsage;
-   peak: MemoryUsage;
-   growth: number;
-   duration: number;
- }
-
- export class MemoryProfiler {
-   private snapshots: MemoryProfile[] = [];
-   private startMemory: MemoryUsage | null = null;
-   private peakMemory: MemoryUsage | null = null;
-
-   start(label?: string): void {
-     this.startMemory = process.memoryUsage();
-     this.peakMemory = { ...this.startMemory };
-     this.snapshots.push({
-       timestamp: Date.now(),
-       memory: this.startMemory,
-       label: label ?? "start",
-     });
-   }
-
-   snapshot(label?: string): void {
-     const current = process.memoryUsage();
-     this.snapshots.push({
-       timestamp: Date.now(),
-       memory: current,
-       label: label ?? `snapshot-${this.snapshots.length}`,
-     });
-
-     // Update peak memory
-     if (!this.peakMemory) {
-       this.peakMemory = { ...current };
-     } else {
-       this.peakMemory = {
-         rss: Math.max(this.peakMemory.rss, current.rss),
-         heapTotal: Math.max(this.peakMemory.heapTotal, current.heapTotal),
-         heapUsed: Math.max(this.peakMemory.heapUsed, current.heapUsed),
-         external: Math.max(this.peakMemory.external, current.external),
-         arrayBuffers: Math.max(
-           this.peakMemory.arrayBuffers,
-           current.arrayBuffers,
-         ),
-       };
-     }
-   }
-
-   stop(): MemorySnapshot | null {
-     if (!this.startMemory) {
-       return null;
-     }
-
-     const endMemory = process.memoryUsage();
-     const duration =
-       this.snapshots.length > 0
-         ? this.snapshots[this.snapshots.length - 1]!.timestamp -
-           this.snapshots[0]!.timestamp
-         : 0;
-
-     const snapshot: MemorySnapshot = {
-       before: this.startMemory,
-       after: endMemory,
-       peak: this.peakMemory ?? endMemory,
-       growth: endMemory.rss - this.startMemory.rss,
-       duration,
-     };
-
-     // Reset state
-     this.startMemory = null;
-     this.peakMemory = null;
-     this.snapshots = [];
-
-     return snapshot;
-   }
-
-   getSnapshots(): MemoryProfile[] {
-     return [...this.snapshots];
-   }
-
-   getMemoryGrowth(): number {
-     if (this.snapshots.length < 2) return 0;
-
-     const first = this.snapshots[0]!.memory;
-     const last = this.snapshots[this.snapshots.length - 1]!.memory;
-
-     return last.rss - first.rss;
-   }
-
-   getPeakMemory(): MemoryUsage | null {
-     return this.peakMemory;
-   }
-
-   getAverageMemory(): MemoryUsage | null {
-     if (this.snapshots.length === 0) return null;
-
-     const sum = this.snapshots.reduce(
-       (acc, snapshot) => ({
-         rss: acc.rss + snapshot.memory.rss,
-         heapTotal: acc.heapTotal + snapshot.memory.heapTotal,
-         heapUsed: acc.heapUsed + snapshot.memory.heapUsed,
-         external: acc.external + snapshot.memory.external,
-         arrayBuffers: acc.arrayBuffers + snapshot.memory.arrayBuffers,
-       }),
-       { rss: 0, heapTotal: 0, heapUsed: 0, external: 0, arrayBuffers: 0 },
-     );
-
-     const count = this.snapshots.length;
-     return {
-       rss: sum.rss / count,
-       heapTotal: sum.heapTotal / count,
-       heapUsed: sum.heapUsed / count,
-       external: sum.external / count,
-       arrayBuffers: sum.arrayBuffers / count,
-     };
-   }
- }
-
- export const createMemoryProfiler = (): MemoryProfiler => {
-   return new MemoryProfiler();
- };
-
- export const measureMemoryUsage = (
-   fn: () => void | Promise<void>,
- ): Promise<MemorySnapshot> => {
-   return new Promise((resolve) => {
-     const profiler = createMemoryProfiler();
-     profiler.start("measurement");
-
-     const executeFn = async () => {
-       try {
-         await fn();
-       } finally {
-         const snapshot = profiler.stop();
-         resolve(snapshot!);
-       }
-     };
-
-     executeFn();
-   });
- };
-
- export const getCurrentMemoryUsage = (): MemoryUsage => {
-   return process.memoryUsage();
- };
-
- export const getMemoryInfo = (): {
-   total: number;
-   free: number;
-   used: number;
-   percentage: number;
- } => {
-   const usage = process.memoryUsage();
-
-   // Note: This is a simplified calculation
-   // In reality, getting accurate system memory info is more complex
-   const total = usage.rss * 4; // Rough estimate
-   const used = usage.rss;
-   const free = total - used;
-   const percentage = (used / total) * 100;
-
-   return {
-     total,
-     free,
-     used,
-     percentage: Math.min(percentage, 100),
-   };
- };
-
- export const formatMemoryUsage = (usage: MemoryUsage): string => {
-   const format = (bytes: number) => {
-     if (bytes === 0) return "0 B";
-     const k = 1024;
-     const sizes = ["B", "KB", "MB", "GB"];
-     const i = Math.floor(Math.log(bytes) / Math.log(k));
-     return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
-   };
-
-   return `RSS: ${format(usage.rss)}, Heap: ${format(usage.heapUsed)}/${format(usage.heapTotal)}, External: ${format(usage.external)}`;
- };
-
- export const detectMemoryLeaks = (
-   snapshots: MemoryProfile[],
- ): {
-   hasLeak: boolean;
-   severity: "low" | "medium" | "high";
-   growthRate: number;
-   suggestion: string;
- } => {
-   if (snapshots.length < 3) {
-     return {
-       hasLeak: false,
-       severity: "low",
-       growthRate: 0,
-       suggestion: "Need more snapshots to detect leaks",
-     };
-   }
-
-   const rssValues = snapshots.map((s) => s.memory.rss);
-   const growthRate =
-     (rssValues[rssValues.length - 1]! - rssValues[0]!) / snapshots.length;
-
-   // Simple heuristic: if memory grows consistently, it might be a leak
-   const isConsistentGrowth = rssValues.every(
-     (val, i) => i === 0 || val >= rssValues[i - 1]! * 0.95,
-   );
-
-   const hasLeak = isConsistentGrowth && growthRate > 1024 * 1024; // 1MB per snapshot
-
-   let severity: "low" | "medium" | "high" = "low";
-   let suggestion = "";
-
-   if (hasLeak) {
-     if (growthRate > 10 * 1024 * 1024) {
-       // 10MB per snapshot
-       severity = "high";
-       suggestion =
-         "Critical memory leak detected. Check for unclosed resources, event listeners, or circular references.";
-     } else if (growthRate > 5 * 1024 * 1024) {
-       // 5MB per snapshot
-       severity = "medium";
-       suggestion =
-         "Moderate memory leak detected. Monitor memory usage and consider garbage collection.";
-     } else {
-       severity = "low";
-       suggestion =
-         "Minor memory growth detected. Monitor for patterns over time.";
-     }
-   } else {
-     suggestion = "No significant memory leaks detected.";
-   }
-
-   return {
-     hasLeak,
-     severity,
-     growthRate,
-     suggestion,
-   };
- };
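
A minimal usage sketch of the profiler API removed above; the relative import path and the profiled steps are assumptions for illustration, not part of the package's documented surface:

// Hypothetical usage of the removed MemoryProfiler helpers.
// The import path assumes a file placed next to the perf command sources.
import {
  createMemoryProfiler,
  detectMemoryLeaks,
  formatMemoryUsage,
} from "./benchmarks/memory";

const profiler = createMemoryProfiler();
profiler.start("example-task");

// Placeholder for real work; each snapshot records process.memoryUsage().
profiler.snapshot("after-step-1");
profiler.snapshot("after-step-2");

// detectMemoryLeaks needs at least 3 profiles (start + 2 snapshots) to report a verdict.
const leakReport = detectMemoryLeaks(profiler.getSnapshots());
console.log(leakReport.suggestion);

// stop() returns the before/after/peak snapshot and resets the profiler state.
const result = profiler.stop();
if (result) {
  console.log(`RSS growth: ${result.growth} bytes over ${result.duration} ms`);
  console.log(formatMemoryUsage(result.after));
}
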
package/src/cmds/perf/benchmarks/runner.ts
@@ -1,220 +0,0 @@
- // apps/dler/src/cmds/perf/benchmarks/runner.ts
-
- import { logger } from "@reliverse/dler-logger";
- import pMap from "@reliverse/dler-mapper";
- import type { BenchmarkResult, Measurement, MemoryStats } from "../types";
- import { formatProgress } from "../utils/formatter";
-
- import {
-   calculateMemoryAverage,
-   calculateStatistics,
-   findPeakMemory,
- } from "../utils/stats";
- import { executeCommandWithMemoryTracking } from "./command";
-
- export interface BenchmarkRunnerOptions {
-   command: string;
-   runs: number;
-   warmup: number;
-   concurrency: number;
-   cwd?: string;
-   timeout?: number;
-   env?: Record<string, string>;
-   verbose?: boolean;
- }
-
- export class BenchmarkRunner {
-   private options: BenchmarkRunnerOptions;
-
-   constructor(options: BenchmarkRunnerOptions) {
-     this.options = options;
-   }
-
-   async run(): Promise<BenchmarkResult> {
-     const { command, runs, warmup, concurrency, verbose } = this.options;
-     const startTime = Date.now();
-
-     if (verbose) {
-       logger.info(`🚀 Starting benchmark for: ${command}`);
-       logger.info(
-         ` Runs: ${runs}, Warmup: ${warmup}, Concurrency: ${concurrency}`,
-       );
-     }
-
-     // Run warmup iterations
-     if (warmup > 0) {
-       if (verbose) {
-         logger.info(`🔥 Running ${warmup} warmup iterations...`);
-       }
-
-       await this.runWarmup();
-     }
-
-     // Run actual benchmark iterations
-     if (verbose) {
-       logger.info(`📊 Running ${runs} benchmark iterations...`);
-     }
-
-     const measurements = await this.runBenchmark();
-     const statistics = this.calculateStatistics(measurements);
-     const memory = this.calculateMemoryStats(measurements);
-     const executionTime = Date.now() - startTime;
-
-     // Check for failures
-     const failures = measurements.filter((m) => !m.success);
-     const success = failures.length === 0;
-
-     if (verbose && failures.length > 0) {
-       logger.warn(`⚠️ ${failures.length} out of ${runs} runs failed`);
-     }
-
-     return {
-       command,
-       runs,
-       warmup,
-       concurrency,
-       measurements,
-       statistics,
-       memory,
-       executionTime,
-       success,
-       error: failures.length > 0 ? `${failures.length} runs failed` : undefined,
-     };
-   }
-
-   private async runWarmup(): Promise<void> {
-     const { command, warmup, cwd, timeout, env } = this.options;
-
-     // Run warmup iterations sequentially to avoid interference
-     for (let i = 0; i < warmup; i++) {
-       try {
-         await executeCommandWithMemoryTracking(command, {
-           cwd,
-           timeout,
-           env,
-         });
-       } catch {
-         // Ignore warmup failures
-       }
-     }
-   }
-
-   private async runBenchmark(): Promise<Measurement[]> {
-     const { command, runs, concurrency, cwd, timeout, env, verbose } =
-       this.options;
-
-     const runIndices = Array.from({ length: runs }, (_, i) => i);
-
-     const measurements = await pMap(
-       runIndices,
-       async (runIndex) => {
-         if (verbose) {
-           logger.info(formatProgress(runIndex + 1, runs));
-         }
-
-         const measurement = await executeCommandWithMemoryTracking(command, {
-           cwd,
-           timeout,
-           env,
-         });
-
-         measurement.run = runIndex + 1;
-         return measurement;
-       },
-       {
-         concurrency,
-         stopOnError: false,
-       },
-     );
-
-     return measurements;
-   }
-
-   private calculateStatistics(
-     measurements: Measurement[],
-   ): ReturnType<typeof calculateStatistics> {
-     const durations = measurements
-       .filter((m) => m.success)
-       .map((m) => m.duration);
-
-     if (durations.length === 0) {
-       return calculateStatistics([]);
-     }
-
-     return calculateStatistics(durations);
-   }
-
-   private calculateMemoryStats(measurements: Measurement[]): MemoryStats {
-     const successfulMeasurements = measurements.filter((m) => m.success);
-
-     if (successfulMeasurements.length === 0) {
-       return {
-         peak: {
-           rss: 0,
-           heapTotal: 0,
-           heapUsed: 0,
-           external: 0,
-           arrayBuffers: 0,
-         },
-         average: {
-           rss: 0,
-           heapTotal: 0,
-           heapUsed: 0,
-           external: 0,
-           arrayBuffers: 0,
-         },
-         growth: 0,
-       };
-     }
-
-     const rssValues = successfulMeasurements.map((m) => m.memory.rss);
-     const heapTotalValues = successfulMeasurements.map(
-       (m) => m.memory.heapTotal,
-     );
-     const heapUsedValues = successfulMeasurements.map((m) => m.memory.heapUsed);
-     const externalValues = successfulMeasurements.map((m) => m.memory.external);
-     const arrayBuffersValues = successfulMeasurements.map(
-       (m) => m.memory.arrayBuffers,
-     );
-
-     const peak: MemoryStats["peak"] = {
-       rss: findPeakMemory(rssValues),
-       heapTotal: findPeakMemory(heapTotalValues),
-       heapUsed: findPeakMemory(heapUsedValues),
-       external: findPeakMemory(externalValues),
-       arrayBuffers: findPeakMemory(arrayBuffersValues),
-     };
-
-     const average: MemoryStats["average"] = {
-       rss: calculateMemoryAverage(rssValues),
-       heapTotal: calculateMemoryAverage(heapTotalValues),
-       heapUsed: calculateMemoryAverage(heapUsedValues),
-       external: calculateMemoryAverage(externalValues),
-       arrayBuffers: calculateMemoryAverage(arrayBuffersValues),
-     };
-
-     const growth =
-       rssValues.length > 1
-         ? rssValues[rssValues.length - 1]! - rssValues[0]!
-         : 0;
-
-     return {
-       peak,
-       average,
-       growth,
-     };
-   }
- }
-
- export const runBenchmark = async (
-   options: BenchmarkRunnerOptions,
- ): Promise<BenchmarkResult> => {
-   const runner = new BenchmarkRunner(options);
-   return runner.run();
- };
-
- export const createBenchmarkRunner = (
-   options: BenchmarkRunnerOptions,
- ): BenchmarkRunner => {
-   return new BenchmarkRunner(options);
- };
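
A minimal driver sketch for the benchmark runner removed above; the import path, the benchmarked command, and the option values are assumptions chosen only to match the BenchmarkRunnerOptions shape shown in the diff:

// Hypothetical driver for the removed BenchmarkRunner.
import { runBenchmark } from "./benchmarks/runner";

async function main(): Promise<void> {
  const result = await runBenchmark({
    command: "node --version", // placeholder command; any shell command the runner can execute
    runs: 10,
    warmup: 2,
    concurrency: 1,
    timeout: 60_000,
    verbose: true,
  });

  console.log(
    `${result.command}: ${result.runs} runs in ${result.executionTime} ms total`,
  );
  if (!result.success) {
    console.error(result.error);
  }
}

main().catch(console.error);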