openrxiv-cli 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/dist/api/api-client.d.ts +96 -0
  2. package/dist/api/api-client.d.ts.map +1 -0
  3. package/dist/api/api-client.js +257 -0
  4. package/dist/aws/bucket-explorer.d.ts +26 -0
  5. package/dist/aws/bucket-explorer.d.ts.map +1 -0
  6. package/dist/aws/bucket-explorer.js +220 -0
  7. package/dist/aws/config.d.ts +5 -0
  8. package/dist/aws/config.d.ts.map +1 -0
  9. package/dist/aws/config.js +36 -0
  10. package/dist/aws/downloader.d.ts +13 -0
  11. package/dist/aws/downloader.d.ts.map +1 -0
  12. package/dist/aws/downloader.js +115 -0
  13. package/dist/aws/month-lister.d.ts +18 -0
  14. package/dist/aws/month-lister.d.ts.map +1 -0
  15. package/dist/aws/month-lister.js +90 -0
  16. package/dist/commands/batch-info.d.ts +3 -0
  17. package/dist/commands/batch-info.d.ts.map +1 -0
  18. package/dist/commands/batch-info.js +213 -0
  19. package/dist/commands/batch-process.d.ts +3 -0
  20. package/dist/commands/batch-process.d.ts.map +1 -0
  21. package/dist/commands/batch-process.js +557 -0
  22. package/dist/commands/download.d.ts +3 -0
  23. package/dist/commands/download.d.ts.map +1 -0
  24. package/dist/commands/download.js +76 -0
  25. package/dist/commands/index.d.ts +6 -0
  26. package/dist/commands/index.d.ts.map +1 -0
  27. package/dist/commands/index.js +5 -0
  28. package/dist/commands/list.d.ts +3 -0
  29. package/dist/commands/list.d.ts.map +1 -0
  30. package/dist/commands/list.js +18 -0
  31. package/dist/commands/summary.d.ts +3 -0
  32. package/dist/commands/summary.d.ts.map +1 -0
  33. package/dist/commands/summary.js +249 -0
  34. package/dist/index.d.ts +7 -0
  35. package/dist/index.d.ts.map +1 -0
  36. package/dist/index.js +35 -0
  37. package/dist/utils/batches.d.ts +9 -0
  38. package/dist/utils/batches.d.ts.map +1 -0
  39. package/dist/utils/batches.js +61 -0
  40. package/dist/utils/batches.test.d.ts +2 -0
  41. package/dist/utils/batches.test.d.ts.map +1 -0
  42. package/dist/utils/batches.test.js +119 -0
  43. package/dist/utils/default-server.d.ts +3 -0
  44. package/dist/utils/default-server.d.ts.map +1 -0
  45. package/dist/utils/default-server.js +20 -0
  46. package/dist/utils/index.d.ts +5 -0
  47. package/dist/utils/index.d.ts.map +1 -0
  48. package/dist/utils/index.js +5 -0
  49. package/dist/utils/meca-processor.d.ts +28 -0
  50. package/dist/utils/meca-processor.d.ts.map +1 -0
  51. package/dist/utils/meca-processor.js +503 -0
  52. package/dist/utils/meca-processor.test.d.ts +2 -0
  53. package/dist/utils/meca-processor.test.d.ts.map +1 -0
  54. package/dist/utils/meca-processor.test.js +123 -0
  55. package/dist/utils/months.d.ts +36 -0
  56. package/dist/utils/months.d.ts.map +1 -0
  57. package/dist/utils/months.js +135 -0
  58. package/dist/utils/months.test.d.ts +2 -0
  59. package/dist/utils/months.test.d.ts.map +1 -0
  60. package/dist/utils/months.test.js +209 -0
  61. package/dist/utils/requester-pays-error.d.ts +6 -0
  62. package/dist/utils/requester-pays-error.d.ts.map +1 -0
  63. package/dist/utils/requester-pays-error.js +20 -0
  64. package/dist/version.d.ts +3 -0
  65. package/dist/version.d.ts.map +1 -0
  66. package/dist/version.js +2 -0
  67. package/package.json +67 -0
@@ -0,0 +1,115 @@
1
+ import { GetObjectCommand, HeadObjectCommand } from '@aws-sdk/client-s3';
2
+ import { createWriteStream } from 'fs';
3
+ import { mkdir } from 'fs/promises';
4
+ import { dirname, join } from 'path';
5
+ import { pipeline } from 'stream/promises';
6
+ import { Transform } from 'stream';
7
+ import chalk from 'chalk';
8
+ import ora from 'ora';
9
+ import cliProgress from 'cli-progress';
10
+ import { getS3Client, getGlobalRequesterPays } from './config.js';
11
+ import { getDefaultServer } from '../utils/default-server.js';
12
+ import { getBucketName } from './bucket-explorer.js';
13
/**
 * Download a single object from the openrxiv S3 bucket to a local file,
 * showing a spinner while the request starts and a live progress bar while
 * the body streams to disk.
 *
 * @param {string} path - Full S3 object key to download.
 * @param {{ output?: string, server?: string, filename?: string }} options -
 *   `output` target directory (default './downloads'), `server` bucket
 *   selector (default from getDefaultServer()), optional local `filename`.
 * @throws {Error} with a user-facing message on access / missing-key /
 *   missing-bucket failures.
 */
export async function downloadFile(path, options) {
    const { output = './downloads', server = getDefaultServer() } = options;
    const bucket = getBucketName(server);
    const client = await getS3Client();
    console.log(chalk.blue(`Downloading: ${path}`));
    console.log(chalk.blue('=============================='));
    try {
        // Get file metadata (size) before streaming the body
        const headCommandOptions = {
            Bucket: bucket,
            Key: path,
        };
        // Only add RequestPayer if requester pays is enabled
        if (getGlobalRequesterPays()) {
            headCommandOptions.RequestPayer = 'requester';
        }
        const headCommand = new HeadObjectCommand(headCommandOptions);
        const metadata = await client.send(headCommand);
        const fileSize = metadata.ContentLength || 0;
        const fileName = options.filename || path.split('/').pop() || 'unknown';
        const outputPath = join(output, fileName);
        // Create output directory
        await mkdir(dirname(outputPath), { recursive: true });
        // Start download
        const spinner = ora('Preparing download...').start();
        const getCommandOptions = {
            Bucket: bucket,
            Key: path,
        };
        // Only add RequestPayer if requester pays is enabled
        if (getGlobalRequesterPays()) {
            getCommandOptions.RequestPayer = 'requester';
        }
        const getCommand = new GetObjectCommand(getCommandOptions);
        let response;
        try {
            response = await client.send(getCommand);
        }
        catch (err) {
            // Stop the spinner so the error message is not hidden behind it.
            spinner.fail('Download failed to start');
            throw err;
        }
        if (!response.Body) {
            spinner.fail('Download failed to start');
            throw new Error('No file content received');
        }
        spinner.succeed('Download started');
        // Create progress bar. `{eta}` is computed by cli-progress itself;
        // `{speed}` is a custom token supplied via the update payload below.
        const progressBar = new cliProgress.SingleBar({
            format: 'Downloading |{bar}| {percentage}% | {value}/{total} bytes | Speed: {speed} | ETA: {eta}',
            barCompleteChar: '\u2588',
            barIncompleteChar: '\u2591',
            hideCursor: true,
        });
        progressBar.start(fileSize, 0, { speed: 'N/A' });
        let downloadedBytes = 0;
        const startTime = Date.now();
        // Transform stream that counts bytes and feeds the progress bar.
        const progressStream = new Transform({
            transform(chunk, _encoding, callback) {
                downloadedBytes += chunk.length;
                // Guard against a zero elapsed time on the very first chunk.
                const elapsed = Math.max((Date.now() - startTime) / 1000, 0.001);
                const bytesPerSecond = downloadedBytes / elapsed;
                // Fix: pass the custom {speed} token as an update payload —
                // previously the computed speed/eta were discarded and the
                // bar rendered a blank {speed} token.
                progressBar.update(downloadedBytes, {
                    speed: `${formatFileSize(bytesPerSecond)}/s`,
                });
                callback(null, chunk);
            },
        });
        // Download file
        const writeStream = createWriteStream(outputPath);
        try {
            await pipeline(response.Body, progressStream, writeStream);
        }
        finally {
            // Always stop the bar so the cursor is restored even on failure.
            progressBar.stop();
        }
        console.log(chalk.green(`✓ Download completed: ${outputPath}`));
        console.log(chalk.blue(`File size: ${formatFileSize(fileSize)}`));
    }
    catch (error) {
        if (error instanceof Error) {
            // Check for specific AWS errors that indicate requester pays is needed
            if (error.message.includes('Access Denied') || error.message.includes('403')) {
                if (!getGlobalRequesterPays()) {
                    throw new Error(`Download failed: Access denied. This bucket requires requester pays for downloads. ` +
                        `Try running with --requester-pays flag or ensure your IAM role has requester pays permissions.`);
                }
                else {
                    throw new Error(`Download failed: Access denied. Check your AWS credentials and permissions.`);
                }
            }
            else if (error.message.includes('NoSuchKey')) {
                throw new Error(`Download failed: File not found in S3 bucket.`);
            }
            else if (error.message.includes('NoSuchBucket')) {
                throw new Error(`Download failed: S3 bucket not found.`);
            }
            else {
                throw new Error(`Download failed: ${error.message}`);
            }
        }
        throw error;
    }
}
108
/**
 * Render a byte count as a human-readable string, e.g. 1536 -> "1.5 KB".
 * Zero is special-cased; otherwise the unit is chosen by the base-1024
 * logarithm of the value.
 */
function formatFileSize(bytes) {
    if (bytes === 0) {
        return '0 B';
    }
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    const exponent = Math.floor(Math.log(bytes) / Math.log(1024));
    const scaled = parseFloat((bytes / Math.pow(1024, exponent)).toFixed(2));
    return `${scaled} ${units[exponent]}`;
}
@@ -0,0 +1,18 @@
1
/** Metadata for a single MECA object found in the openrxiv S3 bucket. */
export interface S3FileInfo {
    /** Name of the S3 bucket the object lives in. */
    s3Bucket: string;
    /** Full object key (path) within the bucket. */
    s3Key: string;
    /** Object size in bytes, as reported by ListObjectsV2. */
    fileSize: number;
    /** Last-modified timestamp reported by S3. */
    lastModified: Date;
    /** Batch label derived from the folder structure lookup. */
    batch: string;
}
/** Selector options for {@link listMonthFiles}; at least one of `month` or `batch` is required. */
export interface ListMonthOptions {
    /** Month to list, e.g. "January_2024" or "2024-01". */
    month?: string;
    /** Batch to list, e.g. "Batch_01". */
    batch?: string;
    /** Which archive to query; defaults to the configured default server. */
    server?: 'biorxiv' | 'medrxiv';
    /** Maximum number of files to return. */
    limit?: number;
}
/**
 * Lists MECA files in S3 for a specific month with pagination support
 */
export declare function listMonthFiles(options: ListMonthOptions): Promise<S3FileInfo[]>;
//# sourceMappingURL=month-lister.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"month-lister.d.ts","sourceRoot":"","sources":["../../src/aws/month-lister.ts"],"names":[],"mappings":"AAMA,MAAM,WAAW,UAAU;IACzB,QAAQ,EAAE,MAAM,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,YAAY,EAAE,IAAI,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,gBAAgB;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,SAAS,GAAG,SAAS,CAAC;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,wBAAsB,cAAc,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAoGrF"}
@@ -0,0 +1,90 @@
1
+ import { ListObjectsV2Command } from '@aws-sdk/client-s3';
2
+ import { getS3Client } from './config.js';
3
+ import { getFolderStructure } from 'openrxiv-utils';
4
+ import { getDefaultServer } from '../utils/default-server.js';
5
+ import { getBucketName } from './bucket-explorer.js';
6
+ /**
7
+ * Lists MECA files in S3 for a specific month with pagination support
8
+ */
9
/**
 * Lists MECA files in S3 for a specific month with pagination support.
 *
 * Resolves the S3 prefix from the month/batch selector, then pages through
 * ListObjectsV2 until the prefix is exhausted or `limit` files are collected.
 *
 * @param {ListMonthOptions} options - month/batch selector, optional server
 *   ('biorxiv' | 'medrxiv') and `limit` (default 1000) on returned files.
 * @returns {Promise<S3FileInfo[]>} matching `.meca` objects; an empty array
 *   if the listing fails (errors are logged, not rethrown).
 * @throws {Error} when neither `month` nor `batch` is supplied.
 */
export async function listMonthFiles(options) {
    const { month, batch, limit = 1000, server = getDefaultServer() } = options;
    const awsBucket = getBucketName(server);
    if (!month && !batch) {
        throw new Error('Either month or batch must be specified');
    }
    const description = month ? `month: ${month}` : `batch: ${batch}`;
    console.log(`🔍 Listing files for ${description} from AWS S3 bucket: ${awsBucket}`);
    try {
        const s3Client = await getS3Client();
        // Determine folder structure based on options
        const folder = getFolderStructure({ month, batch, server: options.server || 'biorxiv' });
        const s3Prefix = folder.prefix;
        console.log(`🔍 Content Type: ${folder.type === 'current' ? 'Current Content' : 'Back Content'}`);
        if (folder.batch) {
            console.log(`🔍 Batch: ${folder.batch}`);
        }
        console.log(`🔍 Searching S3 prefix: ${s3Prefix}`);
        const allFiles = [];
        let continuationToken;
        let batchCount = 0;
        // Use pagination to get all files
        do {
            batchCount++;
            console.log(`📦 Fetching batch ${batchCount}...`);
            const listCommand = new ListObjectsV2Command({
                Bucket: awsBucket,
                Prefix: s3Prefix,
                MaxKeys: Math.min(1000, limit - allFiles.length), // Don't fetch more than we need
                ContinuationToken: continuationToken,
                // NOTE(review): requester pays is unconditional here, while the
                // downloader gates it on getGlobalRequesterPays() — confirm intent.
                RequestPayer: 'requester',
            });
            const response = await s3Client.send(listCommand);
            if (response.Contents) {
                for (const item of response.Contents) {
                    // Fix: use a null check so legitimate zero-byte objects are not
                    // silently dropped (`!item.Size` was truthy for Size === 0).
                    if (!item.Key || item.Size == null)
                        continue;
                    // Only process .meca files
                    if (!item.Key.endsWith('.meca'))
                        continue;
                    const fileInfo = {
                        s3Bucket: awsBucket,
                        s3Key: item.Key, // Already the full object key from S3
                        fileSize: item.Size,
                        lastModified: item.LastModified || new Date(),
                        batch: folder.batch,
                    };
                    allFiles.push(fileInfo);
                    // Check if we've reached the limit
                    if (allFiles.length >= limit) {
                        console.log(`📋 Reached limit of ${limit} files`);
                        break;
                    }
                }
                console.log(` Found ${response.Contents.length} files in this batch`);
            }
            continuationToken = response.NextContinuationToken;
            // Break if we've reached the limit
            if (allFiles.length >= limit) {
                break;
            }
        } while (continuationToken);
        console.log(`📋 Found ${allFiles.length} MECA files in S3 bucket`);
        return allFiles;
    }
    catch (error) {
        if (error instanceof Error) {
            console.error(`❌ Error listing S3 files: ${error.message}`);
            if (error.message.includes('AWS credentials not configured')) {
                console.error('💡 Run "biorxiv config set-credentials" to configure AWS access');
            }
        }
        else {
            console.error('❌ Unknown error listing S3 files:', error);
        }
        return [];
    }
}
@@ -0,0 +1,3 @@
1
import { Command } from 'commander';
/** Commander command (`batch-info`) that lists detailed metadata for all files in a month or batch. */
export declare const monthInfoCommand: Command;
//# sourceMappingURL=batch-info.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"batch-info.d.ts","sourceRoot":"","sources":["../../src/commands/batch-info.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AAQpC,eAAO,MAAM,gBAAgB,SAczB,CAAC"}
@@ -0,0 +1,213 @@
1
+ import { Command } from 'commander';
2
+ import { ListObjectsV2Command } from '@aws-sdk/client-s3';
3
+ import chalk from 'chalk';
4
+ import { getS3Client } from '../aws/config.js';
5
+ import { getFolderStructure } from 'openrxiv-utils';
6
+ import { getBucketName } from '../aws/bucket-explorer.js';
7
+ import { getDefaultServer } from '../utils/index.js';
8
// `batch-info` command: prints per-file metadata and summary statistics for a
// given month or batch. (Exported as `monthInfoCommand`, but the registered
// CLI name is 'batch-info'.)
export const monthInfoCommand = new Command('batch-info')
    .description('List detailed metadata for all files in a specific month or batch from bioRxiv or medRxiv')
    .option('-m, --month <month>', 'Month to list (e.g., "January_2024" or "2024-01")')
    .option('-b, --batch <batch>', 'Batch to list (e.g., "1", "batch-1", "Batch_01")')
    .option('-s, --server <server>', 'Server to use: "biorxiv" or "medrxiv"', getDefaultServer())
    .action(async (options) => {
    try {
        await listMonthMetadata(options);
    }
    catch (error) {
        // Log and exit non-zero so shell scripts can detect the failure.
        console.error('Error listing month metadata:', error);
        process.exit(1);
    }
});
22
/**
 * Fetch and summarize metadata for every object under a month/batch prefix.
 *
 * Validates that at least one of `month`/`batch` was supplied, resolves the
 * S3 prefix via getFolderStructure, pages through ListObjectsV2 (1000 keys
 * per request, requester-pays), collects key/size/date/type per object, and
 * delegates the report to displaySummary().
 *
 * @param {{ month?: string, batch?: string, server?: string }} options
 * @throws {Error} wrapping any listing failure with context.
 */
async function listMonthMetadata(options) {
    const client = await getS3Client();
    const { month, batch, server = getDefaultServer() } = options;
    const bucketName = getBucketName(server);
    if (!month && !batch) {
        // Exits the process directly — this is CLI entry-point code.
        console.error('❌ Error: Either --month or --batch option must be specified');
        process.exit(1);
    }
    // Determine folder structure based on options
    const contentStructure = getFolderStructure({ month, batch, server });
    const prefix = contentStructure.prefix;
    const description = month ? `Month: ${month}` : `Batch: ${batch}`;
    console.log(chalk.blue(`📅 Month/Batch Information: ${description}`));
    console.log(chalk.blue('===================================='));
    console.log(chalk.gray(`🔍 Content Type: ${contentStructure.type === 'current' ? 'Current Content' : 'Back Content'}`));
    if (contentStructure.batch) {
        console.log(chalk.gray(`🔍 Batch: ${contentStructure.batch}`));
    }
    console.log(chalk.gray(`🔍 Scanning S3 prefix: ${prefix}`));
    console.log('');
    const allFiles = [];
    let continuationToken;
    let batchCount = 0;
    try {
        // Use pagination to get all files
        do {
            batchCount++;
            console.log(chalk.gray(`📦 Fetching batch ${batchCount}...`));
            const command = new ListObjectsV2Command({
                Bucket: bucketName,
                Prefix: prefix,
                MaxKeys: 1000,
                ContinuationToken: continuationToken,
                // NOTE(review): requester pays is unconditional here, while the
                // downloader gates it on a global setting — confirm intent.
                RequestPayer: 'requester',
            });
            const response = await client.send(command);
            if (response.Contents) {
                for (const item of response.Contents) {
                    if (!item.Key)
                        continue;
                    const type = getContentType(item.Key);
                    allFiles.push({
                        key: item.Key,
                        size: item.Size || 0,
                        lastModified: item.LastModified || new Date(),
                        type,
                        fileName: item.Key.split('/').pop() || 'unknown',
                        fileExtension: item.Key.split('.').pop() || 'none',
                    });
                }
            }
            continuationToken = response.NextContinuationToken;
            if (response.Contents) {
                console.log(chalk.gray(` Found ${response.Contents.length} files in this batch`));
            }
        } while (continuationToken);
        console.log(chalk.green(`✅ Total files found: ${allFiles.length}`));
        console.log('');
        displaySummary(allFiles, month || batch || 'unknown', server);
    }
    catch (error) {
        if (error instanceof Error) {
            throw new Error(`Failed to list month metadata: ${error.message}`);
        }
        throw error;
    }
}
89
/**
 * Classify an S3 object key by file extension.
 * Returns 'meca', 'pdf', 'xml', or 'other' for anything else.
 */
function getContentType(key) {
    const knownExtensions = ['meca', 'pdf', 'xml'];
    for (const ext of knownExtensions) {
        if (key.endsWith(`.${ext}`)) {
            return ext;
        }
    }
    return 'other';
}
98
/**
 * Print summary statistics for the listed files: content-structure info,
 * file-type breakdown, MECA size statistics, upload date range, a per-day
 * upload histogram, and a batch-pattern analysis.
 *
 * @param {Array<{key: string, size: number, lastModified: Date, type: string}>} files
 * @param {string} month - month or batch label used for the folder lookup
 * @param {string} server - 'biorxiv' | 'medrxiv'
 */
function displaySummary(files, month, server = getDefaultServer()) {
    console.log(chalk.blue.bold('📊 Summary Statistics'));
    console.log(chalk.blue('===================='));
    console.log('');
    // Fix: with no files every statistic below is undefined (NaN percentages,
    // Invalid Date range from an empty min/max), so bail out early.
    if (files.length === 0) {
        console.log(chalk.yellow('No files found — nothing to summarize.'));
        return;
    }
    // Show content structure info if available
    try {
        const contentStructure = getFolderStructure({ month, server });
        console.log(chalk.cyan('📁 Content Structure:'));
        console.log(` Type: ${chalk.yellow(contentStructure.type === 'current' ? 'Current Content' : 'Back Content')}`);
        if (contentStructure.batch) {
            console.log(` Batch: ${chalk.yellow(contentStructure.batch)}`);
        }
        console.log('');
    }
    catch (error) {
        // Ignore errors in summary display
    }
    // File type breakdown
    const typeCounts = files.reduce((acc, file) => {
        acc[file.type] = (acc[file.type] || 0) + 1;
        return acc;
    }, {});
    console.log(chalk.cyan('📁 File Types:'));
    for (const [type, count] of Object.entries(typeCounts)) {
        const percentage = ((count / files.length) * 100).toFixed(1);
        console.log(` ${chalk.yellow(type.toUpperCase())}: ${chalk.green(count)} (${percentage}%)`);
    }
    console.log('');
    // Size statistics for MECA archives only
    const mecaFiles = files.filter((f) => f.type === 'meca');
    if (mecaFiles.length > 0) {
        // Single pass avoids spreading very large arrays into Math.min/max,
        // which can exceed the engine's argument-count limit.
        let totalSize = 0;
        let minSize = Infinity;
        let maxSize = -Infinity;
        for (const { size } of mecaFiles) {
            totalSize += size;
            if (size < minSize)
                minSize = size;
            if (size > maxSize)
                maxSize = size;
        }
        const avgSize = totalSize / mecaFiles.length;
        console.log(chalk.cyan('📦 MECA File Sizes:'));
        console.log(` Total: ${chalk.green(formatFileSize(totalSize))}`);
        console.log(` Average: ${chalk.green(formatFileSize(avgSize))}`);
        console.log(` Range: ${chalk.green(formatFileSize(minSize))} - ${chalk.green(formatFileSize(maxSize))}`);
        console.log('');
    }
    // Date range (files is non-empty, so min/max are well-defined)
    let earliestTs = Infinity;
    let latestTs = -Infinity;
    for (const f of files) {
        const ts = f.lastModified.getTime();
        if (ts < earliestTs)
            earliestTs = ts;
        if (ts > latestTs)
            latestTs = ts;
    }
    const earliest = new Date(earliestTs);
    const latest = new Date(latestTs);
    console.log(chalk.cyan('📅 Upload Date Range:'));
    console.log(` Earliest: ${chalk.green(earliest.toLocaleDateString())}`);
    console.log(` Latest: ${chalk.green(latest.toLocaleDateString())}`);
    console.log('');
    // Upload date histogram
    const sortedDates = displayUploadDateHistogram(files);
    console.log('');
    // Show batch analysis
    console.log('');
    analyzeBatchPatterns(sortedDates);
}
155
/**
 * Print an ASCII histogram of file counts per upload day and return the
 * per-day entries in chronological order.
 *
 * @param {Array<{lastModified: Date}>} files
 * @returns {Array<[string, number]>} [dateLabel, count] pairs sorted by day.
 */
function displayUploadDateHistogram(files) {
    console.log(chalk.cyan('📊 Upload Date Distribution:'));
    console.log(chalk.cyan('============================'));
    console.log('');
    // Group files by calendar day. Keep a real timestamp per label so the
    // chronological sort does not depend on re-parsing locale-formatted
    // strings — `new Date('13/01/2024')` is NaN or ambiguous in many locales,
    // which made the previous `new Date(label)` sort unreliable.
    const dateGroups = new Map();
    for (const file of files) {
        const dateKey = file.lastModified.toLocaleDateString();
        const entry = dateGroups.get(dateKey);
        if (entry) {
            entry.count += 1;
        }
        else {
            dateGroups.set(dateKey, { count: 1, ts: file.lastModified.getTime() });
        }
    }
    // Sort days chronologically using the stored timestamps.
    const sortedEntries = Array.from(dateGroups.entries()).sort((a, b) => a[1].ts - b[1].ts);
    // Preserve the original return shape: [label, count] pairs.
    const sortedDates = sortedEntries.map(([date, { count }]) => [date, count]);
    // Find the maximum count for scaling
    const maxCount = Math.max(...sortedDates.map(([, count]) => count));
    const maxBarLength = 50; // Maximum bar length in characters
    // Display histogram
    for (const [date, count] of sortedDates) {
        const barLength = Math.round((count / maxCount) * maxBarLength);
        const bar = '█'.repeat(barLength);
        const percentage = ((count / files.length) * 100).toFixed(1);
        // Color code by upload volume (green = low, yellow = medium, red = high)
        let countColor = chalk.green;
        if (count > maxCount * 0.8) {
            countColor = chalk.red; // High volume
        }
        else if (count > maxCount * 0.5) {
            countColor = chalk.yellow; // Medium volume
        }
        console.log(`${chalk.cyan(date.padStart(10))} ${countColor(count.toString().padStart(4))} ${chalk.gray('│')} ${chalk.blue(bar)} ${chalk.gray(`(${percentage}%)`)}`);
    }
    return sortedDates;
}
192
/**
 * Print simple upload-cadence statistics derived from the per-day histogram
 * entries: the number of active days and the mean files per day.
 *
 * @param {Array<[string, number]>} dateGroups - [dateLabel, count] pairs.
 */
function analyzeBatchPatterns(dateGroups) {
    console.log(chalk.cyan('🔍 Batch Analysis'));
    console.log(chalk.cyan('================='));
    console.log('');
    // Nothing to analyze without at least one active day.
    if (dateGroups.length === 0) {
        return;
    }
    const totalDays = dateGroups.length;
    let totalFiles = 0;
    for (const [, count] of dateGroups) {
        totalFiles += count;
    }
    const avgFilesPerDay = totalFiles / totalDays;
    console.log(` Total active days: ${chalk.green(totalDays)}`);
    console.log(` Average files per day: ${chalk.green(avgFilesPerDay.toFixed(1))}`);
    console.log('');
}
206
/**
 * Format a byte count as a human-readable string (e.g. 1536 -> "1.5 KB").
 * @param {number} bytes - non-negative byte count
 * @returns {string}
 */
function formatFileSize(bytes) {
    if (bytes === 0)
        return '0 B';
    const k = 1024;
    // Fix: 'TB' added for consistency with the downloader's formatter —
    // previously values >= 1 TB indexed past the array and printed "undefined".
    const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
    // Clamp so astronomically large values still map to the largest unit.
    const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
@@ -0,0 +1,3 @@
1
import { Command } from 'commander';
/** Commander command for the `batch-process` CLI subcommand. */
export declare const batchProcessCommand: Command;
//# sourceMappingURL=batch-process.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"batch-process.d.ts","sourceRoot":"","sources":["../../src/commands/batch-process.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAU,MAAM,WAAW,CAAC;AAwC5C,eAAO,MAAM,mBAAmB,SA0Q5B,CAAC"}