@coralogix/rum-cli 1.1.26 → 1.1.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/README.md +86 -75
  2. package/api/client/request-builder.ts +40 -57
  3. package/api/dsym.api.ts +16 -13
  4. package/api/proguard.api.ts +64 -0
  5. package/api/react-native.api.ts +8 -14
  6. package/api/source-maps.api.ts +7 -13
  7. package/cli/commands/proguard/index.ts +5 -0
  8. package/cli/commands/proguard/upload-proguard-command.ts +36 -0
  9. package/cli/rum-cli.ts +5 -0
  10. package/config/config.ts +2 -0
  11. package/consts/consts.ts +0 -1
  12. package/dist/api/client/client-factory.d.ts +40 -0
  13. package/dist/api/client/request-builder.d.ts +27 -11
  14. package/dist/api/client/request-builder.js +15 -21
  15. package/dist/api/dsym.api.d.ts +1 -1
  16. package/dist/api/dsym.api.js +9 -7
  17. package/dist/api/proguard.api.d.ts +4 -0
  18. package/dist/api/proguard.api.js +52 -0
  19. package/dist/api/react-native.api.js +5 -4
  20. package/dist/api/source-maps.api.js +5 -4
  21. package/dist/cli/commands/proguard/index.d.ts +4 -0
  22. package/dist/cli/commands/proguard/index.js +6 -0
  23. package/dist/cli/commands/proguard/upload-proguard-command.d.ts +2 -0
  24. package/dist/cli/commands/proguard/upload-proguard-command.js +45 -0
  25. package/dist/cli/rum-cli.js +4 -1
  26. package/dist/config/config.d.ts +1 -0
  27. package/dist/config/config.js +2 -1
  28. package/dist/consts/consts.js +2 -3
  29. package/dist/models/proguard-commands.model.d.ts +11 -0
  30. package/dist/models/proguard-commands.model.js +3 -0
  31. package/dist/proto-models/com/coralogix/blobset/v2/entity_metadata.d.ts +18 -0
  32. package/dist/proto-models/com/coralogix/blobset/v2/entity_metadata.js +139 -5
  33. package/dist/proto-models/com/coralogix/blobset/v2/entity_type.d.ts +2 -1
  34. package/dist/proto-models/com/coralogix/blobset/v2/entity_type.js +7 -1
  35. package/dist/proto-models/com/coralogix/rum/v2/release_entity_metadata.d.ts +18 -0
  36. package/dist/proto-models/com/coralogix/rum/v2/release_entity_metadata.js +139 -5
  37. package/dist/proto-models/com/coralogix/rum/v2/release_entity_type.d.ts +2 -1
  38. package/dist/proto-models/com/coralogix/rum/v2/release_entity_type.js +7 -1
  39. package/dist/proto-models/com/coralogix/rum/v2/rum_sdk_versions.d.ts +1 -0
  40. package/dist/proto-models/com/coralogix/rum/v2/rum_sdk_versions.js +17 -3
  41. package/dist/proto-models/com/coralogix/rum/v2/rum_service.d.ts +93 -0
  42. package/dist/proto-models/com/coralogix/rum/v2/rum_service.js +300 -4
  43. package/dist/proto-models/com/coralogix/rum/v2/rum_settings_service.d.ts +9 -8
  44. package/dist/proto-models/com/coralogix/rum/v2/rum_settings_service.js +22 -7
  45. package/dist/services/dsym.service.d.ts +1 -1
  46. package/dist/services/dsym.service.js +3 -3
  47. package/dist/services/proguard.service.d.ts +3 -0
  48. package/dist/services/proguard.service.js +36 -0
  49. package/dist/services/react-native.service.js +2 -2
  50. package/dist/services/source-maps.service.js +2 -2
  51. package/dist/utils/file-processor.utils.d.ts +5 -8
  52. package/dist/utils/file-processor.utils.js +222 -60
  53. package/mapping.txt +160886 -0
  54. package/models/proguard-commands.model.ts +11 -0
  55. package/package.json +2 -3
  56. package/proto-models/com/coralogix/blobset/v2/entity_metadata.ts +160 -4
  57. package/proto-models/com/coralogix/blobset/v2/entity_type.ts +6 -0
  58. package/proto-models/com/coralogix/rum/v2/release_entity_metadata.ts +156 -0
  59. package/proto-models/com/coralogix/rum/v2/release_entity_type.ts +6 -0
  60. package/proto-models/com/coralogix/rum/v2/rum_sdk_versions.ts +17 -1
  61. package/proto-models/com/coralogix/rum/v2/rum_service.ts +347 -1
  62. package/proto-models/com/coralogix/rum/v2/rum_settings_service.ts +23 -6
  63. package/protofetch.lock +4 -4
  64. package/protofetch.toml +3 -3
  65. package/protos/com/coralogix/blobset/v2/entity_metadata.proto +9 -0
  66. package/protos/com/coralogix/blobset/v2/entity_type.proto +1 -0
  67. package/protos/com/coralogix/rum/v2/release_entity_metadata.proto +9 -0
  68. package/protos/com/coralogix/rum/v2/release_entity_type.proto +1 -0
  69. package/protos/com/coralogix/rum/v2/rum_sdk_versions.proto +1 -0
  70. package/protos/com/coralogix/rum/v2/rum_service.proto +23 -0
  71. package/protos/com/coralogix/rum/v2/rum_settings_service.proto +3 -1
  72. package/protoset.bin +0 -0
  73. package/services/dsym.service.ts +2 -2
  74. package/services/proguard.service.ts +33 -0
  75. package/services/react-native.service.ts +1 -1
  76. package/services/source-maps.service.ts +1 -1
  77. package/utils/file-processor.utils.ts +224 -70
@@ -20,7 +20,7 @@ export class SourceMapsService {
20
20
  console.log(GRAY_COLOR, '🚀 Uploading source maps...');
21
21
 
22
22
  try {
23
- const totalFiles: number | undefined = await FileProcessor.traverseSourceMapsFolder(folderPath, '.js.map', request);
23
+ const totalFiles: number | undefined = await FileProcessor.traverse(folderPath, '.js.map', request);
24
24
 
25
25
  await SourceMapsApi.uploadSourceMaps(request, application, version, repoName, commitHash, orgName, env, privateKey);
26
26
 
@@ -1,19 +1,222 @@
1
+ // import * as fs from 'fs';
2
+ // import { Dirent, promises as fsPromises } from 'fs';
3
+ // import * as path from 'path';
4
+ // import { consoleError } from './shared.utils';
5
+ // import { UploadBlobRequest } from '../proto-models/com/coralogix/blobset/v2/blobset_service';
6
+ // import { UploadSourceMapsRequest } from '../proto-models/com/coralogix/rum/v2/rum_source_map_service';
7
+ // import config from '../config';
8
+ // import pako from 'pako';
9
+ // import { FileMetadata } from '../proto-models/com/coralogix/rum/v2/file';
10
+ // import { RED_COLOR } from '../consts/consts';
11
+ // import * as zlib from 'node:zlib';
12
+ //
13
+ // const { readdir, stat, readFile } = fsPromises;
14
+ //
15
+ // export class FileProcessor {
16
+ // static async traverseDsymFolder(folderPath: string, request: UploadBlobRequest): Promise<void> {
17
+ // try {
18
+ // const processDsymFolder = async (dsymFolderPath: string) => {
19
+ // const dwarfPath = await FileProcessor.getDwarfPath(dsymFolderPath);
20
+ // const content = await readFile(dwarfPath);
21
+ // const compressedContent = zlib.gzipSync(content);
22
+ // request.data = new Uint8Array(compressedContent);
23
+ // };
24
+ //
25
+ // // Check if the folder itself is a .dSYM folder
26
+ // if (folderPath.endsWith('.dSYM')) {
27
+ // await processDsymFolder(folderPath);
28
+ // return;
29
+ // }
30
+ //
31
+ // const files: Dirent[] = await readdir(folderPath, { withFileTypes: true });
32
+ //
33
+ // for (const file of files) {
34
+ // // Skip .DS_Store and hidden files
35
+ // if (file.name === '.DS_Store' || file.name.startsWith('.')) {
36
+ // continue;
37
+ // }
38
+ //
39
+ // const filePath: string = path.join(folderPath, file.name);
40
+ //
41
+ // if (file.isDirectory()) {
42
+ // if (file.name.endsWith('.dSYM')) {
43
+ // await processDsymFolder(filePath);
44
+ // return;
45
+ // } else {
46
+ // await FileProcessor.traverseDsymFolder(filePath, request);
47
+ // }
48
+ // }
49
+ // }
50
+ // } catch (error) {
51
+ // consoleError(`Error traversing directory: ${JSON.stringify(error)}`);
52
+ // }
53
+ // }
54
+ //
55
+ // static async traverseSourceMapsFolder(
56
+ // folderPath: string,
57
+ // fileExtension: string,
58
+ // request: UploadBlobRequest,
59
+ // ): Promise<number | undefined> {
60
+ // try {
61
+ // const stats = await stat(folderPath);
62
+ //
63
+ // if (stats.isFile() && folderPath.endsWith(fileExtension)) {
64
+ // return await this.processSourceMapSingleFile(folderPath, fileExtension, request);
65
+ // } else if (stats.isDirectory()) {
66
+ // return await this.processSourceMapDirectory(folderPath, fileExtension, request);
67
+ // }
68
+ // } catch (error) {
69
+ // consoleError(`Error traversing directory: ${error}`);
70
+ // }
71
+ // }
72
+ //
73
+ // private static async processSourceMapSingleFile(filePath: string, fileExtension: string, request: UploadBlobRequest): Promise<number> {
74
+ // try {
75
+ // const content = await readFile(filePath);
76
+ //
77
+ // const { name } = path.parse(filePath);
78
+ //
79
+ // const fileData = { name, content: content.toString('base64') };
80
+ //
81
+ // const compressedContent = zlib.gzipSync(Buffer.from(JSON.stringify([fileData])));
82
+ //
83
+ // request.data = new Uint8Array(compressedContent);
84
+ //
85
+ // return 1;
86
+ // } catch (error) {
87
+ // consoleError(`Error processing file: ${error}`);
88
+ // throw error;
89
+ // }
90
+ // }
91
+ //
92
+ // private static async processSourceMapDirectory(folderPath: string, fileExtension: string, request: UploadBlobRequest): Promise<number> {
93
+ // const files = await readdir(folderPath, { withFileTypes: true });
94
+ // const fileDataList: { name: string; content: string }[] = [];
95
+ //
96
+ // for (const dirent of files) {
97
+ // const filePath = path.join(folderPath, dirent.name);
98
+ //
99
+ // if (dirent.isFile() && dirent.name.endsWith(fileExtension)) {
100
+ // const content = await readFile(filePath);
101
+ // const { name } = path.parse(filePath);
102
+ // fileDataList.push({ name, content: content.toString('base64') });
103
+ // } else if (dirent.isDirectory()) {
104
+ // await this.traverseSourceMapsFolder(filePath, fileExtension, request);
105
+ // }
106
+ // }
107
+ //
108
+ // const compressedContent = zlib.gzipSync(Buffer.from(JSON.stringify(fileDataList)));
109
+ //
110
+ // request.data = new Uint8Array(compressedContent);
111
+ //
112
+ // return fileDataList.length;
113
+ // }
114
+ //
115
+ // private static async getDwarfPath(dsymPath: string): Promise<string> {
116
+ // const dwarfPath: string = 'Contents/Resources/DWARF';
117
+ // const targetPath: string = path.join(dsymPath, dwarfPath);
118
+ // const files = fs.readdirSync(targetPath);
119
+ // if (files.length === 1) {
120
+ // return path.join(dsymPath, dwarfPath, files[0]);
121
+ // } else {
122
+ // consoleError(`Expected exactly one file in the directory, but found: ${files.length}`);
123
+ // return '';
124
+ // }
125
+ // }
126
+ //
127
+ // static async traverseSourceMapsFolderOld(folderPath: string, request: UploadSourceMapsRequest): Promise<void> {
128
+ // const stack: string[] = [folderPath];
129
+ //
130
+ // while (stack.length) {
131
+ // const currentPath = stack.pop();
132
+ // const entries: Dirent[] = await readdir(currentPath!, { withFileTypes: true });
133
+ //
134
+ // await Promise.all(
135
+ // entries.map(async (file: Dirent) => {
136
+ // const { name } = file;
137
+ // const entryPath = path.join(currentPath as string, name);
138
+ // const isCorrectSuffix = file.isFile() && name.endsWith('.js.map');
139
+ //
140
+ // if (file.isDirectory()) stack.push(entryPath);
141
+ // else if (isCorrectSuffix) request.files.push(await FileProcessor.getFileMetadata(entryPath));
142
+ // }),
143
+ // );
144
+ // }
145
+ // }
146
+ //
147
+ // static compressFileContentChunks(fileContent: Uint8Array): Uint8Array {
148
+ // const { chunkSize } = config.rumApi; // 512 KB chunk size
149
+ // const deflate = new pako.Deflate();
150
+ // const totalChunks = Math.ceil(fileContent.length / chunkSize);
151
+ //
152
+ // for (let i = 0; i < totalChunks; i++) {
153
+ // const start = i * chunkSize;
154
+ // const end = start + chunkSize;
155
+ // const chunk = fileContent.subarray(start, end);
156
+ //
157
+ // const isLastChunk = i === totalChunks - 1;
158
+ //
159
+ // deflate.push(chunk, isLastChunk);
160
+ // }
161
+ //
162
+ // const { err, msg, result } = deflate;
163
+ //
164
+ // if (err) console.error(RED_COLOR, msg);
165
+ //
166
+ // return result;
167
+ // }
168
+ //
169
+ // private static async getFileMetadata(filePath: string): Promise<FileMetadata> {
170
+ // const { size } = await stat(filePath);
171
+ // const { name } = path.parse(filePath);
172
+ // const content = await readFile(filePath);
173
+ //
174
+ // return { chunkName: name, size, content };
175
+ // }
176
+ // }
177
+
1
178
  import * as fs from 'fs';
2
179
  import { Dirent, promises as fsPromises } from 'fs';
3
180
  import * as path from 'path';
4
181
  import { consoleError } from './shared.utils';
5
182
  import { UploadBlobRequest } from '../proto-models/com/coralogix/blobset/v2/blobset_service';
6
- import { UploadSourceMapsRequest } from '../proto-models/com/coralogix/rum/v2/rum_source_map_service';
7
- import config from '../config';
8
- import pako from 'pako';
9
- import { FileMetadata } from '../proto-models/com/coralogix/rum/v2/file';
10
- import { RED_COLOR } from '../consts/consts';
11
183
  import * as zlib from 'node:zlib';
12
184
 
13
185
  const { readdir, stat, readFile } = fsPromises;
14
186
 
15
187
  export class FileProcessor {
16
- static async traverseDsymFolder(folderPath: string, request: UploadBlobRequest): Promise<void> {
188
+ static async traverseProguardFolder(proguardPath: string, request: UploadBlobRequest): Promise<void> {
189
+ try {
190
+ const processTxtFile = async (proguardFile: string) => {
191
+ const content = await readFile(proguardFile);
192
+ const compressedContent = zlib.gzipSync(content);
193
+ request.data = new Uint8Array(compressedContent);
194
+ };
195
+
196
+ // Check if the folder itself is a .txt file
197
+ if (proguardPath.endsWith('.txt')) {
198
+ await processTxtFile(proguardPath);
199
+ return;
200
+ }
201
+
202
+ const files: Dirent[] = await readdir(proguardPath, { withFileTypes: true });
203
+
204
+ for (const file of files) {
205
+ const filePath: string = path.join(proguardPath, file.name);
206
+
207
+ if (file.name.endsWith('.txt')) {
208
+ await processTxtFile(filePath);
209
+ return;
210
+ } else {
211
+ await FileProcessor.traverseProguardFolder(filePath, request);
212
+ }
213
+ }
214
+ } catch (error) {
215
+ consoleError(`Error traversing directory: ${JSON.stringify(error)}`);
216
+ }
217
+ }
218
+
219
+ static async traverseDsymFolder(dsymPath: string, request: UploadBlobRequest): Promise<void> {
17
220
  try {
18
221
  const processDsymFolder = async (dsymFolderPath: string) => {
19
222
  const dwarfPath = await FileProcessor.getDwarfPath(dsymFolderPath);
@@ -23,12 +226,12 @@ export class FileProcessor {
23
226
  };
24
227
 
25
228
  // Check if the folder itself is a .dSYM folder
26
- if (folderPath.endsWith('.dSYM')) {
27
- await processDsymFolder(folderPath);
229
+ if (dsymPath.endsWith('.dSYM')) {
230
+ await processDsymFolder(dsymPath);
28
231
  return;
29
232
  }
30
233
 
31
- const files: Dirent[] = await readdir(folderPath, { withFileTypes: true });
234
+ const files: Dirent[] = await readdir(dsymPath, { withFileTypes: true });
32
235
 
33
236
  for (const file of files) {
34
237
  // Skip .DS_Store and hidden files
@@ -36,7 +239,7 @@ export class FileProcessor {
36
239
  continue;
37
240
  }
38
241
 
39
- const filePath: string = path.join(folderPath, file.name);
242
+ const filePath: string = path.join(dsymPath, file.name);
40
243
 
41
244
  if (file.isDirectory()) {
42
245
  if (file.name.endsWith('.dSYM')) {
@@ -52,26 +255,27 @@ export class FileProcessor {
52
255
  }
53
256
  }
54
257
 
55
- static async traverseSourceMapsFolder(
56
- folderPath: string,
57
- fileExtension: string,
58
- request: UploadBlobRequest,
59
- ): Promise<number | undefined> {
258
+ static async traverse(folderPath: string, fileExtension: string, request: UploadBlobRequest): Promise<number | undefined> {
60
259
  try {
61
260
  const stats = await stat(folderPath);
62
261
 
63
262
  if (stats.isFile() && folderPath.endsWith(fileExtension)) {
64
- return await this.processSourceMapSingleFile(folderPath, fileExtension, request);
263
+ return await this.processSingleFile(folderPath, fileExtension, request);
65
264
  } else if (stats.isDirectory()) {
66
- return await this.processSourceMapDirectory(folderPath, fileExtension, request);
265
+ return await this.processDirectory(folderPath, fileExtension, request);
67
266
  }
68
267
  } catch (error) {
69
268
  consoleError(`Error traversing directory: ${error}`);
70
269
  }
71
270
  }
72
271
 
73
- private static async processSourceMapSingleFile(filePath: string, fileExtension: string, request: UploadBlobRequest): Promise<number> {
272
+ private static async processSingleFile(filePath: string, fileExtension: string, request: UploadBlobRequest): Promise<number> {
74
273
  try {
274
+ if (!filePath.endsWith(fileExtension)) {
275
+ consoleError(`File ${filePath} does not have the correct extension: ${fileExtension}`);
276
+ return 0;
277
+ }
278
+
75
279
  const content = await readFile(filePath);
76
280
 
77
281
  const { name } = path.parse(filePath);
@@ -89,7 +293,7 @@ export class FileProcessor {
89
293
  }
90
294
  }
91
295
 
92
- private static async processSourceMapDirectory(folderPath: string, fileExtension: string, request: UploadBlobRequest): Promise<number> {
296
+ private static async processDirectory(folderPath: string, fileExtension: string, request: UploadBlobRequest): Promise<number> {
93
297
  const files = await readdir(folderPath, { withFileTypes: true });
94
298
  const fileDataList: { name: string; content: string }[] = [];
95
299
 
@@ -101,7 +305,7 @@ export class FileProcessor {
101
305
  const { name } = path.parse(filePath);
102
306
  fileDataList.push({ name, content: content.toString('base64') });
103
307
  } else if (dirent.isDirectory()) {
104
- await this.traverseSourceMapsFolder(filePath, fileExtension, request);
308
+ await this.traverse(filePath, fileExtension, request);
105
309
  }
106
310
  }
107
311
 
@@ -123,54 +327,4 @@ export class FileProcessor {
123
327
  return '';
124
328
  }
125
329
  }
126
-
127
- static async traverseSourceMapsFolderOld(folderPath: string, request: UploadSourceMapsRequest): Promise<void> {
128
- const stack: string[] = [folderPath];
129
-
130
- while (stack.length) {
131
- const currentPath = stack.pop();
132
- const entries: Dirent[] = await readdir(currentPath!, { withFileTypes: true });
133
-
134
- await Promise.all(
135
- entries.map(async (file: Dirent) => {
136
- const { name } = file;
137
- const entryPath = path.join(currentPath as string, name);
138
- const isCorrectSuffix = file.isFile() && name.endsWith('.js.map');
139
-
140
- if (file.isDirectory()) stack.push(entryPath);
141
- else if (isCorrectSuffix) request.files.push(await FileProcessor.getFileMetadata(entryPath));
142
- }),
143
- );
144
- }
145
- }
146
-
147
- static compressFileContentChunks(fileContent: Uint8Array): Uint8Array {
148
- const { chunkSize } = config.rumApi; // 512 KB chunk size
149
- const deflate = new pako.Deflate();
150
- const totalChunks = Math.ceil(fileContent.length / chunkSize);
151
-
152
- for (let i = 0; i < totalChunks; i++) {
153
- const start = i * chunkSize;
154
- const end = start + chunkSize;
155
- const chunk = fileContent.subarray(start, end);
156
-
157
- const isLastChunk = i === totalChunks - 1;
158
-
159
- deflate.push(chunk, isLastChunk);
160
- }
161
-
162
- const { err, msg, result } = deflate;
163
-
164
- if (err) console.error(RED_COLOR, msg);
165
-
166
- return result;
167
- }
168
-
169
- private static async getFileMetadata(filePath: string): Promise<FileMetadata> {
170
- const { size } = await stat(filePath);
171
- const { name } = path.parse(filePath);
172
- const content = await readFile(filePath);
173
-
174
- return { chunkName: name, size, content };
175
- }
176
330
  }