my-typescript-library-rahul52us 2.4.9 → 2.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2111,18 +2111,25 @@ export async function updateComment(id: string, data: Partial<IDocument>) : Prom
   }
 }
 
-export async function updateStatus(data: Partial<IDocument>) : Promise<any>{
+
+export async function updateStatus(
+  data: { documentId: string; key?: string; value: any }
+): Promise<any> {
   try {
+    const { documentId, key = "status", value } = data;
+    if (!documentId) return null;
+    const updatePayload = {
+      [`originalValues.${key}`]: value,
+    };
+
     const result = await Document.updateOne(
-      { documentId: data.documentId },
-      { $set: { "originalValues.status": data.status } }
+      { documentId },
+      { $set: updatePayload }
     );
-    if (result.matchedCount === 0) {
-      return null;
-    } else {
-      return result;
-    }
+
+    return result.matchedCount === 0 ? null : result;
   } catch (err) {
+    console.error("Update failed:", err);
     return null;
   }
 }
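
In 2.5.1 the updater no longer hard-codes `originalValues.status`: callers pass a documentId, an optional key (defaulting to "status"), and a value, and the function builds the $set path dynamically. A minimal usage sketch, assuming a Document with a matching documentId already exists (the ids and values below are illustrative):

// key defaults to "status", so the old behaviour is preserved
await updateStatus({ documentId: "doc-123", value: "approved" });

// any other originalValues.<key> field can now be updated the same way
await updateStatus({ documentId: "doc-123", key: "priority", value: 2 });
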
@@ -1,134 +1,138 @@
-import mongoose, { connection, mongo } from "mongoose";
+import mongoose, { mongo } from "mongoose";
 import { Readable } from "stream";
 
 import fileSystemSchema from "../repository/schemas/fileSystem.schema";
 const { GridFSBucket } = mongo;
 
-export async function uploadFile(file: any, name: string, type: string) {
+// Core upload logic wrapped as a helper
+async function tryUploadFile(file: any, name: string, type: string) {
   return new Promise((resolve, reject) => {
-    try {
-      const connection = mongoose.connection;
-      const bucket = new GridFSBucket(connection.db);
-
-      const buffer = Buffer.from(file, 'base64');
-      const readableStream = new Readable();
-      readableStream.push(buffer);
-      readableStream.push(null);
-
-      const uploadStream = bucket.openUploadStream(name, {
-        metadata: { contentType: type },
-      });
-
-      readableStream.pipe(uploadStream);
-
-      uploadStream.on("finish", () => {
-        const fileId = uploadStream.id;
-        fileSystemSchema
-          .create({
-            fileId: fileId,
-          })
-          .then((data) => {
-            resolve(data);
-          })
-          .catch((err) => {
-            reject("File upload failed");
-          });
-      });
+    try {
+      const connection = mongoose.connection;
+      const bucket = new GridFSBucket(connection.db);
+
+      const buffer = Buffer.from(file, 'base64');
+      const readableStream = new Readable();
+      readableStream.push(buffer);
+      readableStream.push(null);
+
+      const uploadStream = bucket.openUploadStream(name, {
+        metadata: { contentType: type },
+      });
+
+      readableStream.pipe(uploadStream);
+
+      uploadStream.on("finish", () => {
+        const fileId = uploadStream.id;
+        fileSystemSchema
+          .create({ fileId: fileId })
+          .then((data) => resolve(data))
+          .catch(() => reject("File upload failed"));
+      });
+
+      uploadStream.on("error", () => {
+        reject("Error uploading file");
+      });
+    } catch (err) {
+      reject("Error uploading the file");
+    }
+  });
+}
 
-      uploadStream.on("error", (error) => {
-        reject("Error uploading file");
-      });
-    } catch (err) {
-      reject("Error uploading the file");
+// Public function with retry logic
+export async function uploadFile(file: any, name: string, type: string) {
+  const maxRetries = 3;
+  let attempt = 0;
+  while (attempt < maxRetries) {
+    try {
+      return await tryUploadFile(file, name, type); // success → exit
+    } catch (error) {
+      attempt++;
+      if (attempt >= maxRetries) {
+        throw new Error(`File upload failed after ${maxRetries} attempts`);
       }
-    });
-
-
+      await new Promise((res) => setTimeout(res, 300 * attempt)); // wait before retry
+    }
+  }
 }
 
 export async function getFile(id: string) {
   return new Promise((resolve, reject) => {
-    try {
-      const connection = mongoose.connection;
-      const bucket = new GridFSBucket(connection.db);
-      const downloadStream = bucket.openDownloadStream(new mongoose.Types.ObjectId(id));
+    try {
+      const connection = mongoose.connection;
+      const bucket = new GridFSBucket(connection.db);
+      const downloadStream = bucket.openDownloadStream(new mongoose.Types.ObjectId(id));
+
+      const chunks: Buffer[] = [];
+      let fileSize = 0;
+
+      downloadStream.on("data", (chunk) => {
+        chunks.push(chunk);
+        fileSize += chunk.length;
+      });
+
+      downloadStream.on("error", () => {
+        reject("An error occured while fetching file");
+      });
+
+      downloadStream.on("end", () => {
+        const buffer = Buffer.concat(chunks, fileSize);
+        const downloadData = buffer;
+        const array = new Uint8Array(downloadData);
+        resolve(`[${array}]`);
+      });
+    } catch (err) {
+      reject("Error Getting the file");
+    }
+  });
+}
+
+export async function getFileData(id: string) {
+  return new Promise((resolve, reject) => {
+    try {
+      const connection = mongoose.connection;
+      const bucket = new GridFSBucket(connection.db);
+      const downloadStream = bucket.openDownloadStream(new mongoose.Types.ObjectId(id));
+
+      const chunks: Buffer[] = [];
+      let fileSize = 0;
+
+      connection.db
+        .collection("fs.files")
+        .findOne({ _id: new mongoose.Types.ObjectId(id) })
+        .then((fileInfo) => {
+          if (!fileInfo) return reject("File not found");
 
-      const chunks: Buffer[] = [];
-      let fileSize = 0;
+          const { filename, contentType, length } = fileInfo;
 
           downloadStream.on("data", (chunk) => {
             chunks.push(chunk);
             fileSize += chunk.length;
           });
 
-      downloadStream.on("error", (error) => {
-        reject("An error occured while fetching file")
+          downloadStream.on("error", () => {
+            reject("An error occurred while fetching the file");
           });
 
           downloadStream.on("end", () => {
             const buffer = Buffer.concat(chunks, fileSize);
-
-      // Use the `buffer` variable as needed, e.g., save it to a file or process it further
-      // For example, you can assign it to a variable:
             const downloadData = buffer;
-      const array = new Uint8Array(downloadData)
-      resolve(`[${array}]`)
+            const array = new Uint8Array(downloadData);
+
+            resolve({
+              fileId: id,
+              filename,
+              contentType,
+              fileSize: length,
+              data: array,
+            });
           });
-    } catch (err) {
-      reject("Error Getting the file")
-    }
-  })
+        })
+        .catch(() => {
+          reject("Error fetching file metadata");
+        });
+    } catch (err) {
+      reject("Error Getting the file");
+    }
+  });
 }
-
-export async function getFileData(id: string) {
-  return new Promise((resolve, reject) => {
-    try {
-      const connection = mongoose.connection;
-      const bucket = new GridFSBucket(connection.db);
-      const downloadStream = bucket.openDownloadStream(new mongoose.Types.ObjectId(id));
-
-      const chunks: Buffer[] = [];
-      let fileSize = 0;
-
-      // Fetch the file's metadata from fs.files
-      connection.db.collection('fs.files').findOne({ _id: new mongoose.Types.ObjectId(id) })
-        .then((fileInfo) => {
-          if (!fileInfo) {
-            return reject("File not found");
-          }
-
-          // Push metadata for the response
-          const { filename, contentType, length } = fileInfo;
-
-          downloadStream.on("data", (chunk) => {
-            chunks.push(chunk);
-            fileSize += chunk.length;
-          });
-
-          downloadStream.on("error", () => {
-            reject("An error occurred while fetching the file");
-          });
-
-          downloadStream.on("end", () => {
-            const buffer = Buffer.concat(chunks, fileSize);
-            const downloadData = buffer;
-            const array = new Uint8Array(downloadData);
-
-            // Return an object containing file metadata and data
-            resolve({
-              fileId: id, // Include file ID
-              filename: filename,
-              contentType: contentType,
-              fileSize: length,
-              data: array, // Returning as Uint8Array
-            });
-          });
-        })
-        .catch(() => {
-          reject("Error fetching file metadata");
-        });
-    } catch (err) {
-      reject("Error Getting the file");
-    }
-  });
-}
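
Two behavioural changes in this file stand out: uploadFile now wraps the GridFS upload in up to three attempts with a growing delay (300 ms, then 600 ms) before throwing, and the new getFileData resolves an object carrying metadata from fs.files plus the raw bytes as a Uint8Array, instead of the stringified array that getFile returns. A hedged usage sketch; the base64 payload, filename, and content type are placeholders, and uploadFile is assumed to resolve the fileSystem record it creates on success:

const base64String = "..."; // placeholder base64 payload
const record: any = await uploadFile(base64String, "report.pdf", "application/pdf"); // retried up to 3 times before throwing
const file: any = await getFileData(record.fileId.toString());
console.log(file.filename, file.contentType, file.fileSize); // metadata read from fs.files
console.log(file.data instanceof Uint8Array); // true – the raw file bytes
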
@@ -1410,6 +1410,7 @@ export async function getcountDocumentsWorkflowService(
 
     return {
       ...result.data,
+      _id : workflow?._id,
       workflowName: workflow?.workFlowName
     };
   })
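
Each per-workflow result object now carries the workflow's Mongo _id alongside workflowName, so consumers can tie counts back to the workflow document without a lookup by name. A hedged sketch of reading the new field; the service's arguments are not shown in this hunk, and it is assumed to resolve to an array of these objects:

const rows: any[] = await getcountDocumentsWorkflowService(/* ...args unchanged from 2.4.9 */);
for (const row of rows) {
  console.log(row._id, row.workflowName); // _id is new in 2.5.1
}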