@elqnt/react 1.0.7 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/index.js +2 -120
- package/dist/components/index.js.map +1 -1
- package/dist/components/index.mjs +1 -119
- package/dist/components/index.mjs.map +1 -1
- package/dist/components/upload/upload-actions.js +5 -3
- package/dist/components/upload/upload-actions.js.map +1 -1
- package/dist/components/upload/upload-actions.mjs +5 -3
- package/dist/components/upload/upload-actions.mjs.map +1 -1
- package/dist/index.js +2 -120
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1 -119
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -3
- package/dist/components/upload/upload-actions.d.mts +0 -20
- package/dist/components/upload/upload-actions.d.ts +0 -20

package/dist/components/upload/upload-actions.js
CHANGED

@@ -1,4 +1,4 @@
-"use
+"use server";
 "use strict";
 "use server";
 var __defProp = Object.defineProperty;
@@ -134,10 +134,12 @@ async function uploadToS3(formData) {
     const fileUrl = `${S3_ENDPOINT}/${S3_BUCKET_NAME}/${fileName}`;
     return { success: true, fileUrl };
   } catch (error) {
-
+    const errorMessage = error instanceof Error ? error.message : "Failed to upload file";
+    const errorName = error instanceof Error ? error.name : "Unknown";
+    console.error("Upload error:", errorName, errorMessage, error);
     return {
       success: false,
-      error:
+      error: errorMessage
     };
   }
 }

package/dist/components/upload/upload-actions.js.map
CHANGED

@@ -1 +1 @@
-[single-line source map: version 3, sources ["../../../components/upload/upload-actions.ts"], embedded sourcesContent, and CJS mappings from the 1.0.7 build]
+[single-line source map: version 3, same sources, sourcesContent updated to the new error handling, and regenerated CJS mappings for the 1.0.9 build]
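The error-handling change above keeps the result shape of both upload actions ({ success: true, fileUrl } on success, { success: false, error } on failure) but guarantees that error is a plain string, while the error name, message, and original object are logged server-side. A minimal consumption sketch follows; the UploadResult type is spelled out here purely for illustration and is not something this diff shows the package exporting.

type UploadResult =
  | { success: true; fileUrl: string }
  | { success: false; error: string };

async function submitUpload(
  upload: (formData: FormData) => Promise<UploadResult>,
  file: File
) {
  const formData = new FormData();
  formData.append("file", file);

  const result = await upload(formData);
  if (result.success) {
    console.log("Uploaded to", result.fileUrl);
  } else {
    // As of 1.0.9, error is always a string message; the underlying Error object
    // is only visible in the server-side console.error output.
    console.error("Upload failed:", result.error);
  }
}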

package/dist/components/upload/upload-actions.mjs
CHANGED

@@ -1,4 +1,4 @@
-"use
+"use server";
 "use server";

 // components/upload/upload-actions.ts
@@ -110,10 +110,12 @@ async function uploadToS3(formData) {
     const fileUrl = `${S3_ENDPOINT}/${S3_BUCKET_NAME}/${fileName}`;
     return { success: true, fileUrl };
   } catch (error) {
-
+    const errorMessage = error instanceof Error ? error.message : "Failed to upload file";
+    const errorName = error instanceof Error ? error.name : "Unknown";
+    console.error("Upload error:", errorName, errorMessage, error);
     return {
       success: false,
-      error:
+      error: errorMessage
     };
   }
 }

package/dist/components/upload/upload-actions.mjs.map
CHANGED

@@ -1 +1 @@
-[single-line source map: version 3, sources ["../../../components/upload/upload-actions.ts"], embedded sourcesContent, and ESM mappings from the 1.0.7 build]
+[single-line source map: version 3, same sources, sourcesContent updated to the new error handling, and regenerated ESM mappings for the 1.0.9 build]
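Both the CJS and ESM builds now prepend "use server"; as the very first line of upload-actions, while the directive the bundler had already emitted further down stays in place. React and Next.js expect a module-level "use server" directive to sit at the top of the file, before any other code, so emitting it first keeps the compiled output recognizable as a server-action module. One plausible way to produce this with tsup is an output banner; the sketch below is an assumption about the build setup (the diff does not include the package's build config), not a description of it.

// tsup.config.ts - hypothetical sketch; entry points and options are assumed.
import { defineConfig } from "tsup";

export default defineConfig({
  entry: ["components/upload/upload-actions.ts"],
  format: ["cjs", "esm"],
  dts: true,
  // Prepend the directive so it lands ahead of "use strict" and any injected helpers.
  banner: { js: '"use server";' },
});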
package/dist/index.js
CHANGED
@@ -5975,125 +5975,7 @@ var import_lucide_react16 = require("lucide-react");
 var import_react17 = require("react");
 var import_react_dropzone2 = require("react-dropzone");
 var import_sonner2 = require("sonner");
-
-// components/upload/upload-actions.ts
-var import_storage_blob = require("@azure/storage-blob");
-var import_client_s3 = require("@aws-sdk/client-s3");
-async function uploadFile(formData) {
-  try {
-    const file = formData.get("file");
-    if (!file) {
-      throw new Error("No file provided");
-    }
-    const STORAGE_CONNECTION_STRING = process.env.AZURE_STORAGE_CONNECTION_STRING;
-    const STORAGE_CONTAINER_NAME = process.env.AZURE_STORAGE_CONTAINER_NAME;
-    if (!STORAGE_CONNECTION_STRING || !STORAGE_CONTAINER_NAME) {
-      throw new Error(
-        "STORAGE_CONNECTION_STRING or STORAGE_CONTAINER_NAME not set"
-      );
-    }
-    const fileExtension = file.name.split(".").pop()?.toLowerCase();
-    const fileName = `${Date.now()}-${Math.random().toString(36).substring(2)}.${fileExtension}`;
-    const contentType = getContentType(fileExtension);
-    const buffer = Buffer.from(await file.arrayBuffer());
-    const blobServiceClient = import_storage_blob.BlobServiceClient.fromConnectionString(
-      STORAGE_CONNECTION_STRING
-    );
-    const containerClient = blobServiceClient.getContainerClient(
-      STORAGE_CONTAINER_NAME
-    );
-    await containerClient.createIfNotExists();
-    const blockBlobClient = containerClient.getBlockBlobClient(fileName);
-    await blockBlobClient.upload(buffer, buffer.length, {
-      blobHTTPHeaders: {
-        blobContentType: contentType
-      }
-    });
-    const fileUrl = blockBlobClient.url;
-    return { success: true, fileUrl };
-  } catch (error) {
-    console.error("Upload error:", error);
-    return {
-      success: false,
-      error: error instanceof Error ? error.message : "Failed to upload file"
-    };
-  }
-}
-function getContentType(fileExtension) {
-  if (!fileExtension) return "application/octet-stream";
-  const mimeTypes = {
-    // Images
-    png: "image/png",
-    jpg: "image/jpeg",
-    jpeg: "image/jpeg",
-    gif: "image/gif",
-    webp: "image/webp",
-    svg: "image/svg+xml",
-    // Documents
-    pdf: "application/pdf",
-    doc: "application/msword",
-    docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
-    // Others
-    txt: "text/plain",
-    csv: "text/csv",
-    json: "application/json"
-  };
-  return mimeTypes[fileExtension] || "application/octet-stream";
-}
-async function uploadToS3(formData) {
-  try {
-    const file = formData.get("file");
-    if (!file) {
-      throw new Error("No file provided");
-    }
-    const AWS_ACCESS_KEY_ID = process.env.LINODE_ACCESS_KEY;
-    const AWS_SECRET_ACCESS_KEY = process.env.LINODE_SECRET_KEY;
-    const S3_BUCKET_NAME = process.env.LINODE_BUCKET_NAME;
-    let S3_ENDPOINT = process.env.LINODE_OBJECT_STORAGE_ENDPOINT;
-    if (!AWS_ACCESS_KEY_ID || !AWS_SECRET_ACCESS_KEY || !S3_BUCKET_NAME || !S3_ENDPOINT) {
-      throw new Error("S3 credentials or configuration not set");
-    }
-    const fileExtension = file.name.split(".").pop()?.toLowerCase();
-    const fileName = `${Date.now()}-${Math.random().toString(36).substring(2)}.${fileExtension}`;
-    const contentType = getContentType(fileExtension);
-    const buffer = Buffer.from(await file.arrayBuffer());
-    const s3Client = new import_client_s3.S3Client({
-      endpoint: S3_ENDPOINT,
-      region: "us-east-1",
-      // Linode requires a region but it's not used with custom endpoint
-      credentials: {
-        accessKeyId: AWS_ACCESS_KEY_ID,
-        secretAccessKey: AWS_SECRET_ACCESS_KEY
-      },
-      forcePathStyle: true,
-      // Required for Linode Object Storage
-      maxAttempts: 3,
-      requestHandler: {
-        timeout: 1e4
-        // 10 seconds timeout
-      }
-    });
-    const command = new import_client_s3.PutObjectCommand({
-      Bucket: S3_BUCKET_NAME,
-      Key: fileName,
-      Body: buffer,
-      ContentType: contentType,
-      ACL: "public-read"
-      // Make the object publicly readable
-    });
-    await s3Client.send(command);
-    const fileUrl = `${S3_ENDPOINT}/${S3_BUCKET_NAME}/${fileName}`;
-    return { success: true, fileUrl };
-  } catch (error) {
-    console.error("Upload error:", JSON.stringify(error, null, 2));
-    return {
-      success: false,
-      error: error instanceof Error ? error.message : "Failed to upload file"
-    };
-  }
-}
-
-// components/upload/upload-widget-v2.tsx
+var import_upload_actions = require("./upload/upload-actions");
 var import_jsx_runtime81 = require("react/jsx-runtime");
 function UploadWidgetV2({
   accept,
@@ -6111,7 +5993,7 @@ function UploadWidgetV2({
   const handleUpload = async (files) => {
     setIsUploading(true);
     try {
-      const uploadFunction = uploadDestination === "s3" ? uploadToS3 : uploadFile;
+      const uploadFunction = uploadDestination === "s3" ? import_upload_actions.uploadToS3 : import_upload_actions.uploadFile;
       const uploadPromises = files.map(async (file) => {
         const formData = new FormData();
         formData.append("file", file);
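With 1.0.9, dist/index.js no longer inlines the upload server actions; it requires them from ./upload/upload-actions and the widget calls them through that module (dist/index.mjs, listed above with the same +2/-119 shape, presumably mirrors this). For consumers the widget surface appears unchanged; a hypothetical usage sketch follows, in which the root export of UploadWidgetV2, the react-dropzone style accept map, and the uploadDestination prop are assumptions drawn from identifiers visible in this diff rather than from documented API.

"use client";

import { UploadWidgetV2 } from "@elqnt/react";

export default function UploadPage() {
  return (
    <UploadWidgetV2
      // accept and uploadDestination appear in the diffed component; their exact
      // types are not shown, so treat these values as illustrative.
      accept={{ "image/*": [".png", ".jpg", ".jpeg"] }}
      uploadDestination="s3"
    />
  );
}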