@things-factory/attachment-base 6.1.172 → 6.1.173

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,12 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadAwb = void 0;
// Side-effect import — presumably wires up the Azure storage handlers
// (verify against ../azure-storage; nothing is read from the module here).
require("../azure-storage");
const attachment_const_1 = require("../attachment-const");
/**
 * Uploads an AWB document through the shared AWBSTORAGE handler.
 * `param.content` is passed as the upload stream and `param.title`
 * as the stored filename; the handler's result is returned as-is.
 */
async function uploadAwb(param) {
    const { content, title } = param;
    return await attachment_const_1.AWBSTORAGE.uploadFile({ stream: content, filename: title });
}
exports.uploadAwb = uploadAwb;
//# sourceMappingURL=upload-azure-blob.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"upload-azure-blob.js","sourceRoot":"","sources":["../../server/util/upload-azure-blob.ts"],"names":[],"mappings":";;;AAAA,4BAAyB;AAEzB,0DAAgD;AAEzC,KAAK,UAAU,SAAS,CAAC,KAAU;IACxC,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,KAAK,CAAA;IAEhC,IAAI,MAAM,GAAG,MAAM,6BAAU,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAA;IAE9E,OAAO,MAAM,CAAA;AACf,CAAC;AAND,8BAMC","sourcesContent":["import '../azure-storage'\n\nimport { AWBSTORAGE } from '../attachment-const'\n\nexport async function uploadAwb(param: any) {\n const { content, title } = param\n\n let result = await AWBSTORAGE.uploadFile({ stream: content, filename: title })\n\n return result\n}\n"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@things-factory/attachment-base",
3
- "version": "6.1.172",
3
+ "version": "6.1.173",
4
4
  "main": "dist-server/index.js",
5
5
  "browser": "client/index.js",
6
6
  "things-factory": true,
@@ -27,6 +27,7 @@
27
27
  "@aws-sdk/client-s3": "^3.46.0",
28
28
  "@aws-sdk/lib-storage": "^3.46.0",
29
29
  "@aws-sdk/s3-presigned-post": "^3.46.0",
30
+ "@azure/storage-blob": "^12.16.0",
30
31
  "@koa/multer": "^3.0.0",
31
32
  "@things-factory/auth-base": "^6.1.172",
32
33
  "@things-factory/env": "^6.1.172",
@@ -35,5 +36,5 @@
35
36
  "mime": "^3.0.0",
36
37
  "multer": "^1.4.5-lts.1"
37
38
  },
38
- "gitHead": "07a66227652e199dea02bee9937d6b663c0eb0f0"
39
+ "gitHead": "a474924e70e20104fbf6787fc5b99d4c00068016"
39
40
  }
package/server/routes.ts CHANGED
@@ -1,6 +1,7 @@
1
1
  import './storage-file'
2
2
  import './storage-s3'
3
3
  import './storage-database'
4
+ import './storage-azure-blob'
4
5
 
5
6
  import { ATTACHMENT_PATH, STORAGE } from './attachment-const'
6
7
 
@@ -0,0 +1,111 @@
1
+ import { BlobServiceClient, ContainerClient } from '@azure/storage-blob'
2
+ import { logger } from '@things-factory/env'
3
+
4
+ import { STORAGE } from './attachment-const'
5
+
6
+ const crypto = require('crypto')
7
+ const mime = require('mime')
8
+
9
+ if (STORAGE && STORAGE.type == 'azureblob') {
10
+ const blobServiceClient = BlobServiceClient.fromConnectionString(STORAGE.connectionString)
11
+
12
+ /* upload file */
13
+ STORAGE.uploadFile = async ({ id, file }) => {
14
+ const { createReadStream, filename, mimetype, encoding } = await file
15
+
16
+ const containerClient = blobServiceClient.getContainerClient(STORAGE.containerName)
17
+ id = id || crypto.randomUUID()
18
+ const ext = filename.split('.').pop()
19
+ const key = ext ? `${id}.${ext}` : id
20
+
21
+ const blockBlobClient = containerClient.getBlockBlobClient(key)
22
+ const stream = createReadStream()
23
+ const buffer = await streamToBuffer(stream)
24
+
25
+ await blockBlobClient.upload(buffer, buffer.length, {
26
+ blobHTTPHeaders: {
27
+ blobContentType: mimetype
28
+ }
29
+ })
30
+
31
+ // await blockBlobClient.uploadStream(stream, undefined, undefined, {
32
+ // blobHTTPHeaders: {
33
+ // blobContentType: mimetype
34
+ // }
35
+ // })
36
+
37
+ const url = `${STORAGE.url}/${STORAGE.containerName}/${key}`
38
+ return {
39
+ id,
40
+ path: key,
41
+ filename,
42
+ size: buffer.length,
43
+ mimetype,
44
+ encoding
45
+ }
46
+ }
47
+
48
+ STORAGE.deleteFile = async (path: string) => {
49
+ const containerClient = blobServiceClient.getContainerClient(STORAGE.containerName)
50
+ const blockBlobClient = containerClient.getBlockBlobClient(path)
51
+ await blockBlobClient.deleteIfExists()
52
+ }
53
+
54
+ /* TODO Streaming to Streaming 으로 구현하라. */
55
+ STORAGE.sendFile = async (context, attachment, next) => {
56
+ const containerClient = blobServiceClient.getContainerClient(STORAGE.containerName)
57
+ const blockBlobClient = containerClient.getBlockBlobClient(attachment)
58
+
59
+ const result = await blockBlobClient.getProperties()
60
+ const response = await blockBlobClient.download(0)
61
+ const body = response.readableStreamBody
62
+
63
+ context.set({
64
+ 'Content-Length': result.contentLength,
65
+ 'Content-Type': mime.getType(attachment),
66
+ 'Last-Modified': result.lastModified.toUTCString(),
67
+ ETag: result.etag,
68
+ 'Cache-Control': 'public, max-age=31556926'
69
+ })
70
+
71
+ context.body = body
72
+ }
73
+
74
+ STORAGE.readFile = async (attachment: string, encoding: string) => {
75
+ const containerClient = blobServiceClient.getContainerClient(STORAGE.containerName)
76
+ const blockBlobClient = containerClient.getBlockBlobClient(attachment)
77
+
78
+ const response = await blockBlobClient.download(0)
79
+ const body = response.readableStreamBody
80
+
81
+ const buffer = Buffer.from(await streamToBuffer(body))
82
+
83
+ switch (encoding) {
84
+ case 'base64':
85
+ return buffer.toString('base64')
86
+ default:
87
+ return buffer
88
+ }
89
+ }
90
+
91
+ STORAGE.generateUploadURL = async (type: string): Promise<{ url: string; fields: { [key: string]: string } }> => {
92
+ const expiresInMinutes = 1
93
+ const id = crypto.randomUUID()
94
+
95
+ return {
96
+ url: `${STORAGE.url}/${STORAGE.containerName}/${id}`,
97
+ fields: {}
98
+ }
99
+ }
100
+
101
+ logger.info('Azure Blob Storage is Ready.')
102
+ }
103
+
104
+ async function streamToBuffer(stream): Promise<Buffer> {
105
+ return new Promise<Buffer>((resolve, reject) => {
106
+ const chunks = []
107
+ stream.on('data', chunk => chunks.push(chunk))
108
+ stream.on('end', () => resolve(Buffer.concat(chunks)))
109
+ stream.on('error', reject)
110
+ })
111
+ }