@storecraft/storage-s3-compatible 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,5 +1,10 @@
1
1
  # Storecraft S3 compatible storage
2
2
 
3
+ <div style="text-align:center">
4
+ <img src='https://storecraft.app/storecraft-color.svg'
5
+ width='90%' />
6
+ </div><hr/><br/>
7
+
3
8
  `fetch` ready support for an `S3` like storage:
4
9
  - `Amazon S3`
5
10
  - `Cloudflare R2`
@@ -11,15 +16,21 @@ Features:
11
16
  - Supports streaming `Get` / `Put` / `Delete`
12
17
  - Supports `presigned` `Get` / `Put` requests to offload to client
13
18
 
19
+ ```bash
20
+ npm i @storecraft/storage-s3-compatible
21
+ ```
22
+
14
23
  ## usage
15
24
 
16
25
  ```js
17
- import { R2 } from '@storecraft/storage-s3-compatible'
26
+ import { R2, S3, DigitalOceanSpaces, S3CompatibleStorage } from '@storecraft/storage-s3-compatible'
18
27
 
19
- const storage = new R2(
20
- process.env.R2_BUCKET, process.env.R2_ACCOUNT_ID,
21
- process.env.R2_ACCESS_KEY_ID, process.env.R2_SECRET_ACCESS_KEY
22
- );
28
+ const storage = new R2({
29
+ accessKeyId: process.env.R2_ACCESS_KEY_ID,
30
+ account_id: process.env.R2_ACCOUNT_ID,
31
+ bucket: process.env.R2_BUCKET,
32
+ secretAccessKey: process.env.R2_SECRET_ACCESS_KEY
33
+ });
23
34
 
24
35
  // write
25
36
  await storage.putBlob(
@@ -34,6 +45,41 @@ console.log('presign GET url ', url);
34
45
 
35
46
  ```
36
47
 
48
+ ## In Storecraft App
49
+
50
+ ```js
51
+ import { App } from '@storecraft/core';
52
+ import { MongoDB, migrateToLatest } from '@storecraft/database-mongodb';
53
+ import { NodePlatform } from '@storecraft/core/platform/node';
54
+ import { R2 } from '@storecraft/storage-s3-compatible'
55
+
56
+ const app = new App(
57
+ {
58
+ storage_rewrite_urls: undefined,
59
+ general_store_name: 'Wush Wush Games',
60
+ general_store_description: 'We sell cool retro video games',
61
+ general_store_website: 'https://wush.games',
62
+ auth_secret_access_token: process.env.auth_secret_access_token,
63
+ auth_secret_refresh_token: process.env.auth_secret_refresh_token,
64
+ auth_admins_emails: ['jonny@begood.com']
65
+ }
66
+ )
67
+ .withPlatform(new NodePlatform())
68
+ .withDatabase(new MongoDB())
69
+ .withStorage(
70
+ new R2({
70
+ bucket: process.env.R2_BUCKET,
71
+ account_id: process.env.R2_ACCOUNT_ID,
72
+ accessKeyId: process.env.R2_ACCESS_KEY_ID,
73
+ secretAccessKey: process.env.R2_SECRET_ACCESS_KEY
74
+ })
76
+ );
77
+
78
+ await app.init();
79
+ await migrateToLatest(app.db, false);
80
+
81
+ ```
82
+
37
83
  ```text
38
84
  Author: Tomer Shalev (tomer.shalev@gmail.com)
39
85
  ```
package/adapter.js CHANGED
@@ -25,12 +25,12 @@ const infer_content_type = (name) => {
25
25
 
26
26
 
27
27
  /**
28
- * @typedef {import('./types.public.js').Config} Config
28
+ * @typedef {import('./types.public.d.ts').Config} Config
29
29
  */
30
30
 
31
31
  /**
32
32
  * The base S3 compatible class
33
- * @typedef {import('@storecraft/core/v-storage').storage_driver} storage
33
+ * @typedef {import('@storecraft/core/storage').storage_driver} storage
34
34
  *
35
35
  * @implements {storage}
36
36
  */
@@ -74,7 +74,7 @@ export class S3CompatibleStorage {
74
74
  get config() { return this.#_config; }
75
75
 
76
76
  features() {
77
- /** @type {import('@storecraft/core/v-storage').StorageFeatures} */
77
+ /** @type {import('@storecraft/core/storage').StorageFeatures} */
78
78
  const f = {
79
79
  supports_signed_urls: true
80
80
  }
@@ -137,7 +137,7 @@ export class S3CompatibleStorage {
137
137
  /**
138
138
  *
139
139
  * @param {string} key
140
- * @returns {ReturnType<import('@storecraft/core/v-storage').storage_driver["putSigned"]>}
140
+ * @returns {ReturnType<import('@storecraft/core/storage').storage_driver["putSigned"]>}
141
141
  */
142
142
  async putSigned(key) {
143
143
  const url = new URL(this.get_file_url(key));
@@ -218,7 +218,7 @@ export class S3CompatibleStorage {
218
218
  /**
219
219
  *
220
220
  * @param {string} key
221
- * @returns {ReturnType<import('@storecraft/core/v-storage').storage_driver["getSigned"]>}
221
+ * @returns {ReturnType<import('@storecraft/core/storage').storage_driver["getSigned"]>}
222
222
  */
223
223
  async getSigned(key) {
224
224
  const url = new URL(this.get_file_url(key));
@@ -259,18 +259,16 @@ export class S3CompatibleStorage {
259
259
  export class R2 extends S3CompatibleStorage {
260
260
 
261
261
  /**
262
- *
263
- * @param {string} bucket
264
- * @param {string} account_id
265
- * @param {string} access_key_id
266
- * @param {string} secret_access_key
262
+ * @param {import('./types.public.d.ts').R2Config} config
267
263
  */
268
- constructor(bucket, account_id, access_key_id, secret_access_key) {
269
- super({
270
- endpoint: `https://${account_id}.r2.cloudflarestorage.com`,
271
- accessKeyId: access_key_id, secretAccessKey: secret_access_key,
272
- bucket, forcePathStyle: true, region: 'auto'
273
- })
264
+ constructor({bucket, account_id, accessKeyId, secretAccessKey}) {
265
+ super(
266
+ {
267
+ endpoint: `https://${account_id}.r2.cloudflarestorage.com`,
268
+ accessKeyId, secretAccessKey, bucket,
269
+ forcePathStyle: true, region: 'auto'
270
+ }
271
+ )
274
272
  }
275
273
 
276
274
  }
@@ -281,19 +279,16 @@ export class R2 extends S3CompatibleStorage {
281
279
  export class S3 extends S3CompatibleStorage {
282
280
 
283
281
  /**
284
- *
285
- * @param {string} bucket
286
- * @param {string} region
287
- * @param {string} access_key_id
288
- * @param {string} secret_access_key
289
- * @param {boolean} forcePathStyle
282
+ * @param {import('./types.public.d.ts').AwsS3Config} config
290
283
  */
291
- constructor(bucket, region, access_key_id, secret_access_key, forcePathStyle=false) {
292
- super({
293
- endpoint: `https://s3${region ? ('.'+region) : ''}.amazonaws.com`,
294
- accessKeyId: access_key_id, secretAccessKey: secret_access_key,
295
- bucket, forcePathStyle, region
296
- })
284
+ constructor({bucket, region, accessKeyId, secretAccessKey, forcePathStyle=false}) {
285
+ super(
286
+ {
287
+ endpoint: `https://s3${region ? ('.'+region) : ''}.amazonaws.com`,
288
+ accessKeyId, secretAccessKey,
289
+ bucket, forcePathStyle, region
290
+ }
291
+ )
297
292
  }
298
293
 
299
294
  }
@@ -304,18 +299,16 @@ export class S3 extends S3CompatibleStorage {
304
299
  export class DigitalOceanSpaces extends S3CompatibleStorage {
305
300
 
306
301
  /**
307
- *
308
- * @param {string} bucket
309
- * @param {string} region 'nyc3' for example
310
- * @param {string} access_key_id
311
- * @param {string} secret_access_key
302
+ * @param {Omit<import('./types.public.d.ts').Config, 'endpoint' | 'forcePathStyle'>} config
312
303
  */
313
- constructor(bucket, region, access_key_id, secret_access_key) {
314
- super({
315
- endpoint: `https://${region}.digitaloceanspaces.com`,
316
- accessKeyId: access_key_id, secretAccessKey: secret_access_key,
317
- bucket, forcePathStyle: false, region: 'auto'
318
- })
304
+ constructor({bucket, region, accessKeyId, secretAccessKey}) {
305
+ super(
306
+ {
307
+ endpoint: `https://${region}.digitaloceanspaces.com`,
308
+ accessKeyId, secretAccessKey,
309
+ bucket, forcePathStyle: false, region: 'auto'
310
+ }
311
+ )
319
312
  }
320
313
 
321
314
  }
package/jsconfig.json ADDED
@@ -0,0 +1,13 @@
1
+ {
2
+ "compilerOptions": {
3
+ "checkJs": true,
4
+ "moduleResolution": "NodeNext",
5
+ "module": "NodeNext",
6
+ "composite": true
7
+ },
8
+ "include": [
9
+ "*",
10
+ "src/*",
11
+ "tests/*.js"
12
+ ]
13
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@storecraft/storage-s3-compatible",
3
- "version": "1.0.0",
3
+ "version": "1.0.2",
4
4
  "description": "Official S3-Compatible Storage adapter for storecraft",
5
5
  "license": "MIT",
6
6
  "author": "Tomer Shalev (https://github.com/store-craft)",
@@ -8,7 +8,7 @@
8
8
  "repository": {
9
9
  "type": "git",
10
10
  "url": "https://github.com/store-craft/storecraft.git",
11
- "directory": "packages/storage-s3-compatible"
11
+ "directory": "packages/storage/storage-s3-compatible"
12
12
  },
13
13
  "keywords": [
14
14
  "commerce",
@@ -21,8 +21,8 @@
21
21
  "storage-s3-compatible:publish": "npm publish --access public"
22
22
  },
23
23
  "type": "module",
24
- "main": "index.js",
25
- "types": "./types.public.d.ts",
24
+ "main": "adapter.js",
25
+ "types": "types.public.d.ts",
26
26
  "dependencies": {
27
27
  "@storecraft/core": "^1.0.0"
28
28
  },
@@ -0,0 +1,84 @@
1
+ import 'dotenv/config';
2
+ import * as assert from 'uvu/assert';
3
+ import { S3 } from '@storecraft/storage-s3-compatible'
4
+ import { readFile } from 'node:fs/promises';
5
+ import { storage as storage_test_runner } from '@storecraft/core/test-runner'
6
+
7
+ const areBlobsEqual = async (blob1, blob2) => {
8
+ return !Buffer.from(await blob1.arrayBuffer()).compare(
9
+ Buffer.from(await blob2.arrayBuffer())
10
+ );
11
+ };
12
+
13
+ const FORCE_PATH_STYLE = true;
14
+
15
+ const storage = new S3({
16
+ accessKeyId: process.env.S3_ACCESS_KEY_ID,
17
+ bucket: process.env.S3_BUCKET,
18
+ forcePathStyle: FORCE_PATH_STYLE,
19
+ region: process.env.S3_REGION,
20
+ secretAccessKey: process.env.S3_SECRET_KEY
21
+ });
22
+
23
+ const suite = storage_test_runner.create(storage);
24
+
25
+ suite.before(async () => { await storage.init(undefined) });
26
+
27
+ suite('blob put/get/delete', async () => {
28
+ const data = [
29
+ {
30
+ key: 'folder2/node2222.png',
31
+ blob: new Blob([await readFile('./node.png')])
32
+ }
33
+ ];
34
+
35
+ data.forEach(
36
+ async d => {
37
+ // write
38
+ await storage.putBlob(d.key, d.blob);
39
+ // read
40
+ const { value: blob_read } = await storage.getBlob(d.key);
41
+ const url = await storage.getSigned(d.key);
42
+ console.log('presign GET url ', url);
43
+
44
+ // compare
45
+ const equal = await areBlobsEqual(blob_read, d.blob);
46
+ assert.ok(equal, 'Blobs are not equal !!!');
47
+
48
+ // delete
49
+ // await storage.remove(d.key);
50
+ }
51
+ );
52
+
53
+ });
54
+
55
+ suite('blob put (presign)', async () => {
56
+ const data = [
57
+ {
58
+ key: 'folder2/node_test2.png',
59
+ blob: new Blob([await readFile('./node.png')])
60
+ }
61
+ ];
62
+
63
+ data.forEach(
64
+ async d => {
65
+ // get put presigned url
66
+ const { url, method, headers } = await storage.putSigned(d.key);
67
+ // now let's use it to upload
68
+ const r = await fetch(
69
+ url, {
70
+ method,
71
+ headers,
72
+ body: d.blob
73
+ }
74
+ );
75
+
76
+ console.log(url)
77
+
78
+ assert.ok(r.ok, 'upload failed')
79
+ }
80
+ );
81
+
82
+ });
83
+
84
+ suite.run();
@@ -1,10 +1,9 @@
1
1
  import 'dotenv/config';
2
- import { test } from 'uvu';
3
2
  import * as assert from 'uvu/assert';
4
3
  import { R2 } from '../adapter.js'
5
4
  import { readFile } from 'node:fs/promises';
6
- import { homedir } from 'node:os'
7
- import * as path from 'node:path';
5
+ import { storage as storage_test_runner } from '@storecraft/core/test-runner'
6
+
8
7
 
9
8
  const areBlobsEqual = async (blob1, blob2) => {
10
9
  return !Buffer.from(await blob1.arrayBuffer()).compare(
@@ -12,21 +11,21 @@ const areBlobsEqual = async (blob1, blob2) => {
12
11
  );
13
12
  };
14
13
 
15
- const storage = new R2(
16
- process.env.R2_BUCKET, process.env.R2_ACCOUNT_ID,
17
- process.env.R2_ACCESS_KEY_ID, process.env.R2_SECRET_ACCESS_KEY
18
- );
14
+ const storage = new R2({
15
+ accessKeyId: process.env.R2_ACCESS_KEY_ID,
16
+ account_id: process.env.R2_ACCOUNT_ID,
17
+ bucket: process.env.R2_BUCKET,
18
+ secretAccessKey: process.env.R2_SECRET_ACCESS_KEY
19
+ });
19
20
 
20
- test.before(async () => await storage.init())
21
+ const suite = storage_test_runner.create(storage);
21
22
 
22
- test('blob put/get/delete', async () => {
23
+ suite.before(async () => { await storage.init(undefined) });
24
+
25
+ suite('blob put/get/delete', async () => {
23
26
  const data = [
24
- // {
25
- // key: 'folder1/tomer.txt',
26
- // blob: new Blob(['this is some text from tomer :)']),
27
- // },
28
27
  {
29
- key: 'node2222.png',
28
+ key: 'folder2/node2222.png',
30
29
  blob: new Blob([await readFile('./node.png')])
31
30
  }
32
31
  ];
@@ -51,14 +50,10 @@ test('blob put/get/delete', async () => {
51
50
 
52
51
  });
53
52
 
54
- test('blob put (presign)', async () => {
53
+ suite('blob put (presign)', async () => {
55
54
  const data = [
56
- // {
57
- // key: 'folder1/tomer.txt',
58
- // blob: new Blob(['this is some text from tomer :)']),
59
- // },
60
55
  {
61
- key: 'node_test2.png',
56
+ key: 'folder2/node_test2.png',
62
57
  blob: new Blob([await readFile('./node.png')])
63
58
  }
64
59
  ];
@@ -76,10 +71,12 @@ test('blob put (presign)', async () => {
76
71
  }
77
72
  );
78
73
 
74
+ console.log(url)
75
+
79
76
  assert.ok(r.ok, 'upload failed')
80
77
  }
81
78
  );
82
79
 
83
80
  });
84
81
 
85
- test.run();
82
+ suite.run();
package/types.public.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- export * from './index.js';
1
+ export { DigitalOceanSpaces, R2, S3, S3CompatibleStorage } from './adapter.js';
2
2
 
3
3
  export type Config = {
4
4
  endpoint: string;
@@ -9,3 +9,13 @@ export type Config = {
9
9
  forcePathStyle: boolean;
10
10
  }
11
11
 
12
+ export type R2Config = Omit<Config, 'region' | 'forcePathStyle' | 'endpoint'> & {
13
+ /**
14
+ * @description cloudflare account id
15
+ */
16
+ account_id: string;
17
+ };
18
+
19
+
20
+ export type AwsS3Config = Omit<Config, 'endpoint'>;
21
+
package/index.js DELETED
@@ -1 +0,0 @@
1
- export * from './adapter.js'
package/tsconfig.json DELETED
@@ -1,14 +0,0 @@
1
- {
2
- "compileOnSave": false,
3
- "compilerOptions": {
4
- "noEmit": true,
5
- "allowJs": true,
6
- "checkJs": true,
7
- "target": "ESNext",
8
- "resolveJsonModule": true,
9
- "moduleResolution": "NodeNext",
10
- "module": "NodeNext",
11
- "composite": true,
12
- },
13
- "include": ["*", "src/*"]
14
- }