@directus/storage-driver-gcs 12.0.7 → 12.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.d.ts +35 -33
  2. package/dist/index.js +102 -108
  3. package/package.json +11 -10
package/dist/index.d.ts CHANGED
@@ -1,38 +1,40 @@
1
- import { TusDriver, ReadOptions, ChunkedUploadContext } from '@directus/storage';
2
- import { Readable } from 'node:stream';
1
+ import { Readable } from "node:stream";
2
+ import { TusDriver } from "@directus/storage";
3
+ import { ChunkedUploadContext, ReadOptions } from "@directus/types";
3
4
 
5
+ //#region src/index.d.ts
4
6
  type DriverGCSConfig = {
5
- root?: string;
6
- bucket: string;
7
- apiEndpoint?: string;
8
- tus?: {
9
- enabled: boolean;
10
- chunkSize?: number;
11
- };
7
+ root?: string;
8
+ bucket: string;
9
+ apiEndpoint?: string;
10
+ tus?: {
11
+ enabled: boolean;
12
+ chunkSize?: number;
13
+ };
12
14
  };
13
15
  declare class DriverGCS implements TusDriver {
14
- private root;
15
- private bucket;
16
- private readonly preferredChunkSize;
17
- constructor(config: DriverGCSConfig);
18
- private fullPath;
19
- private file;
20
- read(filepath: string, options?: ReadOptions): Promise<Readable>;
21
- write(filepath: string, content: Readable): Promise<void>;
22
- delete(filepath: string): Promise<void>;
23
- stat(filepath: string): Promise<{
24
- size: number;
25
- modified: Date;
26
- }>;
27
- exists(filepath: string): Promise<boolean>;
28
- move(src: string, dest: string): Promise<void>;
29
- copy(src: string, dest: string): Promise<void>;
30
- list(prefix?: string): AsyncGenerator<string, void, unknown>;
31
- get tusExtensions(): string[];
32
- createChunkedUpload(filepath: string, context: ChunkedUploadContext): Promise<ChunkedUploadContext>;
33
- writeChunk(filepath: string, content: Readable, offset: number, context: ChunkedUploadContext): Promise<number>;
34
- finishChunkedUpload(_filepath: string, _context: ChunkedUploadContext): Promise<void>;
35
- deleteChunkedUpload(filepath: string, _context: ChunkedUploadContext): Promise<void>;
16
+ private root;
17
+ private bucket;
18
+ private readonly preferredChunkSize;
19
+ constructor(config: DriverGCSConfig);
20
+ private fullPath;
21
+ private file;
22
+ read(filepath: string, options?: ReadOptions): Promise<Readable>;
23
+ write(filepath: string, content: Readable): Promise<void>;
24
+ delete(filepath: string): Promise<void>;
25
+ stat(filepath: string): Promise<{
26
+ size: number;
27
+ modified: Date;
28
+ }>;
29
+ exists(filepath: string): Promise<boolean>;
30
+ move(src: string, dest: string): Promise<void>;
31
+ copy(src: string, dest: string): Promise<void>;
32
+ list(prefix?: string): AsyncGenerator<string, void, unknown>;
33
+ get tusExtensions(): string[];
34
+ createChunkedUpload(filepath: string, context: ChunkedUploadContext): Promise<ChunkedUploadContext>;
35
+ writeChunk(filepath: string, content: Readable, offset: number, context: ChunkedUploadContext): Promise<number>;
36
+ finishChunkedUpload(_filepath: string, _context: ChunkedUploadContext): Promise<void>;
37
+ deleteChunkedUpload(filepath: string, _context: ChunkedUploadContext): Promise<void>;
36
38
  }
37
-
38
- export { DriverGCS, type DriverGCSConfig, DriverGCS as default };
39
+ //#endregion
40
+ export { DriverGCS, DriverGCS as default, DriverGCSConfig };
package/dist/index.js CHANGED
@@ -1,113 +1,107 @@
1
- // src/index.ts
2
1
  import { DEFAULT_CHUNK_SIZE } from "@directus/constants";
3
2
  import { normalizePath } from "@directus/utils";
4
3
  import { Storage } from "@google-cloud/storage";
5
- import { join } from "path";
6
- import "stream";
7
- import { pipeline } from "stream/promises";
8
- var MINIMUM_CHUNK_SIZE = 262144;
4
+ import { join } from "node:path";
5
+ import { pipeline } from "node:stream/promises";
6
+
7
+ //#region src/index.ts
8
+ const MINIMUM_CHUNK_SIZE = 262144;
9
9
  var DriverGCS = class {
10
- root;
11
- bucket;
12
- // TUS specific members
13
- preferredChunkSize;
14
- constructor(config) {
15
- const { bucket, root, tus, ...storageOptions } = config;
16
- this.root = root ? normalizePath(root, { removeLeading: true }) : "";
17
- const storage = new Storage(storageOptions);
18
- this.bucket = storage.bucket(bucket);
19
- this.preferredChunkSize = tus?.chunkSize || DEFAULT_CHUNK_SIZE;
20
- if (tus?.enabled && (this.preferredChunkSize < MINIMUM_CHUNK_SIZE || Math.log2(this.preferredChunkSize) % 1 !== 0)) {
21
- throw new Error("Invalid chunkSize provided");
22
- }
23
- }
24
- fullPath(filepath) {
25
- return normalizePath(join(this.root, filepath));
26
- }
27
- file(filepath) {
28
- return this.bucket.file(filepath);
29
- }
30
- async read(filepath, options) {
31
- const { range } = options || {};
32
- const stream_options = {};
33
- if (range?.start) stream_options.start = range.start;
34
- if (range?.end) stream_options.end = range.end;
35
- return this.file(this.fullPath(filepath)).createReadStream(stream_options);
36
- }
37
- async write(filepath, content) {
38
- const file = this.file(this.fullPath(filepath));
39
- const stream = file.createWriteStream({ resumable: false });
40
- await pipeline(content, stream);
41
- }
42
- async delete(filepath) {
43
- await this.file(this.fullPath(filepath)).delete();
44
- }
45
- async stat(filepath) {
46
- const [{ size, updated }] = await this.file(this.fullPath(filepath)).getMetadata();
47
- return { size, modified: new Date(updated) };
48
- }
49
- async exists(filepath) {
50
- return (await this.file(this.fullPath(filepath)).exists())[0];
51
- }
52
- async move(src, dest) {
53
- await this.file(this.fullPath(src)).move(this.file(this.fullPath(dest)));
54
- }
55
- async copy(src, dest) {
56
- await this.file(this.fullPath(src)).copy(this.file(this.fullPath(dest)));
57
- }
58
- async *list(prefix = "") {
59
- let query = {
60
- prefix: this.fullPath(prefix),
61
- autoPaginate: false,
62
- maxResults: 500
63
- };
64
- while (query) {
65
- const [files, nextQuery] = await this.bucket.getFiles(query);
66
- for (const file of files) {
67
- yield file.name.substring(this.root.length);
68
- }
69
- query = nextQuery;
70
- }
71
- }
72
- get tusExtensions() {
73
- return ["creation", "termination", "expiration"];
74
- }
75
- async createChunkedUpload(filepath, context) {
76
- const file = this.file(this.fullPath(filepath));
77
- const [uri] = await file.createResumableUpload();
78
- context.metadata["uri"] = uri;
79
- return context;
80
- }
81
- async writeChunk(filepath, content, offset, context) {
82
- const file = this.file(this.fullPath(filepath));
83
- const stream = file.createWriteStream({
84
- chunkSize: this.preferredChunkSize,
85
- uri: context.metadata["uri"],
86
- offset,
87
- isPartialUpload: true,
88
- resumeCRC32C: context.metadata["hash"],
89
- metadata: {
90
- contentLength: context.size || 0
91
- }
92
- });
93
- stream.on("crc32c", (hash) => {
94
- context.metadata["hash"] = hash;
95
- });
96
- let bytesUploaded = offset || 0;
97
- content.on("data", (chunk) => {
98
- bytesUploaded += chunk.length;
99
- });
100
- await pipeline(content, stream);
101
- return bytesUploaded;
102
- }
103
- async finishChunkedUpload(_filepath, _context) {
104
- }
105
- async deleteChunkedUpload(filepath, _context) {
106
- await this.delete(filepath);
107
- }
108
- };
109
- var index_default = DriverGCS;
110
- export {
111
- DriverGCS,
112
- index_default as default
10
+ root;
11
+ bucket;
12
+ preferredChunkSize;
13
+ constructor(config) {
14
+ const { bucket, root, tus,...storageOptions } = config;
15
+ this.root = root ? normalizePath(root, { removeLeading: true }) : "";
16
+ this.bucket = new Storage(storageOptions).bucket(bucket);
17
+ this.preferredChunkSize = tus?.chunkSize || DEFAULT_CHUNK_SIZE;
18
+ if (tus?.enabled && (this.preferredChunkSize < MINIMUM_CHUNK_SIZE || Math.log2(this.preferredChunkSize) % 1 !== 0)) throw new Error("Invalid chunkSize provided");
19
+ }
20
+ fullPath(filepath) {
21
+ return normalizePath(join(this.root, filepath));
22
+ }
23
+ file(filepath) {
24
+ return this.bucket.file(filepath);
25
+ }
26
+ async read(filepath, options) {
27
+ const { range } = options || {};
28
+ const stream_options = {};
29
+ if (range?.start) stream_options.start = range.start;
30
+ if (range?.end) stream_options.end = range.end;
31
+ return this.file(this.fullPath(filepath)).createReadStream(stream_options);
32
+ }
33
+ async write(filepath, content) {
34
+ const stream = this.file(this.fullPath(filepath)).createWriteStream({ resumable: false });
35
+ await pipeline(content, stream);
36
+ }
37
+ async delete(filepath) {
38
+ await this.file(this.fullPath(filepath)).delete();
39
+ }
40
+ async stat(filepath) {
41
+ const [{ size, updated }] = await this.file(this.fullPath(filepath)).getMetadata();
42
+ return {
43
+ size,
44
+ modified: new Date(updated)
45
+ };
46
+ }
47
+ async exists(filepath) {
48
+ return (await this.file(this.fullPath(filepath)).exists())[0];
49
+ }
50
+ async move(src, dest) {
51
+ await this.file(this.fullPath(src)).move(this.file(this.fullPath(dest)));
52
+ }
53
+ async copy(src, dest) {
54
+ await this.file(this.fullPath(src)).copy(this.file(this.fullPath(dest)));
55
+ }
56
+ async *list(prefix = "") {
57
+ let query = {
58
+ prefix: this.fullPath(prefix),
59
+ autoPaginate: false,
60
+ maxResults: 500
61
+ };
62
+ while (query) {
63
+ const [files, nextQuery] = await this.bucket.getFiles(query);
64
+ for (const file of files) yield file.name.substring(this.root.length);
65
+ query = nextQuery;
66
+ }
67
+ }
68
+ get tusExtensions() {
69
+ return [
70
+ "creation",
71
+ "termination",
72
+ "expiration"
73
+ ];
74
+ }
75
+ async createChunkedUpload(filepath, context) {
76
+ const [uri] = await this.file(this.fullPath(filepath)).createResumableUpload();
77
+ context.metadata["uri"] = uri;
78
+ return context;
79
+ }
80
+ async writeChunk(filepath, content, offset, context) {
81
+ const stream = this.file(this.fullPath(filepath)).createWriteStream({
82
+ chunkSize: this.preferredChunkSize,
83
+ uri: context.metadata["uri"],
84
+ offset,
85
+ isPartialUpload: true,
86
+ resumeCRC32C: context.metadata["hash"],
87
+ metadata: { contentLength: context.size || 0 }
88
+ });
89
+ stream.on("crc32c", (hash) => {
90
+ context.metadata["hash"] = hash;
91
+ });
92
+ let bytesUploaded = offset || 0;
93
+ content.on("data", (chunk) => {
94
+ bytesUploaded += chunk.length;
95
+ });
96
+ await pipeline(content, stream);
97
+ return bytesUploaded;
98
+ }
99
+ async finishChunkedUpload(_filepath, _context) {}
100
+ async deleteChunkedUpload(filepath, _context) {
101
+ await this.delete(filepath);
102
+ }
113
103
  };
104
+ var src_default = DriverGCS;
105
+
106
+ //#endregion
107
+ export { DriverGCS, src_default as default };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@directus/storage-driver-gcs",
3
- "version": "12.0.7",
3
+ "version": "12.0.9",
4
4
  "description": "GCS file storage abstraction for `@directus/storage`",
5
5
  "homepage": "https://directus.io",
6
6
  "repository": {
@@ -21,22 +21,23 @@
21
21
  "dist"
22
22
  ],
23
23
  "dependencies": {
24
- "@google-cloud/storage": "7.15.2",
25
- "@directus/storage": "12.0.0",
26
- "@directus/constants": "13.0.1",
27
- "@directus/utils": "13.0.8"
24
+ "@google-cloud/storage": "7.16.0",
25
+ "@directus/constants": "13.0.3",
26
+ "@directus/storage": "12.0.2",
27
+ "@directus/utils": "13.0.10"
28
28
  },
29
29
  "devDependencies": {
30
30
  "@directus/tsconfig": "3.0.0",
31
- "@ngneat/falso": "7.3.0",
31
+ "@ngneat/falso": "8.0.2",
32
32
  "@vitest/coverage-v8": "3.2.4",
33
- "tsup": "8.5.0",
33
+ "tsdown": "0.14.2",
34
34
  "typescript": "5.8.3",
35
- "vitest": "3.2.4"
35
+ "vitest": "3.2.4",
36
+ "@directus/types": "13.2.3"
36
37
  },
37
38
  "scripts": {
38
- "build": "tsup src/index.ts --format=esm --dts",
39
- "dev": "tsup src/index.ts --format=esm --dts --watch",
39
+ "build": "tsdown src/index.ts --dts",
40
+ "dev": "tsdown src/index.ts --dts --watch",
40
41
  "test": "vitest run",
41
42
  "test:coverage": "vitest run --coverage"
42
43
  }