@plugable-io/js 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +53 -0
- package/dist/index.d.ts +53 -0
- package/dist/index.js +152 -0
- package/dist/index.mjs +117 -0
- package/package.json +29 -0
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
/** Configuration for constructing a {@link BucketClient}. */
interface BucketClientConfig {
    /** Identifier of the bucket all requests are scoped to. */
    bucketId: string;
    /** Supplies the bearer token for the Authorization header; may be sync or async. */
    getToken: () => Promise<string> | string;
    /** API origin override; defaults to "http://localhost:3000/v1" (dev default). */
    baseUrl?: string;
}
/** A file record as returned by the bucket API (snake_case wire format). */
interface FileObject {
    id: string;
    name: string;
    metadata: Record<string, any>;
    /** Base64-encoded binary MD5 of the file contents. */
    checksum: string;
    byte_size: number;
    content_type: string;
    /** Present only when requested (see SearchOptions.with_download_url). */
    download_url?: string;
    created_at: string;
    updated_at: string;
}
/** Paged result of a file search. */
interface ListResponse {
    files: FileObject[];
    paging: {
        has_next_page: boolean;
        has_previous_page: boolean;
        current_page: number;
    };
}
/** Filter and paging options for {@link BucketClient.list}. */
interface SearchOptions {
    media_type?: string;
    metadata?: Record<string, any>;
    page?: number;
    per_page?: number;
    with_download_url?: boolean;
}
/** Options for {@link BucketClient.upload}. */
interface UploadOptions {
    metadata?: Record<string, any>;
    /** Invoked with an integer percentage (0-100) as bytes are uploaded. */
    onProgress?: (progress: number) => void;
}
/** Fields that may be changed via {@link BucketClient.update}. */
interface UpdateOptions {
    filename?: string;
    metadata?: Record<string, any>;
}

/**
 * HTTP client for the bucket file-management API.
 * All requests carry a Bearer token obtained from the configured getToken callback.
 */
declare class BucketClient {
    private client;
    private config;
    constructor(config: BucketClientConfig);
    private request;
    /** Searches files in the bucket, with optional filters and paging. */
    list(options?: SearchOptions): Promise<ListResponse>;
    /** Fetches a single file record by id. */
    get(id: string): Promise<FileObject>;
    /** Patches a file's name and/or metadata. */
    update(id: string, updates: UpdateOptions): Promise<FileObject>;
    /** Deletes a file by id. */
    delete(id: string): Promise<void>;
    /**
     * Uploads a browser File: checksums it locally, creates the file record to
     * obtain a signed URL, PUTs the bytes, then confirms the upload.
     * Browser-only (relies on File/FileReader).
     */
    upload(file: File, options?: UploadOptions): Promise<FileObject>;
}

export { BucketClient, type BucketClientConfig, type FileObject, type ListResponse, type SearchOptions, type UpdateOptions, type UploadOptions };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
/** Configuration for constructing a {@link BucketClient}. */
interface BucketClientConfig {
    /** Identifier of the bucket all requests are scoped to. */
    bucketId: string;
    /** Supplies the bearer token for the Authorization header; may be sync or async. */
    getToken: () => Promise<string> | string;
    /** API origin override; defaults to "http://localhost:3000/v1" (dev default). */
    baseUrl?: string;
}
/** A file record as returned by the bucket API (snake_case wire format). */
interface FileObject {
    id: string;
    name: string;
    metadata: Record<string, any>;
    /** Base64-encoded binary MD5 of the file contents. */
    checksum: string;
    byte_size: number;
    content_type: string;
    /** Present only when requested (see SearchOptions.with_download_url). */
    download_url?: string;
    created_at: string;
    updated_at: string;
}
/** Paged result of a file search. */
interface ListResponse {
    files: FileObject[];
    paging: {
        has_next_page: boolean;
        has_previous_page: boolean;
        current_page: number;
    };
}
/** Filter and paging options for {@link BucketClient.list}. */
interface SearchOptions {
    media_type?: string;
    metadata?: Record<string, any>;
    page?: number;
    per_page?: number;
    with_download_url?: boolean;
}
/** Options for {@link BucketClient.upload}. */
interface UploadOptions {
    metadata?: Record<string, any>;
    /** Invoked with an integer percentage (0-100) as bytes are uploaded. */
    onProgress?: (progress: number) => void;
}
/** Fields that may be changed via {@link BucketClient.update}. */
interface UpdateOptions {
    filename?: string;
    metadata?: Record<string, any>;
}

/**
 * HTTP client for the bucket file-management API.
 * All requests carry a Bearer token obtained from the configured getToken callback.
 */
declare class BucketClient {
    private client;
    private config;
    constructor(config: BucketClientConfig);
    private request;
    /** Searches files in the bucket, with optional filters and paging. */
    list(options?: SearchOptions): Promise<ListResponse>;
    /** Fetches a single file record by id. */
    get(id: string): Promise<FileObject>;
    /** Patches a file's name and/or metadata. */
    update(id: string, updates: UpdateOptions): Promise<FileObject>;
    /** Deletes a file by id. */
    delete(id: string): Promise<void>;
    /**
     * Uploads a browser File: checksums it locally, creates the file record to
     * obtain a signed URL, PUTs the bytes, then confirms the upload.
     * Browser-only (relies on File/FileReader).
     */
    upload(file: File, options?: UploadOptions): Promise<FileObject>;
}

export { BucketClient, type BucketClientConfig, type FileObject, type ListResponse, type SearchOptions, type UpdateOptions, type UploadOptions };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"use strict";
// --- esbuild/tsup-generated CommonJS interop preamble (do not hand-edit) ---
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines a class field as an enumerable own property (public class-field semantics).
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Installs lazy getters on `target` for every name in `all` (live ESM-style exports).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties from `from` onto `to` as getters, skipping `except` and
// keys already present; preserves enumerability from the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a require()'d CommonJS module so it can be consumed like an ESM namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks the export object as an ES module and copies the exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Stringifies non-symbol keys before defining the class field.
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);

// src/index.ts
var index_exports = {};
__export(index_exports, {
  BucketClient: () => BucketClient
});
module.exports = __toCommonJS(index_exports);

// src/client.ts
var import_axios = __toESM(require("axios"));

// src/utils.ts
var import_spark_md5 = __toESM(require("spark-md5"));
|
|
44
|
+
// Computes the base64-encoded binary MD5 of `file` (Content-MD5 style), reading
// it in 2 MiB chunks via FileReader so large files never need one contiguous
// buffer. Browser-only: relies on File/FileReader/btoa.
// Returns: Promise<string> resolving to the base64 digest; rejects on read failure.
var calculateChecksum = (file) => {
  return new Promise((resolve, reject) => {
    const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    const chunkSize = 2097152;
    const chunks = Math.ceil(file.size / chunkSize);
    const spark = new import_spark_md5.default.ArrayBuffer();
    const fileReader = new FileReader();
    let currentChunk = 0;
    fileReader.onload = function(e) {
      // BUGFIX: the previous `spark.append(e.target?.result)` could receive
      // undefined / a non-ArrayBuffer; the resulting throw happened inside this
      // event handler, so the promise never settled (permanent hang). Validate
      // the result and route every failure through reject() instead.
      const result = e.target ? e.target.result : null;
      if (!(result instanceof ArrayBuffer)) {
        reject(new Error("Failed to read file for checksum calculation."));
        return;
      }
      try {
        spark.append(result);
        currentChunk++;
        if (currentChunk < chunks) {
          loadNext();
        } else {
          // end(true) yields the raw binary digest; btoa base64-encodes it.
          const md5Hash = spark.end(true);
          const base64Hash = btoa(md5Hash);
          resolve(base64Hash);
        }
      } catch (err) {
        reject(err);
      }
    };
    fileReader.onerror = function() {
      reject(new Error("Failed to read file for checksum calculation."));
    };
    // Reads the next [start, end) slice; onload advances the chunk cursor.
    function loadNext() {
      const start = currentChunk * chunkSize;
      const end = start + chunkSize >= file.size ? file.size : start + chunkSize;
      fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
  });
};
|
|
74
|
+
|
|
75
|
+
// src/client.ts
|
|
76
|
+
// HTTP client for the bucket file-management API. Wraps an axios instance that
// injects a bearer token (from config.getToken) on every outgoing request.
var BucketClient = class {
  constructor(config) {
    __publicField(this, "client");
    __publicField(this, "config");
    this.config = config;
    const baseURL = config.baseUrl || "http://localhost:3000/v1";
    this.client = import_axios.default.create({ baseURL });
    // getToken may return a plain string or a promise; Promise.resolve handles both.
    const attachAuth = async (req) => {
      const token = await Promise.resolve(this.config.getToken());
      if (token) {
        req.headers.Authorization = `Bearer ${token}`;
      }
      return req;
    };
    this.client.interceptors.request.use(attachAuth);
  }
  // Performs one API call and unwraps `.data`; axios errors that carry a server
  // response are re-thrown as a single descriptive Error, others pass through.
  async request(method, url, data, params) {
    let res;
    try {
      res = await this.client.request({ method, url, data, params });
    } catch (err) {
      if (err.response) {
        throw new Error(`API Error: ${err.response.status} - ${JSON.stringify(err.response.data)}`);
      }
      throw err;
    }
    return res.data;
  }
  // Searches files in the bucket (POST so filters travel in the request body).
  async list(options) {
    const body = { ...options };
    return this.request("POST", `/buckets/${this.config.bucketId}/files/search`, body);
  }
  // Fetches one file record by id.
  async get(id) {
    return this.request("GET", `/buckets/${this.config.bucketId}/files/${id}`);
  }
  // Patches filename and/or metadata of an existing file.
  async update(id, updates) {
    return this.request("PATCH", `/buckets/${this.config.bucketId}/files/${id}`, updates);
  }
  // Deletes a file; resolves with no value.
  async delete(id) {
    await this.request("DELETE", `/buckets/${this.config.bucketId}/files/${id}`);
  }
  // Three-step upload: checksum the file locally, create the file record to get
  // a signed PUT URL, stream the bytes directly to storage, then confirm.
  async upload(file, options) {
    const checksum = await calculateChecksum(file);
    const initPayload = {
      filename: file.name,
      byte_size: file.size,
      checksum,
      content_type: file.type
    };
    const uploadInit = await this.request("POST", `/buckets/${this.config.bucketId}/files`, initPayload);
    // Translate axios progress events into an integer percentage callback.
    const reportProgress = (progressEvent) => {
      if (options?.onProgress && progressEvent.total) {
        const percentCompleted = Math.round(progressEvent.loaded * 100 / progressEvent.total);
        options.onProgress(percentCompleted);
      }
    };
    await import_axios.default.put(uploadInit.url, file, {
      headers: uploadInit.headers,
      onUploadProgress: reportProgress
    });
    return this.request("POST", `/buckets/${this.config.bucketId}/files/confirm`, {
      signed_id: uploadInit.sid,
      metadata: options?.metadata
    });
  }
};
|
|
149
|
+
// Annotate the CommonJS export names for ESM import in node:
// (dead code — `0 &&` never executes; it exists only so Node's cjs-module-lexer
// can statically detect the named exports.)
0 && (module.exports = {
  BucketClient
});
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
// esbuild-generated helpers implementing public class-field semantics.
var __defProp = Object.defineProperty;
// Defines a class field as an enumerable own property.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Stringifies non-symbol keys before defining the class field.
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
|
4
|
+
|
|
5
|
+
// src/client.ts
|
|
6
|
+
import axios from "axios";
|
|
7
|
+
|
|
8
|
+
// src/utils.ts
|
|
9
|
+
import SparkMD5 from "spark-md5";
|
|
10
|
+
// Computes the base64-encoded binary MD5 of `file` (Content-MD5 style), reading
// it in 2 MiB chunks via FileReader so large files never need one contiguous
// buffer. Browser-only: relies on File/FileReader/btoa.
// Returns: Promise<string> resolving to the base64 digest; rejects on read failure.
var calculateChecksum = (file) => {
  return new Promise((resolve, reject) => {
    const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    const chunkSize = 2097152;
    const chunks = Math.ceil(file.size / chunkSize);
    const spark = new SparkMD5.ArrayBuffer();
    const fileReader = new FileReader();
    let currentChunk = 0;
    fileReader.onload = function(e) {
      // BUGFIX: the previous `spark.append(e.target?.result)` could receive
      // undefined / a non-ArrayBuffer; the resulting throw happened inside this
      // event handler, so the promise never settled (permanent hang). Validate
      // the result and route every failure through reject() instead.
      const result = e.target ? e.target.result : null;
      if (!(result instanceof ArrayBuffer)) {
        reject(new Error("Failed to read file for checksum calculation."));
        return;
      }
      try {
        spark.append(result);
        currentChunk++;
        if (currentChunk < chunks) {
          loadNext();
        } else {
          // end(true) yields the raw binary digest; btoa base64-encodes it.
          const md5Hash = spark.end(true);
          const base64Hash = btoa(md5Hash);
          resolve(base64Hash);
        }
      } catch (err) {
        reject(err);
      }
    };
    fileReader.onerror = function() {
      reject(new Error("Failed to read file for checksum calculation."));
    };
    // Reads the next [start, end) slice; onload advances the chunk cursor.
    function loadNext() {
      const start = currentChunk * chunkSize;
      const end = start + chunkSize >= file.size ? file.size : start + chunkSize;
      fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
  });
};
|
|
40
|
+
|
|
41
|
+
// src/client.ts
|
|
42
|
+
// HTTP client for the bucket file-management API. Wraps an axios instance that
// injects a bearer token (from config.getToken) on every outgoing request.
var BucketClient = class {
  constructor(config) {
    __publicField(this, "client");
    __publicField(this, "config");
    this.config = config;
    const baseURL = config.baseUrl || "http://localhost:3000/v1";
    this.client = axios.create({ baseURL });
    // getToken may return a plain string or a promise; Promise.resolve handles both.
    const attachAuth = async (req) => {
      const token = await Promise.resolve(this.config.getToken());
      if (token) {
        req.headers.Authorization = `Bearer ${token}`;
      }
      return req;
    };
    this.client.interceptors.request.use(attachAuth);
  }
  // Performs one API call and unwraps `.data`; axios errors that carry a server
  // response are re-thrown as a single descriptive Error, others pass through.
  async request(method, url, data, params) {
    let res;
    try {
      res = await this.client.request({ method, url, data, params });
    } catch (err) {
      if (err.response) {
        throw new Error(`API Error: ${err.response.status} - ${JSON.stringify(err.response.data)}`);
      }
      throw err;
    }
    return res.data;
  }
  // Searches files in the bucket (POST so filters travel in the request body).
  async list(options) {
    const body = { ...options };
    return this.request("POST", `/buckets/${this.config.bucketId}/files/search`, body);
  }
  // Fetches one file record by id.
  async get(id) {
    return this.request("GET", `/buckets/${this.config.bucketId}/files/${id}`);
  }
  // Patches filename and/or metadata of an existing file.
  async update(id, updates) {
    return this.request("PATCH", `/buckets/${this.config.bucketId}/files/${id}`, updates);
  }
  // Deletes a file; resolves with no value.
  async delete(id) {
    await this.request("DELETE", `/buckets/${this.config.bucketId}/files/${id}`);
  }
  // Three-step upload: checksum the file locally, create the file record to get
  // a signed PUT URL, stream the bytes directly to storage, then confirm.
  async upload(file, options) {
    const checksum = await calculateChecksum(file);
    const initPayload = {
      filename: file.name,
      byte_size: file.size,
      checksum,
      content_type: file.type
    };
    const uploadInit = await this.request("POST", `/buckets/${this.config.bucketId}/files`, initPayload);
    // Translate axios progress events into an integer percentage callback.
    const reportProgress = (progressEvent) => {
      if (options?.onProgress && progressEvent.total) {
        const percentCompleted = Math.round(progressEvent.loaded * 100 / progressEvent.total);
        options.onProgress(percentCompleted);
      }
    };
    await axios.put(uploadInit.url, file, {
      headers: uploadInit.headers,
      onUploadProgress: reportProgress
    });
    return this.request("POST", `/buckets/${this.config.bucketId}/files/confirm`, {
      signed_id: uploadInit.sid,
      metadata: options?.metadata
    });
  }
};
|
|
115
|
+
export {
|
|
116
|
+
BucketClient
|
|
117
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@plugable-io/js",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "JavaScript client for Plugable File Management API",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"module": "dist/index.mjs",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"files": [
|
|
9
|
+
"dist"
|
|
10
|
+
],
|
|
11
|
+
"scripts": {
|
|
12
|
+
"build": "tsup src/index.ts --format cjs,esm --dts",
|
|
13
|
+
"dev": "tsup src/index.ts --format cjs,esm --dts --watch",
|
|
14
|
+
"lint": "tsc --noEmit"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [],
|
|
17
|
+
"author": "",
|
|
18
|
+
"license": "MIT",
|
|
19
|
+
"dependencies": {
|
|
20
|
+
"axios": "^1.7.0",
|
|
21
|
+
"spark-md5": "^3.0.2"
|
|
22
|
+
},
|
|
23
|
+
"devDependencies": {
|
|
24
|
+
"@types/node": "^20.0.0",
|
|
25
|
+
"@types/spark-md5": "^3.0.4",
|
|
26
|
+
"tsup": "^8.0.0",
|
|
27
|
+
"typescript": "^5.0.0"
|
|
28
|
+
}
|
|
29
|
+
}
|