@soga/uploader 0.3.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.d.mts +50 -0
- package/dist/main.d.ts +50 -6
- package/dist/main.js +1 -45
- package/dist/main.mjs +1 -0
- package/package.json +21 -17
- package/dist/hooks/complete.d.ts +0 -7
- package/dist/hooks/complete.js +0 -210
- package/dist/hooks/prepare.d.ts +0 -7
- package/dist/hooks/prepare.js +0 -104
- package/dist/hooks/trasform.d.ts +0 -8
- package/dist/hooks/trasform.js +0 -86
- package/dist/host-uploader/ali.d.ts +0 -9
- package/dist/host-uploader/ali.js +0 -106
- package/dist/host-uploader/baidu.d.ts +0 -9
- package/dist/host-uploader/baidu.js +0 -106
- package/dist/host-uploader/base.d.ts +0 -40
- package/dist/host-uploader/base.js +0 -423
- package/dist/types/main.d.ts +0 -25
- package/dist/types/main.js +0 -2
- package/dist/uploader.d.ts +0 -21
- package/dist/uploader.js +0 -102
package/dist/hooks/trasform.js
DELETED
@@ -1,86 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.transform = transform;
- const types_1 = require("@soga/types");
- const fs_extra_1 = require("fs-extra");
- const typeorm_1 = require("typeorm");
- async function transform(params) {
-     const { fileRepository, partRepository, successRepository } = params;
-     const file = await fileRepository.findOneBy({ id: params.file_id });
-     const success_file_fields = {
-         root_id: file.root_id,
-         pid: 0,
-         space_id: file.space_id,
-         space_name: file.space_name,
-         uid: file.uid,
-         inputs: file.inputs.map((item) => ({
-             filename: item.filename,
-             filepath: item.filepath,
-             filesize: item.filesize,
-         })),
-         cloud_id: file.cloud_info.id,
-         cloud_name: file.cloud_info.name,
-         type: file.type,
-     };
-     const isRoot = file.root_id == 0;
-     if (!isRoot) {
-         const success_root = await successRepository.findOneBy({
-             root_id: file.root_id,
-             pid: 0,
-         });
-         if (!success_root) {
-             const file_root = await fileRepository.findOneBy({
-                 id: file.root_id,
-             });
-             const success_root_result = await successRepository.save(successRepository.create({
-                 root_id: file.root_id,
-                 pid: 0,
-                 space_id: file_root.space_id,
-                 space_name: file_root.space_name,
-                 uid: file_root.uid,
-                 inputs: file_root.inputs?.map((item) => ({
-                     filename: item.filename,
-                     filepath: item.filepath,
-                     filesize: item.filesize,
-                 })),
-                 cloud_id: file_root.cloud_info.id,
-                 cloud_name: file_root.cloud_info.name,
-                 type: file_root.type,
-             }));
-             success_file_fields.pid = success_root_result.id;
-         }
-         else {
-             success_file_fields.pid = success_root.id;
-         }
-         await successRepository.update({
-             id: success_file_fields.pid,
-         }, {
-             updated_at: new Date(),
-         });
-     }
-     await successRepository.save(successRepository.create(success_file_fields));
-     await fileRepository.delete({
-         id: params.file_id,
-     });
-     await partRepository.delete({
-         file_id: params.file_id,
-     });
-     await (0, fs_extra_1.remove)(file.output_root);
-     if (!isRoot) {
-         const exist = await fileRepository.findOneBy({
-             root_id: file.root_id,
-             type: (0, typeorm_1.Not)(types_1.RecordType.FOLDER),
-         });
-         if (!exist) {
-             await fileRepository.delete({
-                 root_id: file.root_id,
-             });
-             await fileRepository.delete({
-                 id: file.root_id,
-             });
-         }
-         else {
-             await fileRepository.increment({ id: file.root_id }, 'completed_count', 1);
-         }
-     }
- }
package/dist/host-uploader/ali.d.ts
DELETED
@@ -1,9 +0,0 @@
- import { ThreadType, UploaderParams } from '../types/main';
- import { BaseUploader } from './base';
- import { UploadPart } from '@soga/entities';
- export declare class AliUploader extends BaseUploader {
-     private params;
-     constructor(params: UploaderParams);
-     getThread(part: UploadPart): ThreadType;
- }
- export declare const getAliUploader: (params: UploaderParams) => Promise<AliUploader>;
package/dist/host-uploader/ali.js
DELETED
@@ -1,106 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getAliUploader = exports.AliUploader = void 0;
- const base_1 = require("./base");
- const types_1 = require("@soga/types");
- const piscina_1 = __importDefault(require("piscina"));
- const node_worker_threads_1 = require("node:worker_threads");
- const piscina = new piscina_1.default({
-     filename: require.resolve('@soga/part-uploader'),
- });
- const uploaders = new Map();
- class AliUploader extends base_1.BaseUploader {
-     params;
-     constructor(params) {
-         super(params, types_1.HostType.ALI);
-         this.params = params;
-     }
-     getThread(part) {
-         const abortController = new AbortController();
-         const { port1, port2 } = new node_worker_threads_1.MessageChannel();
-         return {
-             file_id: part.file_id,
-             part_id: part.id,
-             uid: part.uid,
-             start: async () => {
-                 try {
-                     const { file_id, host_id, id: part_id, info } = part;
-                     const file = await this.fileRepository.findOneBy({ id: file_id });
-                     const { output_root } = file;
-                     port2.on('message', async (event) => {
-                         await this.onPartProgress(event);
-                     });
-                     port2.on('error', (evt) => {
-                         throw new Error('Ali upload part error');
-                     });
-                     const params = {
-                         file_id,
-                         host_id,
-                         part_id,
-                         output_root,
-                         part: info,
-                         cloud_folder_id: file.cloud_info.hosts.ali?.file_id,
-                         sdk_domain: this.params.sdk_domain,
-                         port: port1,
-                     };
-                     const result = await piscina.run(params, {
-                         name: 'uploadAli',
-                         signal: abortController.signal,
-                         transferList: [port1],
-                     });
-                     if (result) {
-                         await this.partRepository.update(part.id, {
-                             upload_result: {
-                                 ali: result,
-                             },
-                             upload_status: types_1.UploadStatus.SUCCESS,
-                         });
-                         await this.onPartSuccess({
-                             file_id,
-                             host_type: types_1.HostType.ALI,
-                         });
-                     }
-                 }
-                 catch (error) {
-                     if (error.name == 'AbortError') {
-                         this.partRepository.update(part.id, {
-                             upload_status: types_1.UploadStatus.NULL,
-                         });
-                     }
-                     else {
-                         await this.onPartError({
-                             error,
-                             part_id: part.id,
-                             file_id: part.file_id,
-                         });
-                     }
-                 }
-                 finally {
-                     this.threads = this.threads.filter((thread) => thread.part_id != part.id);
-                     port2.close();
-                     await this.start();
-                 }
-             },
-             stop: async () => {
-                 abortController.abort();
-                 this.threads = this.threads.filter((thread) => thread.part_id !== part.id);
-             },
-         };
-     }
- }
- exports.AliUploader = AliUploader;
- const getAliUploader = async (params) => {
-     const { uid, host_id } = params;
-     const key = `${uid}_${host_id}`;
-     let uploader = uploaders.get(key);
-     if (!uploader) {
-         uploader = new AliUploader(params);
-         uploader.setThreads(3);
-         uploaders.set(key, uploader);
-     }
-     return uploader;
- };
- exports.getAliUploader = getAliUploader;
package/dist/host-uploader/baidu.d.ts
DELETED
@@ -1,9 +0,0 @@
- import { ThreadType, UploaderParams } from '../types/main';
- import { BaseUploader } from './base';
- import { UploadPart } from '@soga/entities';
- export declare class BaiduUploader extends BaseUploader {
-     private params;
-     constructor(params: UploaderParams);
-     getThread(part: UploadPart): ThreadType;
- }
- export declare const getBaiduUploader: (params: UploaderParams) => Promise<BaiduUploader>;
package/dist/host-uploader/baidu.js
DELETED
@@ -1,106 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.getBaiduUploader = exports.BaiduUploader = void 0;
- const base_1 = require("./base");
- const types_1 = require("@soga/types");
- const piscina_1 = __importDefault(require("piscina"));
- const node_worker_threads_1 = require("node:worker_threads");
- const piscina = new piscina_1.default({
-     filename: require.resolve('@soga/part-uploader'),
- });
- const uploaders = new Map();
- class BaiduUploader extends base_1.BaseUploader {
-     params;
-     constructor(params) {
-         super(params, types_1.HostType.BAIDU);
-         this.params = params;
-     }
-     getThread(part) {
-         const abortController = new AbortController();
-         const { port1, port2 } = new node_worker_threads_1.MessageChannel();
-         return {
-             file_id: part.file_id,
-             part_id: part.id,
-             uid: part.uid,
-             start: async () => {
-                 try {
-                     const { file_id, host_id, id: part_id, info } = part;
-                     const file = await this.fileRepository.findOneBy({ id: file_id });
-                     const { output_root } = file;
-                     port2.on('message', async (event) => {
-                         await this.onPartProgress(event);
-                     });
-                     port2.on('error', (evt) => {
-                         throw new Error('Baidu upload part error');
-                     });
-                     const params = {
-                         file_id,
-                         host_id,
-                         part_id,
-                         output_root,
-                         part: info,
-                         cloud_folder_path: file.cloud_info.hosts.baidu?.path,
-                         sdk_domain: this.params.sdk_domain,
-                         port: port1,
-                     };
-                     const result = await piscina.run(params, {
-                         name: 'uploadBaidu',
-                         signal: abortController.signal,
-                         transferList: [port1],
-                     });
-                     if (result) {
-                         await this.partRepository.update(part.id, {
-                             upload_result: {
-                                 baidu: result,
-                             },
-                             upload_status: types_1.UploadStatus.SUCCESS,
-                         });
-                         await this.onPartSuccess({
-                             file_id,
-                             host_type: types_1.HostType.BAIDU,
-                         });
-                     }
-                 }
-                 catch (error) {
-                     if (error.name == 'AbortError') {
-                         await this.partRepository.update(part.id, {
-                             upload_status: types_1.UploadStatus.NULL,
-                         });
-                     }
-                     else {
-                         await this.onPartError({
-                             error,
-                             part_id: part.id,
-                             file_id: part.file_id,
-                         });
-                     }
-                 }
-                 finally {
-                     this.threads = this.threads.filter((thread) => thread.part_id != part.id);
-                     port2.close();
-                     await this.start();
-                 }
-             },
-             stop: async () => {
-                 abortController.abort();
-                 this.threads = this.threads.filter((thread) => thread.part_id !== part.id);
-             },
-         };
-     }
- }
- exports.BaiduUploader = BaiduUploader;
- const getBaiduUploader = async (params) => {
-     const { uid, host_id } = params;
-     const key = `${uid}_${host_id}`;
-     let uploader = uploaders.get(key);
-     if (!uploader) {
-         uploader = new BaiduUploader(params);
-         uploader.setThreads(6);
-         uploaders.set(key, uploader);
-     }
-     return uploader;
- };
- exports.getBaiduUploader = getBaiduUploader;
package/dist/host-uploader/base.d.ts
DELETED
@@ -1,40 +0,0 @@
- import { Repository } from 'typeorm';
- import { UploadFile, UploadPart, UploadSuccess } from '@soga/entities';
- import { HostType, UploadWorkerPercent } from '@soga/types';
- import { UploaderParams, ThreadType } from '../types/main';
- export declare abstract class BaseUploader {
-     protected hostType: HostType;
-     protected onProgress: (file: UploadFile) => Promise<void>;
-     protected onSuccess: (file: UploadFile) => Promise<void>;
-     protected onError: (err: Error, file: UploadFile) => Promise<void>;
-     protected successRepository: Repository<UploadSuccess>;
-     protected fileRepository: Repository<UploadFile>;
-     protected partRepository: Repository<UploadPart>;
-     private thread_count;
-     private maxThreads;
-     protected threads: ThreadType[];
-     baseParams: UploaderParams;
-     abstract getThread(part: UploadPart): ThreadType;
-     protected getValidThreads(threads: number): number;
-     protected onPartProgress(params: UploadWorkerPercent): Promise<void>;
-     protected onPartSuccess({ file_id, host_type, }: {
-         file_id: number;
-         host_type: HostType;
-     }): Promise<void>;
-     protected onFileSuccess(file: UploadFile): Promise<void>;
-     protected onPartError({ error, part_id, file_id, }: {
-         error: Error;
-         part_id: number;
-         file_id: number;
-     }): Promise<void>;
-     constructor(baseParams: UploaderParams, host_type: HostType);
-     setThreads(threads: number): Promise<void>;
-     start(): Promise<void>;
-     stopFiles(ids: number[]): Promise<void>;
-     deleteFiles(ids: number[]): Promise<void>;
-     stopAll(): Promise<void>;
-     protected dequeueOneFile(): Promise<void>;
-     private checkProcessingFile;
-     private run;
-     private checkAllHostsUploaded;
- }