@soga/uploader 0.2.45 → 0.2.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hooks/complete.js +211 -1
- package/dist/hooks/prepare.js +104 -1
- package/dist/hooks/trasform.js +86 -1
- package/dist/host-uploader/ali.js +107 -1
- package/dist/host-uploader/baidu.js +110 -1
- package/dist/host-uploader/base.js +426 -1
- package/dist/main.js +45 -1
- package/dist/types/main.js +2 -1
- package/dist/uploader.js +102 -1
- package/package.json +2 -2
package/dist/hooks/complete.js
CHANGED
|
@@ -1 +1,211 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.complete = complete;
|
|
4
|
+
const sdk_1 = require("@soga/sdk");
|
|
5
|
+
const types_1 = require("@soga/types");
|
|
6
|
+
async function complete({ file_id, sdk_domain, fileRepository, }) {
|
|
7
|
+
const file = await fileRepository.findOneBy({ id: file_id });
|
|
8
|
+
let pid = file.task_record_id;
|
|
9
|
+
const sdk = (0, sdk_1.getSdk)(sdk_domain);
|
|
10
|
+
const partMap = {};
|
|
11
|
+
let manifest;
|
|
12
|
+
if (file.type == types_1.RecordType.AFFIX) {
|
|
13
|
+
const record = await sdk.getRecordInfo({
|
|
14
|
+
space_id: file.space_id,
|
|
15
|
+
record_id: file.task_record_id,
|
|
16
|
+
refresh: true,
|
|
17
|
+
});
|
|
18
|
+
const { parent_id } = record;
|
|
19
|
+
pid = parent_id;
|
|
20
|
+
await processAffixManifest(file, sdk);
|
|
21
|
+
}
|
|
22
|
+
else {
|
|
23
|
+
if (file.pid !== 0) {
|
|
24
|
+
const parent = await fileRepository.findOneBy({ id: file.pid });
|
|
25
|
+
pid = parent.cloud_info.id;
|
|
26
|
+
}
|
|
27
|
+
await processNormalManifest(file);
|
|
28
|
+
}
|
|
29
|
+
await sdk.updateRecord({
|
|
30
|
+
space_id: file.space_id,
|
|
31
|
+
record_id: file.cloud_info.id,
|
|
32
|
+
parent_id: pid,
|
|
33
|
+
manifest: JSON.stringify(manifest),
|
|
34
|
+
});
|
|
35
|
+
async function processAffixManifest(file, sdk) {
|
|
36
|
+
file.encoded.affix?.forEach((item) => {
|
|
37
|
+
item.parts.forEach((part) => {
|
|
38
|
+
partMap[part.file] = {
|
|
39
|
+
md5: part.md5,
|
|
40
|
+
size: part.size,
|
|
41
|
+
source: true,
|
|
42
|
+
preview: true,
|
|
43
|
+
};
|
|
44
|
+
});
|
|
45
|
+
});
|
|
46
|
+
const record = await sdk.getRecordInfo({
|
|
47
|
+
space_id: file.space_id,
|
|
48
|
+
record_id: file.task_record_id,
|
|
49
|
+
refresh: true,
|
|
50
|
+
});
|
|
51
|
+
if (!record.manifest) {
|
|
52
|
+
record.manifest = {};
|
|
53
|
+
}
|
|
54
|
+
manifest = record.manifest;
|
|
55
|
+
if (!manifest.meta) {
|
|
56
|
+
manifest.meta = {
|
|
57
|
+
host_size: 0,
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
if (!manifest.parts) {
|
|
61
|
+
manifest.parts = {};
|
|
62
|
+
}
|
|
63
|
+
if (!manifest.affix) {
|
|
64
|
+
manifest.affix = [];
|
|
65
|
+
}
|
|
66
|
+
file.encoded.affix?.forEach((item) => {
|
|
67
|
+
const { parts, ...fields } = item;
|
|
68
|
+
manifest.affix.push({ ...fields, parts: parts.map((item) => item.file) });
|
|
69
|
+
});
|
|
70
|
+
Object.assign(manifest.parts, partMap);
|
|
71
|
+
setManifestHosts();
|
|
72
|
+
setHostSize();
|
|
73
|
+
}
|
|
74
|
+
async function processNormalManifest(file) {
|
|
75
|
+
const { type, inputs, encoded } = file;
|
|
76
|
+
if (encoded.source?.parts) {
|
|
77
|
+
let preview_need = true;
|
|
78
|
+
if (type == types_1.RecordType.VIDEO || type == types_1.RecordType.AUDIO) {
|
|
79
|
+
preview_need = false;
|
|
80
|
+
}
|
|
81
|
+
if (type == types_1.RecordType.TXT) {
|
|
82
|
+
preview_need = false;
|
|
83
|
+
}
|
|
84
|
+
encoded.source.parts.forEach((part) => {
|
|
85
|
+
partMap[part.file] = {
|
|
86
|
+
md5: part.md5,
|
|
87
|
+
size: part.size,
|
|
88
|
+
source: true,
|
|
89
|
+
preview: preview_need,
|
|
90
|
+
};
|
|
91
|
+
});
|
|
92
|
+
}
|
|
93
|
+
if (encoded.media?.parts) {
|
|
94
|
+
encoded.media.parts.forEach((part) => {
|
|
95
|
+
partMap[part.file] = {
|
|
96
|
+
md5: part.md5,
|
|
97
|
+
size: part.size,
|
|
98
|
+
source: false,
|
|
99
|
+
preview: true,
|
|
100
|
+
};
|
|
101
|
+
});
|
|
102
|
+
}
|
|
103
|
+
if (encoded.img?.parts) {
|
|
104
|
+
encoded.img.parts.forEach((part) => {
|
|
105
|
+
partMap[part.file] = {
|
|
106
|
+
md5: part.md5,
|
|
107
|
+
size: part.size,
|
|
108
|
+
source: false,
|
|
109
|
+
preview: true,
|
|
110
|
+
};
|
|
111
|
+
});
|
|
112
|
+
}
|
|
113
|
+
if (encoded.txt?.parts) {
|
|
114
|
+
encoded.txt.parts.forEach((part) => {
|
|
115
|
+
partMap[part.file] = {
|
|
116
|
+
md5: part.md5,
|
|
117
|
+
size: part.size,
|
|
118
|
+
source: false,
|
|
119
|
+
preview: true,
|
|
120
|
+
};
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
const input = inputs[0];
|
|
124
|
+
const meta_field = {
|
|
125
|
+
host_size: 0,
|
|
126
|
+
filesize: input.filesize,
|
|
127
|
+
filename: input.filename,
|
|
128
|
+
btime: input.local_btime,
|
|
129
|
+
ctime: input.local_ctime,
|
|
130
|
+
mtime: input.local_mtime,
|
|
131
|
+
};
|
|
132
|
+
manifest = { meta: meta_field, parts: partMap };
|
|
133
|
+
if (encoded.source?.parts) {
|
|
134
|
+
manifest.source = {
|
|
135
|
+
head: encoded.source.head,
|
|
136
|
+
parts: encoded.source.parts.map((part) => part.file),
|
|
137
|
+
};
|
|
138
|
+
}
|
|
139
|
+
if (encoded.media?.parts) {
|
|
140
|
+
const { parts, ...fields } = encoded.media;
|
|
141
|
+
manifest.media = { ...fields, parts: parts.map((item) => item.file) };
|
|
142
|
+
}
|
|
143
|
+
if (encoded.img?.parts) {
|
|
144
|
+
const { meta, parts } = encoded.img;
|
|
145
|
+
manifest.img = {
|
|
146
|
+
meta,
|
|
147
|
+
preview: parts.map((item) => ({
|
|
148
|
+
file: item.file,
|
|
149
|
+
start: item.start,
|
|
150
|
+
end: item.end,
|
|
151
|
+
})),
|
|
152
|
+
parts: parts.map((item) => item.file),
|
|
153
|
+
};
|
|
154
|
+
}
|
|
155
|
+
if (encoded.txt?.parts) {
|
|
156
|
+
const { map, pad, pages, parts } = encoded.txt;
|
|
157
|
+
manifest.txt = {
|
|
158
|
+
entrance: map,
|
|
159
|
+
pad,
|
|
160
|
+
pages,
|
|
161
|
+
parts: parts.map((item) => item.file),
|
|
162
|
+
};
|
|
163
|
+
}
|
|
164
|
+
setHostSize();
|
|
165
|
+
setManifestHosts();
|
|
166
|
+
return manifest;
|
|
167
|
+
}
|
|
168
|
+
function setHostSize() {
|
|
169
|
+
const parts = Object.values(partMap);
|
|
170
|
+
const sizes = { host: 0, preview: 0, source: 0 };
|
|
171
|
+
parts.forEach((part) => {
|
|
172
|
+
sizes.host += part.size;
|
|
173
|
+
});
|
|
174
|
+
manifest.meta.host_size = sizes.host;
|
|
175
|
+
}
|
|
176
|
+
function setManifestHosts() {
|
|
177
|
+
if (file.ali_host_id && !manifest.ali) {
|
|
178
|
+
manifest.ali = {
|
|
179
|
+
drive_id: '',
|
|
180
|
+
host_id: file.ali_host_id,
|
|
181
|
+
info: {},
|
|
182
|
+
};
|
|
183
|
+
const data = file.ali_upload_result;
|
|
184
|
+
console.log('part_map: ', Object.keys(partMap));
|
|
185
|
+
Object.keys(partMap).forEach((filename) => {
|
|
186
|
+
const result = data[filename];
|
|
187
|
+
manifest.ali.info[filename] = {
|
|
188
|
+
file_id: result.file_id,
|
|
189
|
+
sha1: result.sha1,
|
|
190
|
+
};
|
|
191
|
+
if (!manifest.ali.drive_id) {
|
|
192
|
+
manifest.ali.drive_id = result.drive_id;
|
|
193
|
+
}
|
|
194
|
+
});
|
|
195
|
+
}
|
|
196
|
+
if (file.baidu_host_id) {
|
|
197
|
+
manifest.baidu = {
|
|
198
|
+
host_id: file.baidu_host_id,
|
|
199
|
+
info: {},
|
|
200
|
+
};
|
|
201
|
+
const data = file.baidu_upload_result;
|
|
202
|
+
Object.keys(partMap).forEach((file) => {
|
|
203
|
+
const result = data[file];
|
|
204
|
+
manifest.baidu.info[file] = {
|
|
205
|
+
fs_id: result.fs_id,
|
|
206
|
+
md4: result.md4,
|
|
207
|
+
};
|
|
208
|
+
});
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
}
|
package/dist/hooks/prepare.js
CHANGED
|
@@ -1 +1,104 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.prepare = prepare;
|
|
4
|
+
const types_1 = require("@soga/types");
|
|
5
|
+
const sdk_1 = require("@soga/sdk");
|
|
6
|
+
async function prepare({ file_id, sdk_domain, fileRepository, }) {
|
|
7
|
+
const file = await fileRepository.findOneBy({ id: file_id });
|
|
8
|
+
if (file.type != types_1.RecordType.AFFIX) {
|
|
9
|
+
await ensureParents();
|
|
10
|
+
await createCloudFolderOrFile(file);
|
|
11
|
+
}
|
|
12
|
+
else {
|
|
13
|
+
const { task_record_id } = file;
|
|
14
|
+
const sdk = (0, sdk_1.getSdk)(sdk_domain);
|
|
15
|
+
const record = await sdk.getRecordInfo({
|
|
16
|
+
space_id: file.space_id,
|
|
17
|
+
record_id: task_record_id,
|
|
18
|
+
refresh: true,
|
|
19
|
+
});
|
|
20
|
+
const cloud_info = await parseCloudInfo(record);
|
|
21
|
+
await fileRepository.update(file.id, {
|
|
22
|
+
cloud_info,
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
async function getLocalFolderList(id, list = []) {
|
|
26
|
+
if (id === 0) {
|
|
27
|
+
return list;
|
|
28
|
+
}
|
|
29
|
+
const file = await fileRepository.findOneBy({ id });
|
|
30
|
+
if (!file) {
|
|
31
|
+
return list;
|
|
32
|
+
}
|
|
33
|
+
if (!file.cloud_info?.id) {
|
|
34
|
+
list.unshift(file);
|
|
35
|
+
if (file.pid !== 0) {
|
|
36
|
+
return await getLocalFolderList(file.pid, list);
|
|
37
|
+
}
|
|
38
|
+
else {
|
|
39
|
+
return list;
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
else {
|
|
43
|
+
return list;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
async function parseCloudInfo(record) {
|
|
47
|
+
const { id, name, cloud_info } = record;
|
|
48
|
+
const info = { id, name, hosts: {} };
|
|
49
|
+
if (cloud_info.ali) {
|
|
50
|
+
info.hosts.ali = {
|
|
51
|
+
id: cloud_info.ali.id,
|
|
52
|
+
name: cloud_info.ali.name,
|
|
53
|
+
drive_id: cloud_info.ali.drive_id,
|
|
54
|
+
file_id: cloud_info.ali.file_id,
|
|
55
|
+
};
|
|
56
|
+
}
|
|
57
|
+
if (cloud_info.baidu) {
|
|
58
|
+
info.hosts.baidu = {
|
|
59
|
+
id: cloud_info.baidu.id,
|
|
60
|
+
name: cloud_info.baidu.name,
|
|
61
|
+
fs_id: cloud_info.baidu.fs_id,
|
|
62
|
+
path: cloud_info.baidu.path,
|
|
63
|
+
};
|
|
64
|
+
if (!info.hosts.baidu.path) {
|
|
65
|
+
const parent = await fileRepository.findOneBy({ id: file.pid });
|
|
66
|
+
const { path: parentPath } = parent.cloud_info.hosts.baidu;
|
|
67
|
+
if (parentPath) {
|
|
68
|
+
info.hosts.baidu.path = `${parentPath}/${cloud_info.baidu.name}`;
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
return info;
|
|
73
|
+
}
|
|
74
|
+
async function createCloudFolderOrFile(file) {
|
|
75
|
+
if (file.cloud_info?.id)
|
|
76
|
+
return;
|
|
77
|
+
const { inputs, space_id, task_record_id } = file;
|
|
78
|
+
let pid = task_record_id;
|
|
79
|
+
if (file.pid !== 0) {
|
|
80
|
+
const parent = await fileRepository.findOneBy({ id: file.pid });
|
|
81
|
+
pid = parent.cloud_info.id;
|
|
82
|
+
}
|
|
83
|
+
const sdk = (0, sdk_1.getSdk)(sdk_domain);
|
|
84
|
+
const recordInfo = await sdk.createRecord({
|
|
85
|
+
space_id,
|
|
86
|
+
name: inputs[0].filename,
|
|
87
|
+
parent_id: pid,
|
|
88
|
+
type: file.type,
|
|
89
|
+
ftype: types_1.RecordFtype.NONE,
|
|
90
|
+
});
|
|
91
|
+
const info = await parseCloudInfo(recordInfo);
|
|
92
|
+
await fileRepository.update({ id: file.id }, { cloud_info: info });
|
|
93
|
+
}
|
|
94
|
+
async function ensureParents() {
|
|
95
|
+
const file = await fileRepository.findOneBy({ id: file_id });
|
|
96
|
+
if (!file)
|
|
97
|
+
return;
|
|
98
|
+
const unCloudParentList = await getLocalFolderList(file.pid, []);
|
|
99
|
+
for (const item of unCloudParentList) {
|
|
100
|
+
await createCloudFolderOrFile(item);
|
|
101
|
+
}
|
|
102
|
+
return unCloudParentList;
|
|
103
|
+
}
|
|
104
|
+
}
|
package/dist/hooks/trasform.js
CHANGED
|
@@ -1 +1,86 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.transform = transform;
|
|
4
|
+
const types_1 = require("@soga/types");
|
|
5
|
+
const fs_extra_1 = require("fs-extra");
|
|
6
|
+
const typeorm_1 = require("typeorm");
|
|
7
|
+
async function transform(params) {
|
|
8
|
+
const { fileRepository, partRepository, successRepository } = params;
|
|
9
|
+
const file = await fileRepository.findOneBy({ id: params.file_id });
|
|
10
|
+
const success_file_fields = {
|
|
11
|
+
root_id: file.root_id,
|
|
12
|
+
pid: 0,
|
|
13
|
+
space_id: file.space_id,
|
|
14
|
+
space_name: file.space_name,
|
|
15
|
+
uid: file.uid,
|
|
16
|
+
inputs: file.inputs.map((item) => ({
|
|
17
|
+
filename: item.filename,
|
|
18
|
+
filepath: item.filepath,
|
|
19
|
+
filesize: item.filesize,
|
|
20
|
+
})),
|
|
21
|
+
cloud_id: file.cloud_info.id,
|
|
22
|
+
cloud_name: file.cloud_info.name,
|
|
23
|
+
type: file.type,
|
|
24
|
+
};
|
|
25
|
+
const isRoot = file.root_id == 0;
|
|
26
|
+
if (!isRoot) {
|
|
27
|
+
const success_root = await successRepository.findOneBy({
|
|
28
|
+
root_id: file.root_id,
|
|
29
|
+
pid: 0,
|
|
30
|
+
});
|
|
31
|
+
if (!success_root) {
|
|
32
|
+
const file_root = await fileRepository.findOneBy({
|
|
33
|
+
id: file.root_id,
|
|
34
|
+
});
|
|
35
|
+
const success_root_result = await successRepository.save(successRepository.create({
|
|
36
|
+
root_id: file.root_id,
|
|
37
|
+
pid: 0,
|
|
38
|
+
space_id: file_root.space_id,
|
|
39
|
+
space_name: file_root.space_name,
|
|
40
|
+
uid: file_root.uid,
|
|
41
|
+
inputs: file_root.inputs?.map((item) => ({
|
|
42
|
+
filename: item.filename,
|
|
43
|
+
filepath: item.filepath,
|
|
44
|
+
filesize: item.filesize,
|
|
45
|
+
})),
|
|
46
|
+
cloud_id: file_root.cloud_info.id,
|
|
47
|
+
cloud_name: file_root.cloud_info.name,
|
|
48
|
+
type: file_root.type,
|
|
49
|
+
}));
|
|
50
|
+
success_file_fields.pid = success_root_result.id;
|
|
51
|
+
}
|
|
52
|
+
else {
|
|
53
|
+
success_file_fields.pid = success_root.id;
|
|
54
|
+
}
|
|
55
|
+
await successRepository.update({
|
|
56
|
+
id: success_file_fields.pid,
|
|
57
|
+
}, {
|
|
58
|
+
updated_at: new Date(),
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
await successRepository.save(successRepository.create(success_file_fields));
|
|
62
|
+
await fileRepository.delete({
|
|
63
|
+
id: params.file_id,
|
|
64
|
+
});
|
|
65
|
+
await partRepository.delete({
|
|
66
|
+
file_id: params.file_id,
|
|
67
|
+
});
|
|
68
|
+
await (0, fs_extra_1.remove)(file.output_root);
|
|
69
|
+
if (!isRoot) {
|
|
70
|
+
const exist = await fileRepository.findOneBy({
|
|
71
|
+
root_id: file.root_id,
|
|
72
|
+
type: (0, typeorm_1.Not)(types_1.RecordType.FOLDER),
|
|
73
|
+
});
|
|
74
|
+
if (!exist) {
|
|
75
|
+
await fileRepository.delete({
|
|
76
|
+
root_id: file.root_id,
|
|
77
|
+
});
|
|
78
|
+
await fileRepository.delete({
|
|
79
|
+
id: file.root_id,
|
|
80
|
+
});
|
|
81
|
+
}
|
|
82
|
+
else {
|
|
83
|
+
await fileRepository.increment({ id: file.root_id }, 'completed_count', 1);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
}
|
|
@@ -1 +1,107 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getAliUploader = exports.AliUploader = void 0;
|
|
7
|
+
const base_1 = require("./base");
|
|
8
|
+
const types_1 = require("@soga/types");
|
|
9
|
+
const piscina_1 = __importDefault(require("piscina"));
|
|
10
|
+
const node_worker_threads_1 = require("node:worker_threads");
|
|
11
|
+
const piscina = new piscina_1.default({
|
|
12
|
+
filename: require.resolve('@soga/part-uploader'),
|
|
13
|
+
});
|
|
14
|
+
const uploaders = new Map();
|
|
15
|
+
class AliUploader extends base_1.BaseUploader {
|
|
16
|
+
params;
|
|
17
|
+
constructor(params) {
|
|
18
|
+
super(params, types_1.HostType.ALI);
|
|
19
|
+
this.params = params;
|
|
20
|
+
}
|
|
21
|
+
getThread(part) {
|
|
22
|
+
const abortController = new AbortController();
|
|
23
|
+
const { port1, port2 } = new node_worker_threads_1.MessageChannel();
|
|
24
|
+
return {
|
|
25
|
+
file_id: part.file_id,
|
|
26
|
+
part_id: part.id,
|
|
27
|
+
uid: part.uid,
|
|
28
|
+
start: async () => {
|
|
29
|
+
try {
|
|
30
|
+
const { file_id, host_id, id: part_id, info } = part;
|
|
31
|
+
const file = await this.fileRepository.findOneBy({ id: file_id });
|
|
32
|
+
const { output_root } = file;
|
|
33
|
+
port2.on('message', async (event) => {
|
|
34
|
+
await this.onPartProgress(event);
|
|
35
|
+
});
|
|
36
|
+
port2.on('error', (evt) => {
|
|
37
|
+
console.log('ali upload part error evt: ');
|
|
38
|
+
console.log(evt);
|
|
39
|
+
});
|
|
40
|
+
const params = {
|
|
41
|
+
file_id,
|
|
42
|
+
host_id,
|
|
43
|
+
part_id,
|
|
44
|
+
output_root,
|
|
45
|
+
part: info,
|
|
46
|
+
cloud_folder_id: file.cloud_info.hosts.ali?.file_id,
|
|
47
|
+
sdk_domain: this.params.sdk_domain,
|
|
48
|
+
port: port1,
|
|
49
|
+
};
|
|
50
|
+
const result = await piscina.run(params, {
|
|
51
|
+
name: 'uploadAli',
|
|
52
|
+
signal: abortController.signal,
|
|
53
|
+
transferList: [port1],
|
|
54
|
+
});
|
|
55
|
+
if (result) {
|
|
56
|
+
await this.partRepository.update(part.id, {
|
|
57
|
+
upload_result: {
|
|
58
|
+
ali: result,
|
|
59
|
+
},
|
|
60
|
+
upload_status: types_1.UploadStatus.SUCCESS,
|
|
61
|
+
});
|
|
62
|
+
await this.onPartSuccess({
|
|
63
|
+
file_id,
|
|
64
|
+
host_type: types_1.HostType.ALI,
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
catch (error) {
|
|
69
|
+
if (error.name == 'AbortError') {
|
|
70
|
+
this.partRepository.update(part.id, {
|
|
71
|
+
upload_status: types_1.UploadStatus.NULL,
|
|
72
|
+
});
|
|
73
|
+
}
|
|
74
|
+
else {
|
|
75
|
+
await this.onPartError({
|
|
76
|
+
error,
|
|
77
|
+
part_id: part.id,
|
|
78
|
+
file_id: part.file_id,
|
|
79
|
+
});
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
finally {
|
|
83
|
+
this.threads = this.threads.filter((thread) => thread.part_id != part.id);
|
|
84
|
+
port2.close();
|
|
85
|
+
await this.start();
|
|
86
|
+
}
|
|
87
|
+
},
|
|
88
|
+
stop: async () => {
|
|
89
|
+
abortController.abort();
|
|
90
|
+
this.threads = this.threads.filter((thread) => thread.part_id !== part.id);
|
|
91
|
+
},
|
|
92
|
+
};
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
exports.AliUploader = AliUploader;
|
|
96
|
+
const getAliUploader = async (params) => {
|
|
97
|
+
const { uid, host_id } = params;
|
|
98
|
+
const key = `${uid}_${host_id}`;
|
|
99
|
+
let uploader = uploaders.get(key);
|
|
100
|
+
if (!uploader) {
|
|
101
|
+
uploader = new AliUploader(params);
|
|
102
|
+
uploader.setThreads(3);
|
|
103
|
+
uploaders.set(key, uploader);
|
|
104
|
+
}
|
|
105
|
+
return uploader;
|
|
106
|
+
};
|
|
107
|
+
exports.getAliUploader = getAliUploader;
|
|
@@ -1 +1,110 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getBaiduUploader = exports.BaiduUploader = void 0;
|
|
7
|
+
const base_1 = require("./base");
|
|
8
|
+
const types_1 = require("@soga/types");
|
|
9
|
+
const piscina_1 = __importDefault(require("piscina"));
|
|
10
|
+
const node_worker_threads_1 = require("node:worker_threads");
|
|
11
|
+
const piscina = new piscina_1.default({
|
|
12
|
+
filename: require.resolve('@soga/part-uploader'),
|
|
13
|
+
});
|
|
14
|
+
const uploaders = new Map();
|
|
15
|
+
class BaiduUploader extends base_1.BaseUploader {
|
|
16
|
+
params;
|
|
17
|
+
constructor(params) {
|
|
18
|
+
super(params, types_1.HostType.BAIDU);
|
|
19
|
+
this.params = params;
|
|
20
|
+
}
|
|
21
|
+
getThread(part) {
|
|
22
|
+
const abortController = new AbortController();
|
|
23
|
+
const { port1, port2 } = new node_worker_threads_1.MessageChannel();
|
|
24
|
+
return {
|
|
25
|
+
file_id: part.file_id,
|
|
26
|
+
part_id: part.id,
|
|
27
|
+
uid: part.uid,
|
|
28
|
+
start: async () => {
|
|
29
|
+
try {
|
|
30
|
+
const { file_id, host_id, id: part_id, info } = part;
|
|
31
|
+
const file = await this.fileRepository.findOneBy({ id: file_id });
|
|
32
|
+
const { output_root } = file;
|
|
33
|
+
port2.on('message', async (event) => {
|
|
34
|
+
await this.onPartProgress(event);
|
|
35
|
+
});
|
|
36
|
+
port2.on('error', async (evt) => {
|
|
37
|
+
console.log('baidu upload part error evt: ');
|
|
38
|
+
console.log(evt);
|
|
39
|
+
});
|
|
40
|
+
const params = {
|
|
41
|
+
file_id,
|
|
42
|
+
host_id,
|
|
43
|
+
part_id,
|
|
44
|
+
output_root,
|
|
45
|
+
part: info,
|
|
46
|
+
cloud_folder_path: file.cloud_info.hosts.baidu?.path,
|
|
47
|
+
sdk_domain: this.params.sdk_domain,
|
|
48
|
+
port: port1,
|
|
49
|
+
};
|
|
50
|
+
const result = await piscina.run(params, {
|
|
51
|
+
name: 'uploadBaidu',
|
|
52
|
+
signal: abortController.signal,
|
|
53
|
+
transferList: [port1],
|
|
54
|
+
});
|
|
55
|
+
if (result) {
|
|
56
|
+
await this.partRepository.update(part.id, {
|
|
57
|
+
upload_result: {
|
|
58
|
+
baidu: result,
|
|
59
|
+
},
|
|
60
|
+
upload_status: types_1.UploadStatus.SUCCESS,
|
|
61
|
+
});
|
|
62
|
+
await this.onPartSuccess({
|
|
63
|
+
file_id,
|
|
64
|
+
host_type: types_1.HostType.BAIDU,
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
catch (error) {
|
|
69
|
+
console.log('error_name: ', error.name);
|
|
70
|
+
if (error.name == 'AbortError') {
|
|
71
|
+
await this.partRepository.update(part.id, {
|
|
72
|
+
upload_status: types_1.UploadStatus.NULL,
|
|
73
|
+
});
|
|
74
|
+
}
|
|
75
|
+
else {
|
|
76
|
+
console.log('baidu upload part error: ');
|
|
77
|
+
console.log(error);
|
|
78
|
+
await this.onPartError({
|
|
79
|
+
error,
|
|
80
|
+
part_id: part.id,
|
|
81
|
+
file_id: part.file_id,
|
|
82
|
+
});
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
finally {
|
|
86
|
+
this.threads = this.threads.filter((thread) => thread.part_id != part.id);
|
|
87
|
+
port2.close();
|
|
88
|
+
await this.start();
|
|
89
|
+
}
|
|
90
|
+
},
|
|
91
|
+
stop: async () => {
|
|
92
|
+
abortController.abort();
|
|
93
|
+
this.threads = this.threads.filter((thread) => thread.part_id !== part.id);
|
|
94
|
+
},
|
|
95
|
+
};
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
exports.BaiduUploader = BaiduUploader;
|
|
99
|
+
const getBaiduUploader = async (params) => {
|
|
100
|
+
const { uid, host_id } = params;
|
|
101
|
+
const key = `${uid}_${host_id}`;
|
|
102
|
+
let uploader = uploaders.get(key);
|
|
103
|
+
if (!uploader) {
|
|
104
|
+
uploader = new BaiduUploader(params);
|
|
105
|
+
uploader.setThreads(6);
|
|
106
|
+
uploaders.set(key, uploader);
|
|
107
|
+
}
|
|
108
|
+
return uploader;
|
|
109
|
+
};
|
|
110
|
+
exports.getBaiduUploader = getBaiduUploader;
|
|
@@ -1 +1,426 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.BaseUploader = void 0;
|
|
4
|
+
const typeorm_1 = require("typeorm");
|
|
5
|
+
const entities_1 = require("@soga/entities");
|
|
6
|
+
const types_1 = require("@soga/types");
|
|
7
|
+
const prepare_1 = require("../hooks/prepare");
|
|
8
|
+
const utils_1 = require("@soga/utils");
|
|
9
|
+
const complete_1 = require("../hooks/complete");
|
|
10
|
+
const trasform_1 = require("../hooks/trasform");
|
|
11
|
+
let isDequeueing = false;
|
|
12
|
+
const runningMap = new Map();
|
|
13
|
+
const successMap = new Map();
|
|
14
|
+
class BaseUploader {
|
|
15
|
+
hostType;
|
|
16
|
+
onProgress = async () => { };
|
|
17
|
+
onSuccess = async () => { };
|
|
18
|
+
onError = async () => { };
|
|
19
|
+
successRepository;
|
|
20
|
+
fileRepository;
|
|
21
|
+
partRepository;
|
|
22
|
+
thread_count = 1;
|
|
23
|
+
maxThreads = 10;
|
|
24
|
+
threads = [];
|
|
25
|
+
baseParams;
|
|
26
|
+
getValidThreads(threads) {
|
|
27
|
+
return Math.min(Math.max(threads ?? 0, 0), this.maxThreads);
|
|
28
|
+
}
|
|
29
|
+
async onPartProgress(params) {
|
|
30
|
+
const { id: file_id, data, type } = params;
|
|
31
|
+
if (type != 'percent')
|
|
32
|
+
return;
|
|
33
|
+
const { part_id, percent } = data;
|
|
34
|
+
const part = await this.partRepository.findOneBy({
|
|
35
|
+
id: part_id,
|
|
36
|
+
});
|
|
37
|
+
if (!part)
|
|
38
|
+
return;
|
|
39
|
+
if (part.upload_percent >= percent)
|
|
40
|
+
return;
|
|
41
|
+
if (part.upload_status == types_1.UploadStatus.ERROR)
|
|
42
|
+
return;
|
|
43
|
+
if (part.upload_status == types_1.UploadStatus.SUCCESS)
|
|
44
|
+
return;
|
|
45
|
+
await this.partRepository.update(part_id, {
|
|
46
|
+
upload_percent: percent,
|
|
47
|
+
});
|
|
48
|
+
const parts = await this.partRepository.findBy({
|
|
49
|
+
file_id,
|
|
50
|
+
host_type: this.hostType,
|
|
51
|
+
});
|
|
52
|
+
let total = 0;
|
|
53
|
+
let uploaded = 0;
|
|
54
|
+
for (const part of parts) {
|
|
55
|
+
total += part.info.size;
|
|
56
|
+
uploaded += part.info.size * part.upload_percent;
|
|
57
|
+
}
|
|
58
|
+
const global_percent = uploaded / total;
|
|
59
|
+
const file = await this.fileRepository.findOneBy({ id: file_id });
|
|
60
|
+
const step = types_1.UploadProcessStep[`upload_${this.hostType}`];
|
|
61
|
+
file.progress[step].percent = global_percent;
|
|
62
|
+
file.percent = (0, utils_1.getProgressPercent)(file.progress);
|
|
63
|
+
await this.fileRepository.update(file_id, {
|
|
64
|
+
progress: file.progress,
|
|
65
|
+
percent: file.percent,
|
|
66
|
+
});
|
|
67
|
+
await this.onProgress(file);
|
|
68
|
+
}
|
|
69
|
+
async onPartSuccess({ file_id, host_type, }) {
|
|
70
|
+
while (successMap.get(file_id)) {
|
|
71
|
+
await new Promise((resolve) => {
|
|
72
|
+
setTimeout(resolve, 100);
|
|
73
|
+
});
|
|
74
|
+
}
|
|
75
|
+
try {
|
|
76
|
+
successMap.set(file_id, true);
|
|
77
|
+
const file = await this.fileRepository.findOneBy({
|
|
78
|
+
id: file_id,
|
|
79
|
+
});
|
|
80
|
+
if (!file)
|
|
81
|
+
return;
|
|
82
|
+
const status = `${host_type}_upload_status`;
|
|
83
|
+
if (file[status] == types_1.UploadStatus.ERROR) {
|
|
84
|
+
return;
|
|
85
|
+
}
|
|
86
|
+
if (file.upload_status == types_1.UploadStatus.ERROR)
|
|
87
|
+
return;
|
|
88
|
+
const exist = await this.partRepository.findOneBy({
|
|
89
|
+
file_id: file.id,
|
|
90
|
+
host_type,
|
|
91
|
+
upload_status: (0, typeorm_1.Not)(types_1.UploadStatus.SUCCESS),
|
|
92
|
+
});
|
|
93
|
+
if (exist)
|
|
94
|
+
return;
|
|
95
|
+
const parts = await this.partRepository.findBy({
|
|
96
|
+
file_id: file.id,
|
|
97
|
+
host_type,
|
|
98
|
+
});
|
|
99
|
+
const result = {};
|
|
100
|
+
if (host_type == types_1.HostType.BAIDU) {
|
|
101
|
+
for (const part of parts) {
|
|
102
|
+
result[part.info.file] = {
|
|
103
|
+
...part.upload_result.baidu,
|
|
104
|
+
md4: part.info.md4,
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
else if (host_type == types_1.HostType.ALI) {
|
|
109
|
+
for (const part of parts) {
|
|
110
|
+
result[part.info.file] = {
|
|
111
|
+
...part.upload_result.ali,
|
|
112
|
+
sha1: part.info.sha1,
|
|
113
|
+
};
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
const result_field = `${host_type}_upload_result`;
|
|
117
|
+
const current_file = await this.fileRepository.findOneBy({
|
|
118
|
+
id: file.id,
|
|
119
|
+
});
|
|
120
|
+
const step = types_1.UploadProcessStep[`upload_${this.hostType}`];
|
|
121
|
+
current_file.progress[step].percent = 1;
|
|
122
|
+
current_file.percent = (0, utils_1.getProgressPercent)(current_file.progress);
|
|
123
|
+
await this.fileRepository.update(file.id, {
|
|
124
|
+
[status]: types_1.UploadStatus.SUCCESS,
|
|
125
|
+
[result_field]: result,
|
|
126
|
+
progress: current_file.progress,
|
|
127
|
+
percent: current_file.percent,
|
|
128
|
+
});
|
|
129
|
+
const latest_file = await this.fileRepository.findOneBy({ id: file.id });
|
|
130
|
+
const allHostUploaded = await this.checkAllHostsUploaded(latest_file);
|
|
131
|
+
if (!allHostUploaded)
|
|
132
|
+
return;
|
|
133
|
+
await this.onFileSuccess(latest_file);
|
|
134
|
+
}
|
|
135
|
+
finally {
|
|
136
|
+
successMap.delete(file_id);
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
async onFileSuccess(file) {
|
|
140
|
+
await (0, complete_1.complete)({
|
|
141
|
+
file_id: file.id,
|
|
142
|
+
fileRepository: this.fileRepository,
|
|
143
|
+
sdk_domain: this.baseParams.sdk_domain,
|
|
144
|
+
});
|
|
145
|
+
const latest_file = await this.fileRepository.findOneBy({
|
|
146
|
+
id: file.id,
|
|
147
|
+
});
|
|
148
|
+
await this.onSuccess(latest_file);
|
|
149
|
+
await (0, trasform_1.transform)({
|
|
150
|
+
file_id: file.id,
|
|
151
|
+
fileRepository: this.fileRepository,
|
|
152
|
+
partRepository: this.partRepository,
|
|
153
|
+
successRepository: this.successRepository,
|
|
154
|
+
});
|
|
155
|
+
}
|
|
156
|
+
async onPartError({ error, part_id, file_id, }) {
|
|
157
|
+
await this.partRepository.update(part_id, {
|
|
158
|
+
upload_status: types_1.UploadStatus.ERROR,
|
|
159
|
+
});
|
|
160
|
+
const status = `${this.hostType}_upload_status`;
|
|
161
|
+
const file = await this.fileRepository.findOneBy({ id: file_id });
|
|
162
|
+
if (file[status] == types_1.UploadStatus.ERROR)
|
|
163
|
+
return;
|
|
164
|
+
await this.fileRepository.update(file.id, {
|
|
165
|
+
[status]: types_1.UploadStatus.ERROR,
|
|
166
|
+
upload_status: types_1.UploadStatus.ERROR,
|
|
167
|
+
});
|
|
168
|
+
await this.onError(error, file);
|
|
169
|
+
}
|
|
170
|
+
constructor(baseParams, host_type) {
|
|
171
|
+
this.hostType = host_type;
|
|
172
|
+
this.baseParams = baseParams;
|
|
173
|
+
const { dataSource } = baseParams;
|
|
174
|
+
this.fileRepository = dataSource.getRepository(entities_1.UploadFile);
|
|
175
|
+
this.successRepository = dataSource.getRepository(entities_1.UploadSuccess);
|
|
176
|
+
this.partRepository = dataSource.getRepository(entities_1.UploadPart);
|
|
177
|
+
this.thread_count = this.getValidThreads(baseParams.threads ?? this.thread_count);
|
|
178
|
+
if (baseParams.onProgress) {
|
|
179
|
+
this.onProgress = baseParams.onProgress.bind(this);
|
|
180
|
+
}
|
|
181
|
+
if (baseParams.onSuccess) {
|
|
182
|
+
this.onSuccess = baseParams.onSuccess.bind(this);
|
|
183
|
+
}
|
|
184
|
+
if (baseParams.onError) {
|
|
185
|
+
this.onError = baseParams.onError.bind(this);
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
async setThreads(threads) {
|
|
189
|
+
const validThreads = this.getValidThreads(threads);
|
|
190
|
+
this.thread_count = validThreads;
|
|
191
|
+
await this.run();
|
|
192
|
+
}
|
|
193
|
+
async start() {
|
|
194
|
+
await this.run();
|
|
195
|
+
}
|
|
196
|
+
async stopFiles(ids) {
|
|
197
|
+
const threads = this.threads.filter((thread) => ids.includes(thread.file_id));
|
|
198
|
+
await Promise.all(threads.map((thread) => thread.stop()));
|
|
199
|
+
await this.start();
|
|
200
|
+
}
|
|
201
|
+
async deleteFiles(ids) {
|
|
202
|
+
const threads = this.threads.filter((thread) => ids.includes(thread.file_id));
|
|
203
|
+
await Promise.all(threads.map((thread) => thread.stop()));
|
|
204
|
+
}
|
|
205
|
+
async stopAll() {
|
|
206
|
+
await Promise.all(this.threads.map((thread) => thread.stop()));
|
|
207
|
+
}
|
|
208
|
+
/**
 * Pull the next eligible file off the upload queue for this host type,
 * mark it PROCESS, run the prepare hook, and create one UploadPart row
 * per encoded part that does not already exist for this host.
 *
 * Guarded by the module-level `isDequeueing` flag so only one dequeue
 * runs at a time — NOTE(review): the flag appears to be shared across
 * all uploader instances in this process, not per host; confirm that is
 * intentional.
 */
async dequeueOneFile() {
    // Busy-wait (50ms polls) until no other dequeue is in flight.
    while (isDequeueing) {
        await new Promise((resolve) => {
            setTimeout(resolve, 50);
        });
    }
    try {
        isDequeueing = true;
        // Per-host-type columns, e.g. "baidu_upload_status" / "baidu_host_id".
        const status_field = `${this.hostType}_upload_status`;
        const host_id_field = `${this.hostType}_host_id`;
        // Oldest file that finished encoding, is not paused, has never been
        // touched for this host (host status IS NULL), and is globally
        // queued or already in progress.
        const file = await this.fileRepository.findOne({
            where: {
                uid: this.baseParams.uid,
                is_paused: false,
                encode_status: types_1.EncodeStatus.SUCCESS,
                [status_field]: (0, typeorm_1.IsNull)(),
                upload_status: (0, typeorm_1.In)([types_1.UploadStatus.NULL, types_1.UploadStatus.PROCESS]),
            },
            order: {
                created_at: 'ASC',
            },
        });
        if (!file)
            return;
        // Claim the file for this host before doing any slow work.
        await this.fileRepository.update(file.id, {
            [status_field]: types_1.UploadStatus.PROCESS,
            upload_status: types_1.UploadStatus.PROCESS,
        });
        await (0, prepare_1.prepare)({
            file_id: file.id,
            sdk_domain: this.baseParams.sdk_domain,
            fileRepository: this.fileRepository,
        });
        // Collect every part produced by the encoder: source/txt/img/media
        // each carry a flat `parts` array; `affix` is a list of items that
        // each carry their own `parts`.
        const { encoded } = file;
        const { affix: affix_data, source: source_data, txt: txt_data, img: img_data, media: media_data, } = encoded;
        const parts = [
            ...(source_data?.parts ?? []),
            ...(txt_data?.parts ?? []),
            ...(img_data?.parts ?? []),
            ...(media_data?.parts ?? []),
        ];
        affix_data?.forEach((item) => {
            item.parts?.forEach((part) => {
                parts.push(part);
            });
        });
        const host_id = file[host_id_field];
        const lists = [];
        const { length } = parts;
        // Insert only parts that do not already have a row for this host —
        // makes re-running the dequeue for the same file idempotent.
        // NOTE(review): one findOneBy query per part; consider batching if
        // files routinely have many parts.
        for (let i = 0; i < length; i++) {
            const item = parts[i];
            const exist = await this.partRepository.findOneBy({
                file_id: file.id,
                part_name: item.file,
                host_id,
            });
            if (!exist) {
                lists.push({
                    uid: this.baseParams.uid,
                    file_id: file.id,
                    part_name: item.file,
                    info: item,
                    output_root: file.output_root,
                    host_id,
                    host_type: this.hostType,
                });
            }
        }
        await this.partRepository.save(lists);
        return;
    }
    finally {
        // Always release the lock, even when prepare/save throws.
        isDequeueing = false;
    }
}
|
|
283
|
+
async checkProcessingFile(file) {
|
|
284
|
+
if (!file)
|
|
285
|
+
return;
|
|
286
|
+
if (file.baidu_host_id &&
|
|
287
|
+
file.baidu_upload_status == types_1.UploadStatus.PROCESS) {
|
|
288
|
+
const exist = await this.partRepository.findOneBy({
|
|
289
|
+
file_id: file.id,
|
|
290
|
+
host_type: types_1.HostType.BAIDU,
|
|
291
|
+
upload_status: (0, typeorm_1.Not)(types_1.UploadStatus.SUCCESS),
|
|
292
|
+
});
|
|
293
|
+
if (!exist) {
|
|
294
|
+
await this.onPartSuccess({
|
|
295
|
+
file_id: file.id,
|
|
296
|
+
host_type: types_1.HostType.BAIDU,
|
|
297
|
+
});
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
if (file.ali_host_id && file.ali_upload_status == types_1.UploadStatus.PROCESS) {
|
|
301
|
+
const exist = await this.partRepository.findOneBy({
|
|
302
|
+
file_id: file.id,
|
|
303
|
+
host_type: types_1.HostType.ALI,
|
|
304
|
+
upload_status: (0, typeorm_1.Not)(types_1.UploadStatus.SUCCESS),
|
|
305
|
+
});
|
|
306
|
+
if (!exist) {
|
|
307
|
+
await this.onPartSuccess({
|
|
308
|
+
file_id: file.id,
|
|
309
|
+
host_type: types_1.HostType.ALI,
|
|
310
|
+
});
|
|
311
|
+
}
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
/**
 * Main scheduling pass for this uploader:
 *   1. repair files stuck in PROCESS with no live thread (checkProcessingFile);
 *   2. requeue part rows stuck in PROCESS with no live thread (back to NULL);
 *   3. top up threads to `thread_count` from queued parts, dequeueing a new
 *      file when parts run out, or stop surplus threads when over the limit.
 *
 * Serialized per uid via the module-level `runningMap`; recurses (after
 * releasing the lock) while more queued parts remain.
 */
async run() {
    const uid = this.baseParams.uid;
    // Busy-wait (100ms polls) until no other run() for this uid is active.
    while (runningMap.get(uid)) {
        await new Promise((resolve) => {
            setTimeout(resolve, 100);
        });
    }
    runningMap.set(uid, true);
    const time1 = Date.now();
    // Step 1: files marked PROCESS with no thread driving them.
    const files = await this.fileRepository.findBy({
        uid: this.baseParams.uid,
        is_paused: false,
        upload_status: types_1.UploadStatus.PROCESS,
    });
    if (files.length) {
        const thread_file_ids = this.threads.map((thread) => thread.file_id);
        const exception_files = files.filter((file) => {
            return !thread_file_ids.includes(file.id);
        });
        for (const file of exception_files) {
            await this.checkProcessingFile(file);
        }
    }
    console.log('exception time1: ', Date.now() - time1);
    const time2 = Date.now();
    // Step 2: part rows marked PROCESS with no thread — reset to NULL so
    // they can be picked up again below.
    const parts = await this.partRepository.findBy({
        uid: this.baseParams.uid,
        is_paused: false,
        upload_status: types_1.UploadStatus.PROCESS,
        host_id: this.baseParams.host_id,
    });
    if (parts.length) {
        const thread_part_ids = this.threads.map((thread) => thread.part_id);
        const exception_parts = parts.filter((part) => {
            return !thread_part_ids.includes(part.id);
        });
        if (exception_parts.length) {
            const ids = exception_parts.map((part) => part.id);
            console.log('exception ids: ', ids);
            await this.partRepository.update({
                id: (0, typeorm_1.In)(ids),
                upload_status: types_1.UploadStatus.PROCESS,
            }, {
                upload_status: types_1.UploadStatus.NULL,
            });
        }
    }
    console.log('exception time2: ', Date.now() - time2);
    // Step 3: balance live thread count against the configured target.
    const real_count = this.threads.length;
    const def_count = this.thread_count;
    if (real_count < def_count) {
        const less = def_count - real_count;
        // Oldest queued (NULL) part for this host, or null when queue empty.
        const getOnePart = async () => {
            const part = await this.partRepository.findOne({
                where: {
                    uid: this.baseParams.uid,
                    is_paused: false,
                    upload_status: types_1.UploadStatus.NULL,
                    host_id: this.baseParams.host_id,
                },
                order: {
                    created_at: 'ASC',
                },
            });
            return part;
        };
        for (let i = 0; i < less; i++) {
            const part = await getOnePart();
            if (!part)
                break;
            const thread = this.getThread(part);
            this.threads.push(thread);
            await this.partRepository.update(part.id, {
                upload_status: types_1.UploadStatus.PROCESS,
            });
            // Deliberately not awaited: the thread uploads in the background.
            // NOTE(review): rejection handling is assumed to live inside the
            // thread — confirm, otherwise this is a floating promise.
            thread.start();
        }
        if (this.threads.length < def_count) {
            // Queue exhausted but slots remain: dequeue the next file's parts
            // and, if that produced work, recurse for another pass. The lock
            // is released first because the recursive call re-acquires it.
            await this.dequeueOneFile();
            const exist_part = await getOnePart();
            if (exist_part) {
                runningMap.delete(uid);
                await this.run();
            }
            else {
                runningMap.delete(uid);
            }
        }
        else {
            runningMap.delete(uid);
        }
    }
    else {
        // Over the target: stop the surplus threads (taken from the front).
        const more = real_count - def_count;
        const threads = this.threads.slice(0, more);
        for (const thread of threads) {
            await thread.stop();
        }
        runningMap.delete(uid);
    }
}
|
|
415
|
+
async checkAllHostsUploaded(file) {
|
|
416
|
+
if (file.baidu_host_id &&
|
|
417
|
+
file.baidu_upload_status != types_1.UploadStatus.SUCCESS) {
|
|
418
|
+
return false;
|
|
419
|
+
}
|
|
420
|
+
if (file.ali_host_id && file.ali_upload_status != types_1.UploadStatus.SUCCESS) {
|
|
421
|
+
return false;
|
|
422
|
+
}
|
|
423
|
+
return true;
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
exports.BaseUploader = BaseUploader;
|
package/dist/main.js
CHANGED
|
@@ -1 +1,45 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
exports.getUploader = void 0;
|
|
18
|
+
__exportStar(require("./host-uploader/baidu"), exports);
|
|
19
|
+
__exportStar(require("./host-uploader/ali"), exports);
|
|
20
|
+
const uploader_1 = require("./uploader");
|
|
21
|
+
const instanceMap = new Map();
|
|
22
|
+
/**
 * Get (or lazily create and initialize) the singleton Uploader for a uid.
 * Concurrent callers for the same uid wait while another call is still
 * running `init()`, then share the cached instance.
 *
 * @param {object} params - Uploader constructor params; `params.uid` keys the cache.
 * @returns {Promise<Uploader>} the initialized uploader for this uid.
 */
const getUploader = async (params) => {
    // Wait (100ms polls) while another caller is initializing this uid.
    while (instanceMap.get(params.uid)?.initing) {
        await new Promise((resolve) => setTimeout(resolve, 100));
    }
    const cached = instanceMap.get(params.uid);
    if (cached?.uploader) {
        return cached.uploader;
    }
    const cache = {
        uploader: new uploader_1.Uploader(params),
        initing: true,
    };
    instanceMap.set(params.uid, cache);
    try {
        await cache.uploader.init();
        return cache.uploader;
    }
    catch (err) {
        // Bug fix: previously a failed init() left the broken uploader cached
        // (with initing reset by the finally), so every later call returned a
        // half-initialized instance. Evict it so the next call retries.
        instanceMap.delete(params.uid);
        throw err;
    }
    finally {
        cache.initing = false;
    }
};
|
|
45
|
+
exports.getUploader = getUploader;
|
package/dist/types/main.js
CHANGED
|
@@ -1 +1,2 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
package/dist/uploader.js
CHANGED
|
@@ -1 +1,102 @@
|
|
|
1
|
-
"use strict";
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.Uploader = void 0;
|
|
4
|
+
const types_1 = require("@soga/types");
|
|
5
|
+
const entities_1 = require("@soga/entities");
|
|
6
|
+
const baidu_1 = require("./host-uploader/baidu");
|
|
7
|
+
const ali_1 = require("./host-uploader/ali");
|
|
8
|
+
/**
 * Facade over per-host uploader processors (Baidu / Ali). Discovers which
 * hosts the user's files are assigned to, creates one processor per host
 * id, and fans control operations (start/stop/delete) out to all of them.
 */
class Uploader {
    params;
    uid;
    dataSource;
    fileRepository;
    partRepository;
    constructor(params) {
        this.params = params;
        this.uid = params.uid;
        this.dataSource = params.dataSource;
        this.fileRepository = this.dataSource.getRepository(entities_1.UploadFile);
        this.partRepository = this.dataSource.getRepository(entities_1.UploadPart);
    }
    // host_id -> host uploader processor
    processorMap = new Map();
    async init() {
    }
    /** Start uploading on every Ali and Baidu host in parallel. */
    async start() {
        await Promise.all([this.startAli(), this.startBaidu()]);
    }
    /** Stop all threads on every host processor. */
    async stopAll() {
        for (const processor of this.processorMap.values()) {
            await processor.stopAll();
        }
    }
    /** Stop the given files' threads on every host processor. */
    async stopFiles(ids) {
        for (const processor of this.processorMap.values()) {
            await processor.stopFiles(ids);
        }
    }
    /** Stop the given files' threads on every host processor before deletion. */
    async deleteFiles(ids) {
        for (const processor of this.processorMap.values()) {
            await processor.deleteFiles(ids);
        }
    }
    /**
     * Ensure a processor exists for `host_id` and start it. Re-starts the
     * cached processor when one is already registered; unknown host types
     * are ignored (matching the previous if/else-if behavior).
     */
    async startHost(host_id, host_type) {
        if (this.processorMap.has(host_id)) {
            const processor = this.processorMap.get(host_id);
            await processor.start();
            return;
        }
        const params = {
            uid: this.uid,
            sdk_domain: this.params.sdk_domain,
            dataSource: this.dataSource,
            host_id,
            onProgress: this.params.onProgress,
            onSuccess: this.params.onSuccess,
            onError: this.params.onError,
        };
        // Single creation path for both host types (was two duplicated branches).
        let processor;
        if (host_type == types_1.HostType.BAIDU) {
            processor = await (0, baidu_1.getBaiduUploader)(params);
        }
        else if (host_type == types_1.HostType.ALI) {
            processor = await (0, ali_1.getAliUploader)(params);
        }
        else {
            return;
        }
        this.processorMap.set(host_id, processor);
        await processor.start();
    }
    /**
     * Shared discovery: collect the distinct non-empty host ids stored in
     * `host_id_field` across this user's files and start each one.
     * (Previously duplicated verbatim in startAli/startBaidu.)
     */
    async startHostsByField(host_id_field, host_type) {
        const rows = await this.fileRepository
            .createQueryBuilder('file')
            .select(`DISTINCT file.${host_id_field}`)
            .where('file.uid = :uid', { uid: this.uid })
            .getRawMany();
        const host_ids = rows
            .map((row) => row[host_id_field])
            .filter((id) => !!id);
        for (const host_id of host_ids) {
            await this.startHost(host_id, host_type);
        }
    }
    async startAli() {
        await this.startHostsByField('ali_host_id', types_1.HostType.ALI);
    }
    async startBaidu() {
        await this.startHostsByField('baidu_host_id', types_1.HostType.BAIDU);
    }
}
|
|
102
|
+
exports.Uploader = Uploader;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@soga/uploader",
|
|
3
|
-
"version": "0.2.
|
|
3
|
+
"version": "0.2.47",
|
|
4
4
|
"publishConfig": {
|
|
5
5
|
"access": "public"
|
|
6
6
|
},
|
|
@@ -55,5 +55,5 @@
|
|
|
55
55
|
"@soga/utils": "^0.2.33",
|
|
56
56
|
"piscina": "^4.9.2"
|
|
57
57
|
},
|
|
58
|
-
"gitHead": "
|
|
58
|
+
"gitHead": "4e92f73b09132af1a2c2a3760bc5998ba2538ce9"
|
|
59
59
|
}
|