@soga/uploader 0.0.2 → 0.0.4
This diff shows the content of publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +1 -1
- package/dist/main.d.ts +4 -5
- package/dist/main.js +1 -1
- package/dist/prepare/prepare.d.ts +15 -0
- package/dist/prepare/prepare.js +1 -0
- package/dist/processor/ali.d.ts +11 -0
- package/dist/processor/ali.js +1 -0
- package/dist/processor/baidu.d.ts +11 -0
- package/dist/processor/baidu.js +1 -0
- package/dist/processor/base.d.ts +33 -0
- package/dist/processor/base.js +1 -0
- package/dist/types/main.d.ts +55 -16
- package/dist/uploader/uploader.d.ts +26 -0
- package/dist/uploader/uploader.js +1 -0
- package/package.json +59 -57
- package/dist/types/runtime.d.ts +0 -42
- package/dist/types/runtime.js +0 -1
- package/dist/uploader/ali.d.ts +0 -57
- package/dist/uploader/ali.js +0 -1
- package/dist/uploader/baidu.d.ts +0 -45
- package/dist/uploader/baidu.js +0 -1
- package/dist/uploader/base.d.ts +0 -82
- package/dist/uploader/base.js +0 -1
- package/dist/utils/chunk.d.ts +0 -3
- package/dist/utils/chunk.js +0 -1
package/README.md
CHANGED
@@ -1 +1 @@
-# uploader
+# uploader
package/dist/main.d.ts
CHANGED
@@ -1,5 +1,4 @@
-import {
-import {
-
-export declare const
-export declare const getAliUploader: (params: AliUploaderParams) => Promise<AliUploader>;
+import { UploaderParams } from './types/main';
+import { Uploader } from './uploader/uploader';
+export declare const getUploader: (params: UploaderParams) => Promise<Uploader>;
+export declare const getUploaderByUid: (uid: number) => Promise<Uploader>;
package/dist/main.js
CHANGED
@@ -1 +1 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.getUploaderByUid=exports.getUploader=void 0;const uploader_1=require("./uploader/uploader"),instanceMap=new Map,getUploader=async e=>{for(;instanceMap.get(e.uid)?.initing;)await new Promise((e=>setTimeout(e,100)));if(instanceMap.get(e.uid)?.uploader)return instanceMap.get(e.uid).uploader;const t={uploader:new uploader_1.Uploader(e),initing:!0};return instanceMap.set(e.uid,t),await t.uploader.init(),t.initing=!1,instanceMap.set(e.uid,t),t.uploader};exports.getUploader=getUploader;const getUploaderByUid=async e=>{if(instanceMap.has(e)){for(;instanceMap.get(e)?.initing;)await new Promise((e=>setTimeout(e,100)));return instanceMap.get(e)?.uploader}return null};exports.getUploaderByUid=getUploaderByUid;
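The rewritten entry point replaces the per-host `getAliUploader` export with a uid-keyed factory: `getUploader` caches one `Uploader` per `uid` in a module-level `Map` and polls every 100 ms while another caller's `init()` is still in flight, so concurrent callers share a single instance; `getUploaderByUid` returns the cached instance, or `null` for an unknown uid. A minimal call-site sketch — only the parameter shape comes from `dist/types/main.d.ts` below, and every callback body is a placeholder:

```ts
import { DataSource } from 'typeorm';
import { getUploader, getUploaderByUid } from '@soga/uploader';

// Hypothetical wiring: only the parameter shape is taken from this diff's
// dist/types/main.d.ts; the callback bodies below are placeholders.
async function startUserUploads(dataSource: DataSource, uid: number) {
  const uploader = await getUploader({
    uid,
    dataSource,
    // Must create the mirrored cloud record and resolve to its RecordDetail.
    createCloudRecord: async (params) => {
      throw new Error('wire this to your record service');
    },
    getAliAccessToken: async (ali_host_id) => 'ali-token-placeholder',
    getBaiduAccessToken: async (baidu_host_id) => 'baidu-token-placeholder',
    onProgress: async (file) => console.log('progress', file.id),
  });
  await uploader.start();

  // A second lookup by uid reuses the cached, fully initialized instance.
  const same = await getUploaderByUid(uid);
  console.log(same === uploader); // true
}
```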
package/dist/prepare/prepare.d.ts
ADDED
@@ -0,0 +1,15 @@
+import { UploadFile } from '@soga/entities';
+import { RecordDetail, RecordFtype, RecordType } from '@soga/types';
+import { Repository } from 'typeorm';
+export declare function prepare({ file_id, fileRepository, createCloudRecord, }: {
+    file_id: number;
+    fileRepository: Repository<UploadFile>;
+    createCloudRecord: ({ uid, space_id, name, parent_id, type, ftype, }: Partial<{
+        uid: number;
+        space_id: number;
+        name: string;
+        parent_id: number;
+        type: RecordType;
+        ftype: RecordFtype;
+    }>) => Promise<RecordDetail>;
+}): Promise<void>;
package/dist/prepare/prepare.js
ADDED
@@ -0,0 +1 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.prepare=prepare;const types_1=require("@soga/types");async function prepare({file_id:i,fileRepository:a,createCloudRecord:d}){await async function(){const d=await a.findOneBy({id:i});if(!d)return;const e=await t(d.pid,[]);for(const i of e)await n(i);return e}();const e=await a.findOneBy({id:i});async function t(i,d=[]){if(0===i)return d;const e=await a.findOneBy({id:i});return e?e.cloud_info?.id?d:(d.unshift(e),0!==e.pid?await t(e.pid,d):d):d}async function n(i){if(i.cloud_info?.id)return;const{filename:e,uid:t,space_id:n,task_record_id:o}=i;let s=o;if(0!==i.pid){s=(await a.findOneBy({id:i.pid})).cloud_info.id}const p=await d({uid:t,space_id:n,name:e,parent_id:s,type:i.type,ftype:i.type==types_1.RecordType.FOLDER?types_1.RecordFtype.NONE:i.ftype}),{id:r,cloud_info:u}=p,f={id:r,hosts:{}};if(u.ali&&(f.hosts.ali={id:u.ali.id,name:u.ali.name,drive_id:u.ali.drive_id,file_id:u.ali.file_id}),u.baidu&&(f.hosts.baidu={id:u.baidu.id,name:u.baidu.name,fs_id:u.baidu.fs_id,path:u.baidu.path},!f.hosts.baidu.path)){const d=await a.findOneBy({id:i.pid}),{path:e}=d.cloud_info.hosts.baidu;e&&(f.hosts.baidu.path=`${e}/${u.baidu.name}`)}await a.update({id:i.id},{cloud_info:f})}e&&await n(e)}
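`prepare` runs once per dequeued file: it walks the `pid` chain upward until it reaches the root or the first ancestor that already carries a `cloud_info.id`, creates the missing records top-down through `createCloudRecord` (folders get `RecordFtype.NONE`), and derives a Baidu `path` from the parent record whenever the host response omits one. The ancestor walk, reduced to a plain in-memory shape (a sketch; the `Node` type is an assumption, not the real `UploadFile` entity):

```ts
// Sketch of the ancestor walk inside prepare(): collect every ancestor that
// has no cloud record yet, root-most first, stopping at the first synced one.
type Node = { id: number; pid: number; cloud_id?: number };

function collectUnsynced(byId: Map<number, Node>, id: number, acc: Node[] = []): Node[] {
  if (id === 0) return acc;               // reached the virtual root
  const node = byId.get(id);
  if (!node || node.cloud_id) return acc; // first synced ancestor ends the walk
  acc.unshift(node);                      // parents end up before children
  return node.pid !== 0 ? collectUnsynced(byId, node.pid, acc) : acc;
}

// prepare() then creates the collected nodes remotely in order, so each
// createCloudRecord call can use its parent's freshly assigned cloud id.
```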
package/dist/processor/ali.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { AliProcessorParams, GetAliProcessorParams, ThreadType } from '../types/main';
+import { BaseProcessor } from './base';
+import { UploadPart } from '@soga/entities';
+import { HostType } from '@soga/types';
+export declare class AliProcessor extends BaseProcessor {
+    protected hostType: HostType;
+    private params;
+    constructor(params: AliProcessorParams);
+    getThread(part: UploadPart): ThreadType;
+}
+export declare const getAliProcessor: (params: GetAliProcessorParams) => Promise<AliProcessor>;
package/dist/processor/ali.js
ADDED
@@ -0,0 +1 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.getAliProcessor=exports.AliProcessor=void 0;const single_uploader_1=require("@soga/single-uploader"),base_1=require("./base"),types_1=require("@soga/types"),processors=new Map;class AliProcessor extends base_1.BaseProcessor{hostType=types_1.HostType.ALI;params;constructor(s){super(s),this.params=s}getThread(s){let i,e=null;return{file_id:s.file_id,part_id:s.id,uid:s.uid,start:async()=>{try{const{filepath:t,output_root:r,info:o}=s,a=await this.fileRepository.findOneBy({id:s.file_id}),d={file_id:s.file_id,filePath:t,part:o,uid:s.uid,part_id:s.id,outputRoot:r,drive_id:a.cloud_info.hosts.ali.drive_id,cloud_parent_id:a.cloud_info.hosts.ali.file_id,getAccessToken:async()=>await this.params.getAccessToken(a.ali_host_id),onProgress:async s=>{await this.onPartProgress(s)}};i=(0,single_uploader_1.getAliUploader)(d),e=await i.start(),e&&(await this.partRepository.update(s.id,{ali_result:e,ali_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess(await this.fileRepository.findOneBy({id:s.file_id})))}catch(i){await this.onPartError(i,await this.fileRepository.findOneBy({id:s.file_id}))}finally{e&&(this.threads=this.threads.filter((i=>i.part_id!==s.id))),await this.start()}},stop:async()=>{i&&(i.stop(),this.threads=this.threads.filter((i=>i.part_id!==s.id)))}}}}exports.AliProcessor=AliProcessor;const getAliProcessor=async s=>{const{uid:i,unique:e}=s,t=`${i}_${e}`;let r=processors.get(t);if(r)for(;r.is_initing;)await new Promise((s=>setTimeout(s,20)));else r=new AliProcessor(s),r.setThreads(3),processors.set(t,r),await r.init();return r};exports.getAliProcessor=getAliProcessor;
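`getAliProcessor` keeps one processor instance per `${uid}_${unique}` key (capped at 3 threads via `setThreads(3)`); a caller that arrives while another is still initializing polls `is_initing` in 20 ms steps instead of constructing a duplicate. The same keyed-singleton pattern in generic form (a sketch; the `Initable` interface is an assumption for illustration):

```ts
// Generic form of the keyed singleton with init-wait used by getAliProcessor
// and getBaiduProcessor.
interface Initable {
  is_initing: boolean;   // must be flipped synchronously at the start of init()
  init(): Promise<void>;
}

const cache = new Map<string, Initable>();

async function getOrInit<T extends Initable>(key: string, create: () => T): Promise<T> {
  const existing = cache.get(key) as T | undefined;
  if (existing) {
    // Another caller may still be initializing this key: poll until done.
    while (existing.is_initing) await new Promise((r) => setTimeout(r, 20));
    return existing;
  }
  const instance = create();
  cache.set(key, instance); // publish before awaiting so peers can find it
  await instance.init();
  return instance;
}
```

The Baidu variant below is identical apart from allowing 5 threads.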
package/dist/processor/baidu.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { BaiduProcessorParams, GetBaiduProcessorParams, ThreadType } from '../types/main';
+import { BaseProcessor } from './base';
+import { UploadPart } from '@soga/entities';
+import { HostType } from '@soga/types';
+export declare class BaiduProcessor extends BaseProcessor {
+    protected hostType: HostType;
+    private params;
+    constructor(params: BaiduProcessorParams);
+    getThread(part: UploadPart): ThreadType;
+}
+export declare const getBaiduProcessor: (params: GetBaiduProcessorParams) => Promise<BaiduProcessor>;
package/dist/processor/baidu.js
ADDED
@@ -0,0 +1 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.getBaiduProcessor=exports.BaiduProcessor=void 0;const single_uploader_1=require("@soga/single-uploader"),base_1=require("./base"),types_1=require("@soga/types"),processors=new Map;class BaiduProcessor extends base_1.BaseProcessor{hostType=types_1.HostType.BAIDU;params;constructor(s){super(s),this.params=s}getThread(s){let t,e=null;return{file_id:s.file_id,part_id:s.id,uid:s.uid,start:async()=>{try{const{filepath:i,output_root:r,info:a}=s,o=await this.fileRepository.findOneBy({id:s.file_id}),d={file_id:s.file_id,filePath:i,part:a,uid:s.uid,part_id:s.id,outputRoot:r,cloudRoot:o.cloud_info.hosts.baidu.path,getAccessToken:async()=>await this.params.getAccessToken(o.baidu_host_id),onProgress:async s=>{await this.onPartProgress(s)}};t=(0,single_uploader_1.getBaiduUploader)(d),e=await t.start(),e&&(await this.partRepository.update(s.id,{baidu_result:e,baidu_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess(await this.fileRepository.findOneBy({id:s.file_id})))}catch(t){await this.onPartError(t,await this.fileRepository.findOneBy({id:s.file_id}))}finally{e&&(this.threads=this.threads.filter((t=>t.part_id!==s.id))),await this.start()}},stop:async()=>{t&&(t.stop(),this.threads=this.threads.filter((t=>t.part_id!==s.id)))}}}}exports.BaiduProcessor=BaiduProcessor;const getBaiduProcessor=async s=>{const{uid:t,unique:e}=s,i=`${t}_${e}`;let r=processors.get(i);if(r)for(;r.is_initing;)await new Promise((s=>setTimeout(s,20)));else r=new BaiduProcessor(s),r.setThreads(5),processors.set(i,r),await r.init();return r};exports.getBaiduProcessor=getBaiduProcessor;
package/dist/processor/base.d.ts
ADDED
@@ -0,0 +1,33 @@
+import { Repository } from 'typeorm';
+import { UploadFile, UploadPart } from '@soga/entities';
+import { HostType } from '@soga/types';
+import { ProcessorParams, ThreadType } from '../types/main';
+import { PartUplodProgress } from '@soga/single-uploader';
+export declare abstract class BaseProcessor {
+    protected hostType: HostType;
+    protected onProgress: (file: UploadFile) => Promise<void>;
+    protected onSuccess: (file: UploadFile) => Promise<void>;
+    protected onError: (err: Error, file: UploadFile) => Promise<void>;
+    protected fileRepository: Repository<UploadFile>;
+    protected partRepository: Repository<UploadPart>;
+    private isRunning;
+    private thread_count;
+    private maxThreads;
+    protected threads: ThreadType[];
+    baseParams: ProcessorParams;
+    abstract getThread(part: UploadPart): ThreadType;
+    protected getValidThreads(threads: number): number;
+    protected onPartProgress(params: PartUplodProgress): Promise<void>;
+    protected onPartSuccess(file: UploadFile): Promise<void>;
+    protected onPartError(err: Error, file: UploadFile): Promise<void>;
+    constructor(baseParams: ProcessorParams);
+    setThreads(threads: number): Promise<void>;
+    is_initing: boolean;
+    init(): Promise<void>;
+    start(): Promise<void>;
+    stopFiles(ids: number[]): Promise<void>;
+    stopAll(): Promise<void>;
+    repairFiles(ids: number[]): Promise<void>;
+    private run;
+    protected dequeueOneFile(): Promise<boolean>;
+}
package/dist/processor/base.js
ADDED
@@ -0,0 +1 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseProcessor=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../prepare/prepare");let isDequeueing=!1;class BaseProcessor{hostType;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};fileRepository;partRepository;isRunning=!1;thread_count=3;maxThreads=10;threads=[];baseParams;getValidThreads(t){return Math.min(Math.max(t??0,0),this.maxThreads)}async onPartProgress(t){const{file_id:s,part_id:i,host_type:e,percent:a,type:o}=t,r=`${e}_percent`;await this.partRepository.update(i,{[r]:a});const p=await this.partRepository.findBy({file_id:s});let d=0,n=0;for(const t of p)d+=t.size,n+=t.size*t[r];const u=n/d,h=await this.fileRepository.findOneBy({id:s});h.progress[o].percent=u,await this.fileRepository.update(s,{progress:h.progress}),await this.onProgress(h)}async onPartSuccess(t){const{ali_host_id:s,baidu_host_id:i}=t;if(s){if((await this.partRepository.findBy({file_id:t.id,ali_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)})).length)return}if(i){if((await this.partRepository.findBy({file_id:t.id,baidu_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)})).length)return}const e={ali:{},baidu:{}},a=await this.partRepository.findBy({file_id:t.id});for(const t of a)t.ali_status===types_1.UploadStatus.SUCCESS&&(e.ali[t.info.file]=t.ali_result),t.baidu_status===types_1.UploadStatus.SUCCESS&&(e.baidu[t.info.file]=t.baidu_result);await this.fileRepository.update(t.id,{upload_status:types_1.UploadStatus.SUCCESS,upload_data:e}),await this.partRepository.delete({file_id:t.id}),await this.onSuccess(t)}async onPartError(t,s){s.upload_status!==types_1.UploadStatus.ERROR&&(await this.fileRepository.update(s.id,{upload_status:types_1.UploadStatus.ERROR}),await this.onError(t,s))}constructor(t){this.baseParams=t,this.fileRepository=t.dataSource.getRepository(entities_1.UploadFile),this.partRepository=t.dataSource.getRepository(entities_1.UploadPart),this.thread_count=this.getValidThreads(t.threads??this.thread_count),t.onProgress&&(this.onProgress=t.onProgress.bind(this)),t.onSuccess&&(this.onSuccess=t.onSuccess.bind(this)),t.onError&&(this.onError=t.onError.bind(this))}async setThreads(t){const s=this.getValidThreads(t);this.thread_count=s,await this.run()}is_initing=!1;async init(){this.is_initing=!0;const t=`${this.hostType}_status`;this.partRepository.update({uid:this.baseParams.uid,[t]:types_1.UploadStatus.PROCESS},{[t]:types_1.UploadStatus.NULL}),this.is_initing=!1}async start(){await this.run()}async stopFiles(t){await this.partRepository.update({file_id:(0,typeorm_1.In)(t),is_paused:!1},{is_paused:!0}),await this.fileRepository.update({id:(0,typeorm_1.In)(t),is_paused:!1},{is_paused:!0});const s=this.threads.filter((s=>t.includes(s.file_id)));await Promise.all(s.map((t=>t.stop()))),await this.start()}async stopAll(){await this.partRepository.update({uid:this.baseParams.uid},{is_paused:!0}),await this.fileRepository.update({uid:this.baseParams.uid,is_folder:!1},{is_paused:!0}),this.thread_count=0,await this.run()}async repairFiles(t){const s=`${this.hostType}_status`;await this.partRepository.update({file_id:(0,typeorm_1.In)(t),[s]:types_1.UploadStatus.ERROR},{[s]:types_1.UploadStatus.NULL}),await this.fileRepository.update({id:(0,typeorm_1.In)(t),encode_status:types_1.EncodeStatus.ERROR},{encode_status:types_1.EncodeStatus.NULL}),await this.run()}async run(){for(;this.isRunning;)await new Promise((t=>{setTimeout(t,200)}));this.isRunning=!0;const t=`${this.hostType}_status`,s=this.threads.length,i=this.thread_count;if(s<i){const e=i-s;for(let s=0;s<e;s++){const s=await this.partRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,[t]:types_1.UploadStatus.NULL},order:{created_at:"ASC"}});if(!s)break;const i=this.getThread(s);this.threads.push(i),await this.partRepository.update(s.id,{[t]:types_1.UploadStatus.PROCESS}),i.start()}if(this.threads.length<i){const t=await this.dequeueOneFile();this.isRunning=!1,t&&await this.run()}}else if(s>i){const t=s-i,e=this.threads.slice(0,t);for(const t of e)await t.stop()}this.isRunning=!1}async dequeueOneFile(){for(;isDequeueing;)await new Promise((t=>{setTimeout(t,50)}));isDequeueing=!0;try{const t=await this.fileRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,is_folder:!1,encode_status:types_1.EncodeStatus.SUCCESS,upload_status:types_1.UploadStatus.NULL},order:{created_at:"ASC"}});if(!t)return!1;await this.fileRepository.update(t.id,{upload_status:types_1.UploadStatus.PROCESS}),await(0,prepare_1.prepare)({file_id:t.id,fileRepository:this.fileRepository,createCloudRecord:this.baseParams.createCloudRecord});const{source_data:s,txt_data:i,img_data:e,media_data:a}=t,o=[...s.parts??[],...i.parts??[],...e.parts??[],...a.parts??[]].map((s=>({uid:this.baseParams.uid,file_id:t.id,info:s,filepath:t.path,output_root:t.output_root,size:s.size})));return await this.partRepository.save(o),!0}finally{isDequeueing=!1}}}exports.BaseProcessor=BaseProcessor;
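`BaseProcessor.run()` is the scheduling core both processors share: a 200 ms spin lock guards reentry, pending parts are claimed by flipping their `<host>_status` from NULL to PROCESS, an empty part queue triggers `dequeueOneFile` (which calls `prepare` and fans the next file's `source/txt/img/media` parts out into `UploadPart` rows), and a lowered thread target stops surplus threads. Its control flow with the TypeORM queries abstracted away (a sketch; `claimNext` and `enqueueNextFile` stand in for the real queries):

```ts
// Skeleton of the BaseProcessor.run() pump: scale a worker pool toward
// `target`, refilling the queue one file at a time when it runs dry.
type Worker = { start(): void; stop(): Promise<void> };

class Pool {
  private running = false;
  private workers: Worker[] = [];

  constructor(
    private target: number,
    private claimNext: () => Promise<Worker | null>, // claim one pending part
    private enqueueNextFile: () => Promise<boolean>, // split next file into parts
  ) {}

  async run(): Promise<void> {
    while (this.running) await new Promise((r) => setTimeout(r, 200)); // reentry guard
    this.running = true;
    if (this.workers.length < this.target) {
      while (this.workers.length < this.target) {
        const w = await this.claimNext();
        if (!w) break;  // nothing pending right now
        this.workers.push(w);
        w.start();      // fire and forget; a worker removes itself when done
      }
      if (this.workers.length < this.target) {
        const refilled = await this.enqueueNextFile();
        this.running = false;
        if (refilled) await this.run(); // new parts exist: claim them
        return;
      }
    } else if (this.workers.length > this.target) {
      const surplus = this.workers.slice(0, this.workers.length - this.target);
      for (const w of surplus) await w.stop(); // shed surplus workers
    }
    this.running = false;
  }
}
```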
package/dist/types/main.d.ts
CHANGED
@@ -1,17 +1,56 @@
-import {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+import { UploadFile } from '@soga/entities';
+import { RecordDetail, RecordFtype, RecordType } from '@soga/types';
+import { DataSource } from 'typeorm';
+export interface UploaderParams {
+    uid: number;
+    dataSource: DataSource;
+    onProgress?: (file: UploadFile) => Promise<void>;
+    onSuccess?: (file: UploadFile) => Promise<void>;
+    onError?: (error: Error, file: UploadFile) => Promise<void>;
+    createCloudRecord: (params: Partial<{
+        uid: number;
+        space_id: number;
+        name: string;
+        parent_id: number;
+        type: RecordType;
+        ftype: RecordFtype;
+    }>) => Promise<RecordDetail>;
+    getBaiduAccessToken: (baidu_host_id: number) => Promise<string>;
+    getAliAccessToken: (ali_host_id: number) => Promise<string>;
+}
+export interface ProcessorParams {
+    uid: number;
+    dataSource: DataSource;
+    threads?: number;
+    debug?: boolean;
+    onProgress?: (file: UploadFile) => Promise<void>;
+    onSuccess?: (file: UploadFile) => Promise<void>;
+    onError?: (error: Error, file: UploadFile) => Promise<void>;
+    createCloudRecord: (params: Partial<{
+        uid: number;
+        space_id: number;
+        name: string;
+        parent_id: number;
+        type: RecordType;
+        ftype: RecordFtype;
+    }>) => Promise<RecordDetail>;
+}
+export interface BaiduProcessorParams extends ProcessorParams {
+    getAccessToken: (baidu_host_id: number) => Promise<string>;
+}
+export interface GetBaiduProcessorParams extends BaiduProcessorParams {
+    unique: string | number;
+}
+export interface AliProcessorParams extends ProcessorParams {
+    getAccessToken: (ali_host_id: number) => Promise<string>;
+}
+export interface GetAliProcessorParams extends AliProcessorParams {
+    unique: string | number;
+}
+export type ThreadType = {
+    file_id: number;
+    part_id: number;
+    uid: number;
+    start: () => Promise<void>;
+    stop: () => Promise<void>;
 };
package/dist/uploader/uploader.d.ts
ADDED
@@ -0,0 +1,26 @@
+import { DataSource, Repository } from 'typeorm';
+import { UploadFile, UploadPart } from '@soga/entities';
+import { UploaderParams } from '../types/main';
+import { BaseProcessor } from '../processor/base';
+export declare class Uploader {
+    protected params: UploaderParams;
+    protected uid: number;
+    protected dataSource: DataSource;
+    protected fileRepository: Repository<UploadFile>;
+    protected partRepository: Repository<UploadPart>;
+    constructor(params: UploaderParams);
+    protected processorMap: Map<number, BaseProcessor>;
+    init(): Promise<void>;
+    start(): Promise<void>;
+    startHosts({ ali_host_id, baidu_host_id, }: {
+        ali_host_id?: number;
+        baidu_host_id?: number;
+    }): Promise<void>;
+    stopAll(): Promise<void>;
+    stopFiles(ids: number[]): Promise<void>;
+    unpauseFiles(ids: number[]): Promise<void>;
+    repairFiles(ids: number[]): Promise<void>;
+    private startHost;
+    private startAli;
+    private startBaidu;
+}
package/dist/uploader/uploader.js
ADDED
@@ -0,0 +1 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),baidu_1=require("../processor/baidu"),ali_1=require("../processor/ali");class Uploader{params;uid;dataSource;fileRepository;partRepository;constructor(s){this.params=s,this.uid=s.uid,this.dataSource=s.dataSource,this.fileRepository=this.dataSource.getRepository(entities_1.UploadFile),this.partRepository=this.dataSource.getRepository(entities_1.UploadPart)}processorMap=new Map;async init(){}async start(){await Promise.all([this.startAli(),this.startBaidu()])}async startHosts({ali_host_id:s,baidu_host_id:t}){s&&await this.startHost(s,types_1.HostType.ALI),t&&await this.startHost(t,types_1.HostType.BAIDU)}async stopAll(){const s=this.processorMap.values();for(const t of s)await t.stopAll()}async stopFiles(s){const t=this.processorMap.values();for(const e of t)await e.stopFiles(s)}async unpauseFiles(s){await this.partRepository.update({file_id:(0,typeorm_1.In)(s),is_paused:!0},{is_paused:!1}),await this.fileRepository.update({id:(0,typeorm_1.In)(s),is_paused:!0},{is_paused:!1});const t=this.processorMap.values();for(const s of t)await s.start()}async repairFiles(s){const t=this.processorMap.values();for(const e of t)await e.repairFiles(s)}async startHost(s,t){const e={uid:this.uid,dataSource:this.dataSource,unique:s,createCloudRecord:this.params.createCloudRecord.bind(this),onProgress:this.params.onProgress?.bind(this),onSuccess:this.params.onSuccess?.bind(this),onError:this.params.onError?.bind(this)};if(t==types_1.HostType.BAIDU){const t=await(0,baidu_1.getBaiduProcessor)({...e,getAccessToken:this.params.getBaiduAccessToken.bind(this)});this.processorMap.has(s)||this.processorMap.set(s,t),await t.start()}else if(t==types_1.HostType.ALI){const t=await(0,ali_1.getAliProcessor)({...e,getAccessToken:this.params.getAliAccessToken.bind(this)});this.processorMap.has(s)||this.processorMap.set(s,t),await t.start()}}async startAli(){const s=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.ali_host_id").getRawMany()).map((s=>s.ali_host_id));if(s.length)for(const t of s)await this.startHost(t,types_1.HostType.ALI)}async startBaidu(){const s=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.baidu_host_id").getRawMany()).map((s=>s.baidu_host_id));if(s.length)for(const t of s)await this.startHost(t,types_1.HostType.BAIDU)}}exports.Uploader=Uploader;
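The new `Uploader` facade discovers the distinct `ali_host_id`/`baidu_host_id` values in the file table, spins up one processor per host account (binding the shared callbacks plus the matching token getter), and delegates its lifecycle methods to every registered processor. A hypothetical driving sequence (the ids are made up; the method names and semantics come from `uploader.d.ts`/`uploader.js` above):

```ts
import { DataSource } from 'typeorm';
import { getUploader } from '@soga/uploader';

// File ids and host ids below are invented for illustration.
async function driveUploads(dataSource: DataSource, uid: number) {
  const uploader = await getUploader({
    uid,
    dataSource,
    createCloudRecord: async () => { throw new Error('app-specific'); },
    getAliAccessToken: async () => 'ali-token-placeholder',
    getBaiduAccessToken: async () => 'baidu-token-placeholder',
  });

  await uploader.startHosts({ ali_host_id: 1, baidu_host_id: 2 }); // per-account processors
  await uploader.stopFiles([101, 102]);    // pause: flags parts/files is_paused, stops threads
  await uploader.unpauseFiles([101, 102]); // resume: clears the flags, restarts processors
  await uploader.repairFiles([101]);       // retry: errored part statuses reset to NULL
}
```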
package/package.json
CHANGED
@@ -1,57 +1,59 @@
-{
-  "name": "@soga/uploader",
-  "version": "0.0.
-  "publishConfig": {
-    "access": "public"
-  },
-  "description": "",
-  "main": "dist/main.js",
-  "types": "dist/main.d.ts",
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "build": "rimraf dist && tsc && ts-node ./scripts/minify",
-    "minify": "ts-node ./scripts/minify",
-    "demo_backup": "ts-node ./demo/demo.ts",
-    "demo": "ts-node ./demo/demo.ts",
-    "worker": "tsc && ts-node ./demo/worker.ts",
-    "test": "jest",
-    "dev": "ts-node ./src/main.ts",
-    "lint": "eslint . --ext .ts",
-    "prepublishOnly": "npm run build"
-  },
-  "devDependencies": {
-    "@types/fs-extra": "^11.0.4",
-    "@types/glob": "^8.1.0",
-    "@types/jest": "^29.5.4",
-    "@types/node": "^20.8.7",
-    "@typescript-eslint/eslint-plugin": "^6.4.1",
-    "@typescript-eslint/parser": "^6.4.1",
-    "better-sqlite3": "^11.8.1",
-    "eslint": "^8.47.0",
-    "eslint-config-prettier": "^9.0.0",
-    "eslint-plugin-jest": "^27.2.3",
-    "eslint-plugin-prettier": "^5.0.0",
-    "glob": "^10.3.3",
-    "jest": "^29.6.3",
-    "prettier": "^3.0.2",
-    "terser": "^5.19.2",
-    "ts-jest": "^29.1.1",
-    "ts-node": "^10.9.1",
-    "typescript": "^5.1.6"
-  },
-  "keywords": [],
-  "author": "",
-  "license": "ISC",
-  "dependencies": {
-    "@soga/
-    "@soga/
-    "@soga/
-    "@soga/types": "^0.0.
-    "@soga/utils": "^0.0.
-    "axios": "^1.7.9",
-    "fs-extra": "^11.2.0",
-    "
-
-
+{
+  "name": "@soga/uploader",
+  "version": "0.0.4",
+  "publishConfig": {
+    "access": "public"
+  },
+  "description": "",
+  "main": "dist/main.js",
+  "types": "dist/main.d.ts",
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "rimraf dist && tsc && ts-node ./scripts/minify",
+    "minify": "ts-node ./scripts/minify",
+    "demo_backup": "ts-node ./demo/demo.ts",
+    "demo": "ts-node ./demo/demo.ts",
+    "worker": "tsc && ts-node ./demo/worker.ts",
+    "test": "jest",
+    "dev": "ts-node ./src/main.ts",
+    "lint": "eslint . --ext .ts",
+    "prepublishOnly": "npm run build"
+  },
+  "devDependencies": {
+    "@types/fs-extra": "^11.0.4",
+    "@types/glob": "^8.1.0",
+    "@types/jest": "^29.5.4",
+    "@types/node": "^20.8.7",
+    "@typescript-eslint/eslint-plugin": "^6.4.1",
+    "@typescript-eslint/parser": "^6.4.1",
+    "better-sqlite3": "^11.8.1",
+    "eslint": "^8.47.0",
+    "eslint-config-prettier": "^9.0.0",
+    "eslint-plugin-jest": "^27.2.3",
+    "eslint-plugin-prettier": "^5.0.0",
+    "glob": "^10.3.3",
+    "jest": "^29.6.3",
+    "prettier": "^3.0.2",
+    "terser": "^5.19.2",
+    "ts-jest": "^29.1.1",
+    "ts-node": "^10.9.1",
+    "typescript": "^5.1.6"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "@soga/common": "^0.1.6",
+    "@soga/entities": "^0.0.23",
+    "@soga/single-uploader": "^0.0.10",
+    "@soga/types": "^0.0.63",
+    "@soga/utils": "^0.0.15",
+    "axios": "^1.7.9",
+    "fs-extra": "^11.2.0",
+    "level": "^9.0.0",
+    "piscina": "^5.0.0-alpha.0",
+    "typeorm": "^0.3.20"
+  }
+}
package/dist/types/runtime.d.ts
DELETED
@@ -1,42 +0,0 @@
-import { UploadFile } from '@soga/entities';
-import type { DataSource } from 'typeorm';
-export type Params = {
-    host_id: number;
-    dataSource: DataSource;
-    uid: number;
-    threads?: number;
-    debug?: boolean;
-    onProgress?: (percent: number) => Promise<void>;
-    onSuccess?: (file: UploadFile) => Promise<void>;
-    onError?: (file: UploadFile) => Promise<void>;
-    onComplete?: (file: UploadFile) => Promise<void>;
-};
-export type GroupItem = {
-    index: number;
-    file: string;
-    start: number;
-    end: number;
-    finish: boolean;
-};
-type CompleteGroupItem = GroupItem & {
-    size: number;
-};
-export type CompleteGroupList = CompleteGroupItem[];
-export declare enum UploadStep {
-    NULL = 0,
-    PROCESS = 1,
-    PRESUCCESS = 2,
-    SUCCESS = 3,
-    ERROR = 4
-}
-export type ThreadType = {
-    id: number;
-    uid: number;
-    file_id: number;
-    queue_id: number;
-    part_name: string;
-    group: string;
-    start: () => Promise<void>;
-    stop: () => Promise<void>;
-};
-export {};
package/dist/types/runtime.js
DELETED
@@ -1 +0,0 @@
-"use strict";var UploadStep;Object.defineProperty(exports,"__esModule",{value:!0}),exports.UploadStep=void 0,function(S){S[S.NULL=0]="NULL",S[S.PROCESS=1]="PROCESS",S[S.PRESUCCESS=2]="PRESUCCESS",S[S.SUCCESS=3]="SUCCESS",S[S.ERROR=4]="ERROR"}(UploadStep||(exports.UploadStep=UploadStep={}));
package/dist/uploader/ali.d.ts
DELETED
@@ -1,57 +0,0 @@
-import { UploadQueue, UploadChunk } from '@soga/entities';
-import { AliUploaderParams } from '../types/main';
-import { ThreadType } from '../types/runtime';
-import { BaseUploader } from './base';
-import { FilePartItem, HostType } from '@soga/types';
-export declare class AliUploader extends BaseUploader {
-    private axios;
-    protected params: AliUploaderParams;
-    protected hostType: HostType;
-    protected checked: Record<string, boolean>;
-    constructor(params: AliUploaderParams);
-    run(): Promise<void>;
-    getThread(chunk: UploadChunk): ThreadType;
-    private insertPrecreateInfo;
-    getPrecreateInfo({ queue, part_name, }: {
-        queue: UploadQueue;
-        part_name: string;
-    }): Promise<{
-        completed: boolean;
-        data: {
-            file_id: string;
-            upload_id?: undefined;
-            part_info_list?: undefined;
-        };
-    } | {
-        completed: boolean;
-        data: {
-            file_id: string;
-            upload_id: string;
-            part_info_list: {
-                upload_url: string;
-            }[];
-        };
-    }>;
-    getLatestUploadUrl({ file_id, upload_id, part_number, }: {
-        file_id: string;
-        upload_id: string;
-        part_number: number;
-    }): Promise<string>;
-    correctChunk(chunk: UploadChunk): Promise<void>;
-    init(): Promise<void>;
-    uploadChunk({ chunk, queue, need_update, }: {
-        chunk: UploadChunk;
-        queue: UploadQueue;
-        need_update: boolean;
-    }): Promise<boolean>;
-    getPostcreateInfo({ queue, part_name, }: {
-        queue: UploadQueue;
-        part_name: string;
-    }): Promise<{
-        file_id: string;
-    }>;
-    getProofCode({ part }: {
-        part: FilePartItem;
-    }): Promise<string>;
-    private initAxios;
-}
package/dist/uploader/ali.js
DELETED
@@ -1 +0,0 @@
-"use strict";var __importDefault=this&&this.__importDefault||function(e){return e&&e.__esModule?e:{default:e}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.AliUploader=void 0;const axios_1=__importDefault(require("axios")),runtime_1=require("../types/runtime"),base_1=require("./base"),utils_1=require("@soga/utils"),types_1=require("@soga/types"),fs_1=require("fs"),crypto_1=require("crypto"),typeorm_1=require("typeorm");class AliUploader extends base_1.BaseUploader{axios;params;hostType=types_1.HostType.ALI;checked={};constructor(e){super(e),this.params=e,this.initAxios()}async run(){for(;this.isRunning;)await new Promise((e=>{setTimeout(e,200)}));const e=this.threads.map((e=>e.group));this.isRunning=!0;const t=this.threads.length,i=this.thread_count;if(t<i){const a=i-t;for(let t=0;t<a;t++){const t=await this.chunkRepository.findOne({where:{uid:this.uid,type:this.hostType,step:runtime_1.UploadStep.NULL,group:e.length?(0,typeorm_1.Not)((0,typeorm_1.In)(e)):void 0,is_paused:!1},order:{created_at:"ASC"}});if(!t)break;const i=this.getThread(t);this.threads.push(i),await this.chunkRepository.update(t.id,{step:runtime_1.UploadStep.PROCESS}),e.push(i.group),i.start()}if(this.threads.length<i){const e=await this.dequeueChunk();this.isRunning=!1,e&&await this.run()}}this.isRunning=!1}getThread(e){const t=this.getGroupName(e.queue_id,e.part_name);return{id:e.id,uid:e.uid,file_id:e.file_id,queue_id:e.queue_id,part_name:e.part_name,group:t,start:async()=>{const{part_name:t,queue_id:i}=e;let a=await this.queueRepository.findOneBy({id:i});const{need_insert:r,need_update:s}=this.getPrecreateStatus(a,t);r&&(await this.insertPrecreateInfo(i,t),a=await this.queueRepository.findOneBy({id:i}));try{const t=await this.chunkRepository.findOneBy({id:e.id});if(!t)return;if(!await this.uploadChunk({queue:a,chunk:t,need_update:s}))throw new Error("upload chunk failed!");await this.onChunkSuccess({chunk_id:e.id})}catch(t){"AbortError"!==t.name?await this.onChunkError({chunk_id:e.id}):await this.chunkRepository.update(e.id,{step:runtime_1.UploadStep.NULL})}finally{await this.onChunkComplete({chunk_id:e.id})}},stop:async()=>{}}}async insertPrecreateInfo(e,t){if(await this.waitUpsertPrecreate(e,t))return;const i=await this.queueRepository.findOneBy({id:e}),a=i.parts[t];if(a.step==runtime_1.UploadStep.SUCCESS)return;const r=await this.getPrecreateInfo({queue:i,part_name:t});if(r.completed)await this.onPartSuccess({queue_id:e,part_name:t,result:{file_id:r.data.file_id}}),await this.increaseUploadedSize({queue_id:e,size:a.info.size});else{const i=await this.queueRepository.findOneBy({id:e});if(!i)return;i.precreate_info[t]={expired_at:Date.now()+342e4,data:r.data},await this.queueRepository.update(e,i)}}async getPrecreateInfo({queue:e,part_name:t}){const i=e.parts[t].info,a=await this.getProofCode({part:i}),r=(await this.getChunkList(e,i)).map(((e,t)=>({part_number:t+1,part_size:e.size}))),s=`dpan_p_${i.md5}.txt`,n={drive_id:this.params.drive_id,parent_file_id:this.params.parent_file_id,name:s,type:"file",check_name_mode:"refuse",part_info_list:r,size:i.size,content_hash_name:"sha1",content_hash:i.sha1,proof_code:a,proof_version:"v1"},o=await this.axios.post("/adrive/v1.0/openFile/create",n),{data:d}=o;return d.exist&&"available"===d.status||d.rapid_upload?{completed:!0,data:{file_id:d.file_id}}:d.upload_id?{completed:!1,data:{file_id:d.file_id,upload_id:d.upload_id,part_info_list:d.part_info_list}}:void 0}async getLatestUploadUrl({file_id:e,upload_id:t,part_number:i}){const a=this.params.drive_id,r=[{part_number:i}],s=await this.axios.post("/adrive/v1.0/openFile/getUploadUrl",{drive_id:a,file_id:e,upload_id:t,part_info_list:r}),{data:n}=s;return n.part_info_list.find((e=>e.part_number===i)).upload_url}async correctChunk(e){if(!e)return;const{part_name:t}=e,i=await this.queueRepository.findOneBy({id:e.queue_id});if(!i)return;const a=i.precreate_info[t];if(!a)return;const{upload_id:r,file_id:s}=a.data;(await this.axios.post("/adrive/v1.0/openFile/listUploadedParts",{upload_id:r,file_id:s,drive_id:this.params.drive_id})).data.uploaded_parts.find((t=>t.part_number==e.chunk_index+1))?await this.chunkRepository.update(e.id,{step:runtime_1.UploadStep.SUCCESS}):await this.chunkRepository.update(e.id,{step:runtime_1.UploadStep.NULL})}async init(){const e=await this.chunkRepository.findBy({uid:this.uid,type:this.hostType,step:(0,typeorm_1.In)([runtime_1.UploadStep.PROCESS,runtime_1.UploadStep.ERROR])}),t=[];for(const i of e)await this.correctChunk(i),t.includes(i.queue_id)||t.push(i.queue_id);for(const e of t)await this.reCalculatePercent({queue_id:e})}async uploadChunk({chunk:e,queue:t,need_update:i}){const{precreate_info:a}=t,r=a[e.part_name],{part_info_list:s,file_id:n,upload_id:o}=r.data,d=s.find((t=>t.part_number==e.chunk_index+1));if(!d)return!0;if(i){const e=await this.getLatestUploadUrl({file_id:n,upload_id:o,part_number:d.part_number});d.upload_url=e}try{const{file_path:t,start:i,end:a}=e.chunk_info,r=(0,fs_1.createReadStream)(t,{start:i,end:a});return await axios_1.default.put(d.upload_url,r,{headers:{"Content-Type":null},timeout:12e4}),!0}catch(t){return console.error(t),e.id,!1}}async getPostcreateInfo({queue:e,part_name:t}){const i=e.precreate_info[t].data,a=await this.axios.post("/adrive/v1.0/openFile/complete",{drive_id:this.params.drive_id,file_id:i.file_id,upload_id:i.upload_id}),{file_id:r}=a.data;return{file_id:r}}async getProofCode({part:e}){const{access_token:t}=await this.params.getAuthData(),{size:i}=e,a=(0,crypto_1.createHash)("md5").update(t).digest("hex").slice(0,16),r=BigInt(`0x${a}`),s=Number(r%BigInt(i)),n=s,o=Math.min(i,s+8),d=e.start+n,u=e.start+o;return(await(0,utils_1.getFileBufferSlice)(e.path,d,u-1)).toString("base64")}initAxios(){this.axios=axios_1.default.create({baseURL:"https://openapi.alipan.com"}),this.axios.interceptors.request.use((async e=>{const{access_token:t}=await this.params.getAuthData();return e.headers.Authorization=`Bearer ${t}`,e}),(e=>Promise.reject(e)))}}exports.AliUploader=AliUploader;
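One detail worth noting from the deleted `AliUploader`: `getProofCode` implemented AliyunDrive's `proof_code` ("v1") check by hashing the access token with MD5, taking the first 16 hex characters as a big integer, reducing it modulo the file size to pick an offset, and base64-encoding the 8 bytes read from there. The offset math, restated from the minified source above (a reconstruction sketch, not the original code):

```ts
import { createHash } from 'crypto';

// proof_code v1 offset selection, as in the removed getProofCode: the token
// hash deterministically picks an 8-byte window inside the file.
function proofRange(accessToken: string, size: number): { start: number; end: number } {
  const hex = createHash('md5').update(accessToken).digest('hex').slice(0, 16);
  const start = Number(BigInt(`0x${hex}`) % BigInt(size));
  return { start, end: Math.min(size, start + 8) }; // end is exclusive, clamped
}
```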
package/dist/uploader/baidu.d.ts
DELETED
@@ -1,45 +0,0 @@
-import { UploadQueue, UploadChunk } from '@soga/entities';
-import { BaiduUploaderParams } from '../types/main';
-import { ThreadType } from '../types/runtime';
-import { BaseUploader } from './base';
-import { HostType } from '@soga/types';
-export declare class BaiduUploader extends BaseUploader {
-    protected params: BaiduUploaderParams;
-    protected hostType: HostType;
-    constructor(params: BaiduUploaderParams);
-    init(): Promise<void>;
-    run(): Promise<void>;
-    getThread(chunk: UploadChunk): ThreadType;
-    private insertPrecreateInfo;
-    private updatePrecreateInfo;
-    getPrecreateInfo({ queue, part_name, upload_id, }: {
-        queue: UploadQueue;
-        part_name: string;
-        upload_id?: string;
-    }): Promise<{
-        completed: boolean;
-        data: {
-            upload_id: string;
-            block_list: number[];
-            fs_id?: undefined;
-        };
-    } | {
-        completed: boolean;
-        data: {
-            fs_id: number;
-            upload_id?: undefined;
-            block_list?: undefined;
-        };
-    }>;
-    uploadChunk({ chunk, queue, }: {
-        chunk: UploadChunk;
-        queue: UploadQueue;
-    }): Promise<boolean>;
-    getPostcreateInfo({ queue, part_name, }: {
-        queue: UploadQueue;
-        part_name: string;
-    }): Promise<{
-        fs_id: number;
-    }>;
-    private getSearchParams;
-}
package/dist/uploader/baidu.js
DELETED
@@ -1 +0,0 @@
-"use strict";var __importDefault=this&&this.__importDefault||function(e){return e&&e.__esModule?e:{default:e}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaiduUploader=void 0;const axios_1=__importDefault(require("axios")),runtime_1=require("../types/runtime"),base_1=require("./base"),utils_1=require("@soga/utils"),form_data_1=__importDefault(require("form-data")),types_1=require("@soga/types"),typeorm_1=require("typeorm");class BaiduUploader extends base_1.BaseUploader{params;hostType=types_1.HostType.BAIDU;constructor(e){super(e),this.params=e}async init(){const e=await this.chunkRepository.findBy({uid:this.uid,type:this.hostType,step:(0,typeorm_1.In)([runtime_1.UploadStep.PROCESS,runtime_1.UploadStep.ERROR])});if(!e.length)return;const t=[];for(const a of e)t.includes(a.queue_id)||t.push(a.queue_id);for(const e of t)await this.reCalculatePercent({queue_id:e});await this.chunkRepository.update({uid:this.uid,type:this.hostType,step:(0,typeorm_1.In)([runtime_1.UploadStep.PROCESS,runtime_1.UploadStep.ERROR])},{step:runtime_1.UploadStep.NULL})}async run(){for(;this.isRunning;)await new Promise((e=>{setTimeout(e,200)}));this.isRunning=!0;const e=this.threads.length,t=this.thread_count;if(e<t){const a=t-e;for(let e=0;e<a;e++){const e=await this.chunkRepository.findOne({where:{uid:this.uid,type:this.hostType,step:runtime_1.UploadStep.NULL,is_paused:!1},order:{created_at:"ASC"}});if(!e)break;const t=this.getThread(e);this.threads.push(t),await this.chunkRepository.update(e.id,{step:runtime_1.UploadStep.PROCESS}),t.start()}if(this.threads.length<t){const e=await this.dequeueChunk();this.isRunning=!1,e&&await this.run()}}this.isRunning=!1}getThread(e){const t=this.getGroupName(e.queue_id,e.part_name);return{id:e.id,uid:e.uid,file_id:e.file_id,queue_id:e.queue_id,part_name:e.part_name,group:t,start:async()=>{const{part_name:t,queue_id:a}=e;let i=await this.queueRepository.findOneBy({id:a});const{need_insert:s,need_update:r,need_upsert:n}=this.getPrecreateStatus(i,t);s&&await this.insertPrecreateInfo(a,t),r&&(await this.updatePrecreateInfo(a,t),await this.reCalculatePercent({queue_id:a})),n&&(i=await this.queueRepository.findOneBy({id:a}));try{const t=await this.chunkRepository.findOneBy({id:e.id});if(!t)return;if(!await this.uploadChunk({queue:i,chunk:t}))throw new Error("upload chunk failed!");await this.onChunkSuccess({chunk_id:e.id})}catch(t){"AbortError"!==t.name?await this.onChunkError({chunk_id:e.id}):await this.chunkRepository.update(e.id,{step:runtime_1.UploadStep.NULL})}finally{await this.onChunkComplete({chunk_id:e.id})}},stop:async()=>{}}}async insertPrecreateInfo(e,t){if(await this.waitUpsertPrecreate(e,t))return;const a=await this.queueRepository.findOneBy({id:e}),i=a.parts[t];if(i.step==runtime_1.UploadStep.SUCCESS)return;const s=await this.getPrecreateInfo({queue:a,part_name:t});if(s.completed)await this.onPartSuccess({queue_id:e,part_name:t,result:{fs_id:s.data.fs_id}}),await this.increaseUploadedSize({queue_id:e,size:i.info.size});else{const a={expired_at:Date.now()+216e5,data:s.data},i=await this.queueRepository.findOneBy({id:e});if(!i)return;i.precreate_info[t]=a,await this.queueRepository.update(e,i)}}async updatePrecreateInfo(e,t){if(await this.waitUpsertPrecreate(e,t))return;const a=await this.queueRepository.findOneBy({id:e});if(a.parts[t].step==runtime_1.UploadStep.SUCCESS)return;const{upload_id:i}=a.precreate_info[t].data,s=await this.getPrecreateInfo({queue:a,part_name:t,upload_id:i});if(!s.completed){const a={expired_at:Date.now()+216e5,data:s.data},i=await this.queueRepository.findOneBy({id:e});if(!i)return;i.precreate_info[t]=a,await this.queueRepository.update(e,i),await this.chunkRepository.update({queue_id:e,part_name:t,chunk_index:(0,typeorm_1.In)(s.data.block_list)},{step:runtime_1.UploadStep.NULL})}}async getPrecreateInfo({queue:e,part_name:t,upload_id:a}){const i=e.parts[t].info,{size:s,md4:r,md5:n}=i,o=await this.getMd5List(e,i),u=`${this.params.cloud_folder}/${n}.bin`,{access_token:d}=await this.params.getAccessToken(),p=`https://pan.baidu.com/rest/2.0/xpan/file?method=precreate&access_token=${d}`,c=this.getSearchParams({path:u,size:`${s}`,isdir:"0",block_list:JSON.stringify(o),autoinit:"1",rtype:"2","content-md5":n,"slice-md5":r,uploadid:a||void 0}),h=await axios_1.default.post(p,c,{headers:{"User-Agent":this.params.ua}}),{data:_}=h;return 1===_.return_type?{completed:!1,data:{upload_id:_.uploadid,block_list:_.block_list}}:{completed:!0,data:{fs_id:_.info.fs_id}}}async uploadChunk({chunk:e,queue:t}){const{access_token:a}=await this.params.getAccessToken(),{precreate_info:i}=t,s=i[e.part_name],{upload_id:r}=s.data,n=e.chunk_info.md5,o=`${this.params.cloud_folder}/${n}.bin`;try{const{file_path:t,start:i,end:s,md5:n,index:u}=e.chunk_info,d=await(0,utils_1.getFileBufferSlice)(t,i,s),p=`https://d.pcs.baidu.com/rest/2.0/pcs/superfile2?access_token=${a}&method=upload&type=tmpfile&path=${o}&uploadid=${r}&partseq=${u}`,c=new form_data_1.default;c.append("file",d,`${n}`);return!!await axios_1.default.post(p,c,{timeout:18e4,headers:{"User-Agent":this.params.ua}})}catch(e){return!1}}async getPostcreateInfo({queue:e,part_name:t}){const a=e.parts[t].info,i=e.precreate_info[t],{upload_id:s}=i.data,r=await this.getMd5List(e,a),n=`/apps/dpan/uploader/${a.md5}.bin`,o=a.size,{access_token:u}=await this.params.getAccessToken(),d=`https://pan.baidu.com/rest/2.0/xpan/file?method=create&access_token=${u}`,p=this.getSearchParams({path:n,size:`${o}`,isdir:"0",block_list:JSON.stringify(r),uploadid:s||void 0,rtype:"2"}),c=await axios_1.default.post(d,p,{headers:{"User-Agent":this.params.ua}});if(!c)return;const{data:h}=c;if(h.errno){const e=`upload create file error: ${h.errno}`;throw new Error(e)}return{fs_id:h.fs_id}}getSearchParams(e){Object.keys(e).forEach((t=>{void 0===e[t]&&delete e[t]}));return new URLSearchParams(e).toString()}}exports.BaiduUploader=BaiduUploader;
package/dist/uploader/base.d.ts
DELETED
@@ -1,82 +0,0 @@
-import { DataSource, Repository } from 'typeorm';
-import { Params, ThreadType } from '../types/runtime';
-import { UploadFile, UploadQueue, UploadChunk } from '@soga/entities';
-import { FilePartItem, HostType, WorkerPercent } from '@soga/types';
-export declare abstract class BaseUploader {
-    protected host_id: number;
-    protected chunkSize: number;
-    protected baseParams: Params;
-    protected onProgress: (percent: WorkerPercent) => Promise<void>;
-    protected onSuccess: (file: UploadFile) => Promise<void>;
-    protected onError: (file: UploadFile) => Promise<void>;
-    protected onComplete: (file: UploadFile) => Promise<void>;
-    protected uid: number;
-    protected isRunning: boolean;
-    protected processing: Record<string, boolean>;
-    protected dataSource: DataSource;
-    protected fileRepository: Repository<UploadFile>;
-    protected queueRepository: Repository<UploadQueue>;
-    protected chunkRepository: Repository<UploadChunk>;
-    protected threads: ThreadType[];
-    protected thread_count: number;
-    protected hostType: HostType;
-    protected maxThreads: number;
-    abstract run(): Promise<void>;
-    abstract getThread(chunk: UploadChunk): ThreadType;
-    abstract getPostcreateInfo(params: {
-        queue: UploadQueue;
-        part_name: string;
-    }): Promise<object>;
-    constructor(params: Params);
-    setThreads(threads: number): Promise<void>;
-    start(): Promise<void>;
-    repairFiles(ids: number): Promise<void>;
-    deleteFiles(ids: number[]): Promise<void>;
-    stopAll(): Promise<void>;
-    stopFiles(ids: number[]): Promise<void>;
-    protected dequeueChunk(): Promise<boolean>;
-    protected onFileSuccess({ queue_id }: {
-        queue_id: number;
-    }): Promise<void>;
-    protected onPartSuccess({ queue_id, part_name, result, }: {
-        queue_id: number;
-        part_name: string;
-        result: object;
-    }): Promise<void>;
-    protected increaseUploadedSize({ queue_id, size, }: {
-        queue_id: number;
-        size: number;
-    }): Promise<void>;
-    protected onChunkSuccess({ chunk_id }: {
-        chunk_id: number;
-    }): Promise<void>;
-    protected onChunkError({ chunk_id }: {
-        chunk_id: number;
-    }): Promise<void>;
-    protected onChunkComplete({ chunk_id }: {
-        chunk_id: number;
-    }): Promise<void>;
-    protected reCalculatePercent({ queue_id }: {
-        queue_id: number;
-    }): Promise<void>;
-    protected dequeueOneFile(): Promise<boolean>;
-    protected dequeueOnePart({ part, queue_id, }: {
-        part: FilePartItem;
-        queue_id: number;
-    }): Promise<(Partial<FilePartItem> & {
-        file_path: string;
-    })[]>;
-    protected getValidThreads(threads: number): number;
-    protected getGroupName(queue_id: number, part_name: string): string;
-    protected getPrecreateStatus(queue: UploadQueue, part_name: string): {
-        need_insert: boolean;
-        need_update: boolean;
-        need_upsert: boolean;
-    };
-    protected getChunkList(queue: UploadQueue, part: FilePartItem): Promise<(Partial<FilePartItem> & {
-        file_path: string;
-    })[]>;
-    protected getMd5List(queue: UploadQueue, part: FilePartItem): Promise<string[]>;
-    protected waitUpsertPrecreate(queue_id: number, part_name: string): Promise<boolean>;
-    private getFileParts;
-}
package/dist/uploader/base.js
DELETED
@@ -1 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseUploader=void 0;const typeorm_1=require("typeorm"),runtime_1=require("../types/runtime"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),chunk_1=require("../utils/chunk"),utils_1=require("@soga/utils");class BaseUploader{host_id;chunkSize=4194304;baseParams;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};onComplete=async()=>{};uid;isRunning=!1;processing={};dataSource;fileRepository;queueRepository;chunkRepository;threads=[];thread_count=3;hostType;maxThreads=10;constructor(t){this.baseParams=t,this.host_id=t.host_id,this.uid=t.uid,this.dataSource=t.dataSource,this.fileRepository=t.dataSource.getRepository(entities_1.UploadFile),this.queueRepository=t.dataSource.getRepository(entities_1.UploadQueue),this.chunkRepository=t.dataSource.getRepository(entities_1.UploadChunk),t.onProgress&&(this.onProgress=t.onProgress.bind(this)),t.onSuccess&&(this.onSuccess=t.onSuccess.bind(this)),t.onError&&(this.onError=t.onError.bind(this)),t.onComplete&&(this.onComplete=t.onComplete.bind(this)),this.thread_count=this.getValidThreads(t.threads??3)}async setThreads(t){const e=this.getValidThreads(t);this.thread_count=e,await this.run()}async start(){await this.run()}async repairFiles(t){const e=await this.queueRepository.findOneBy({file_id:t});if(!e)return;const i=Object.values(e.parts);for(const t of i)t.step===runtime_1.UploadStep.ERROR&&(await this.chunkRepository.update({uid:this.uid,queue_id:e.id,part_name:t.info.file,step:runtime_1.UploadStep.ERROR},{step:runtime_1.UploadStep.NULL}),t.step=runtime_1.UploadStep.NULL);e.step=runtime_1.UploadStep.NULL,await this.queueRepository.update(e.id,e),await this.run()}async deleteFiles(t){await this.queueRepository.delete({file_id:(0,typeorm_1.In)(t)}),await this.chunkRepository.delete({file_id:(0,typeorm_1.In)(t)})}async stopAll(){await this.queueRepository.update({uid:this.uid},{is_paused:!0}),await this.chunkRepository.update({uid:this.uid},{is_paused:!0}),this.thread_count=0,await this.run()}async stopFiles(t){const e=this.threads.filter((e=>t.includes(e.file_id)));for(const t of e)await t.stop();await this.queueRepository.update({file_id:(0,typeorm_1.In)(t)},{is_paused:!0}),await this.chunkRepository.update({file_id:(0,typeorm_1.In)(t)},{is_paused:!0})}async dequeueChunk(){const t=await this.queueRepository.findOne({where:{uid:this.uid,type:this.hostType,is_paused:!1,step:(0,typeorm_1.In)([runtime_1.UploadStep.NULL,runtime_1.UploadStep.PROCESS])},order:{step:"DESC"}});if(t){const e=Object.values(t.parts).filter((t=>t.step===runtime_1.UploadStep.NULL)).map((t=>t.info));if(!e?.length)return!1;const{length:i}=e,s={parts:[e[0]],size:e[0].size},a=1073741824;for(let t=1;t<i&&!(s.size>a);t++)s.parts.push(e[t]),s.size+=e[t].size;for(const e of s.parts){const i=await this.dequeueOnePart({part:e,queue_id:t.id});t.parts[e.file].step=runtime_1.UploadStep.PROCESS,t.chunk_list_info[e.file]=i}const o=Object.values(t.parts).find((t=>t.step===runtime_1.UploadStep.NULL));return t.step=o?runtime_1.UploadStep.PROCESS:runtime_1.UploadStep.PRESUCCESS,await this.queueRepository.update(t.id,t),!0}return!!await this.dequeueOneFile()&&await this.dequeueChunk()}async onFileSuccess({queue_id:t}){const e=await this.queueRepository.findOneBy({id:t});if(!e)return;const{file_id:i}=e,s=`${this.hostType}_upload_status`;await this.fileRepository.update(i,{[s]:types_1.UploadStatus.SUCCESS}),this.baseParams.debug||await this.queueRepository.delete({id:t});const a=await this.fileRepository.findOneBy({id:i});if(!a)return;const o=`${this.hostType}_upload_status`;if(a[o]===types_1.UploadStatus.SUCCESS)return;if(Object.values(types_1.HostType).filter((t=>0!=a[`${t}_host_id`])).filter((t=>a[`${t}_upload_status`]!=types_1.UploadStatus.SUCCESS)).length)return;await this.fileRepository.update(i,{upload_status:types_1.UploadStatus.SUCCESS});const u=await this.fileRepository.findOneBy({id:i});await this.onSuccess(u)}async onPartSuccess({queue_id:t,part_name:e,result:i}){const s=await this.queueRepository.findOneBy({id:t});if(!s)return;const a=s.type,o=s.file_id,u=await this.fileRepository.findOneBy({id:o}),{upload_data:r}=u;r[a]||(r[a]={}),r[a][e]=i,await this.fileRepository.update(u.id,{upload_data:r}),s.parts[e].step=runtime_1.UploadStep.SUCCESS,await this.queueRepository.update(t,s),await this.chunkRepository.delete({queue_id:t,part_name:e});if(Object.values(s.parts).find((t=>t.step!=runtime_1.UploadStep.SUCCESS)))return;const n=`${this.hostType}_upload_status`;await this.fileRepository.update(o,{[n]:types_1.UploadStatus.SUCCESS}),await this.onFileSuccess({queue_id:t})}async increaseUploadedSize({queue_id:t,size:e}){const i=await this.queueRepository.findOneBy({id:t});if(!i)return;const{uploaded_size:s,total_size:a}=i,o=s+(e??0);await this.queueRepository.update(t,{uploaded_size:o});const u=o/a,r=`upload_${this.hostType}`,n={id:i.file_id,type:"percent",data:{step:types_1.UploadProcessStep[r],percent:u}};this.onProgress(n)}async onChunkSuccess({chunk_id:t}){await this.chunkRepository.update(t,{step:runtime_1.UploadStep.SUCCESS});const e=await this.chunkRepository.findOneBy({id:t}),{queue_id:i,chunk_info:s,part_name:a}=e,{size:o=0}=s;await this.increaseUploadedSize({queue_id:i,size:o});if(await this.chunkRepository.findOneBy({queue_id:i,part_name:a,step:(0,typeorm_1.Not)(runtime_1.UploadStep.SUCCESS)}))return;const u=await this.queueRepository.findOneBy({id:i}),r=await this.getPostcreateInfo({queue:u,part_name:a});await this.onPartSuccess({queue_id:i,part_name:a,result:r});const n=this.getGroupName(i,a);delete this.processing[n]}async onChunkError({chunk_id:t}){await this.chunkRepository.update(t,{step:runtime_1.UploadStep.ERROR});const e=await this.chunkRepository.findOneBy({id:t}),{queue_id:i,part_name:s}=e,a=await this.queueRepository.findOneBy({id:i});if(!a)return;if(a.parts[s].step===runtime_1.UploadStep.ERROR)return;a.parts[s].step=runtime_1.UploadStep.ERROR,await this.queueRepository.update(i,a);const{file_id:o}=a,u=await this.fileRepository.findOneBy({id:o});if(!u)return;const r=`${this.hostType}_upload_status`;if(u[r]===types_1.UploadStatus.ERROR)return;if(await this.fileRepository.update(o,{[r]:types_1.UploadStatus.ERROR}),u.upload_status===types_1.UploadStatus.ERROR)return;await this.fileRepository.update(o,{upload_status:types_1.UploadStatus.ERROR});const n=await this.fileRepository.findOneBy({id:o});await this.onError(n),await this.onComplete(n)}async onChunkComplete({chunk_id:t}){this.threads=this.threads.filter((e=>e.id!==t)),await this.run()}async reCalculatePercent({queue_id:t}){const e=await this.queueRepository.findOneBy({id:t});let i=0;const s=Object.values(e.parts).filter((t=>t.step==runtime_1.UploadStep.SUCCESS));s.length&&(i+=s.reduce(((t,e)=>t+e.info.size),0));const a=Object.values(e.parts).filter((t=>t.step==runtime_1.UploadStep.PROCESS||t.step==runtime_1.UploadStep.PRESUCCESS));if(a.length){const e=a.map((t=>t.info.file));i=(await this.chunkRepository.findBy({queue_id:t,part_name:(0,typeorm_1.In)(e)})).reduce(((t,e)=>t+e.chunk_info.size),0)}await this.queueRepository.update(t,{uploaded_size:Math.min(i,e.total_size)})}async dequeueOneFile(){const t=`${this.hostType}_host_id`,e=`${this.hostType}_upload_status`,i=await this.fileRepository.findOneBy({is_paused:!1,encode_status:types_1.EncodeStatus.SUCCESS,[t]:this.host_id,[e]:types_1.UploadStatus.NULL});if(!i)return!1;const s=this.getFileParts(i),a={parts:{},size:0};for(const t of s)a.parts[t.file]={step:runtime_1.UploadStep.NULL,info:t},a.size+=t.size;return await this.queueRepository.save(this.queueRepository.create({uid:this.uid,type:this.hostType,file_id:i.id,is_paused:!1,step:runtime_1.UploadStep.NULL,uploaded_size:0,total_size:a.size,parts:a.parts})),await this.fileRepository.update(i.id,{upload_status:types_1.UploadStatus.PROCESS,[e]:types_1.UploadStatus.PROCESS}),!0}async dequeueOnePart({part:t,queue_id:e}){const i=this.getGroupName(e,t.file),s=(0,chunk_1.groupChunk)(t,this.chunkSize),a=[],o=[];let u=0;const r=await this.queueRepository.findOneBy({id:e});for(const n of s){const{path:s}=t;let p;this.hostType==types_1.HostType.BAIDU&&(p=await(0,utils_1.calculateMd5)({file:s,start:n.start,end:n.end}));const d={...n,md5:p,file_path:s};a.push(d),o.push({uid:this.uid,group:i,file_id:r.file_id,queue_id:e,part_name:t.file,type:this.hostType,chunk_index:u++,step:runtime_1.UploadStep.NULL,chunk_info:d})}return await this.dataSource.createQueryBuilder().insert().into(entities_1.UploadChunk).values(o).execute(),a}getValidThreads(t){return Math.min(Math.max(t??0,0),this.maxThreads)}getGroupName(t,e){return`${this.hostType}-${t}-${e}`}getPrecreateStatus(t,e){const i={need_insert:!1,need_update:!1,need_upsert:!1},s=t.precreate_info[e];return s?s.expired_at<Date.now()&&(i.need_update=!0):i.need_insert=!0,i.need_upsert=i.need_insert||i.need_update,i}async getChunkList(t,e){return t.chunk_list_info[e.file]}async getMd5List(t,e){return(await this.getChunkList(t,e)).map((t=>t.md5))}async waitUpsertPrecreate(t,e){const i=this.getGroupName(t,e);if(this.processing[i]){const i=async()=>{await new Promise((t=>{setTimeout(t,300)}));const s=await this.queueRepository.findOneBy({id:t});s&&s.parts[e].step!==runtime_1.UploadStep.SUCCESS&&(!s.precreate_info[e]||s.precreate_info[e].expired_at<Date.now())&&await i()};return await i(),!0}return this.processing[i]=!0,!1}getFileParts(t){const{media_data:e,source_data:i,txt_data:s,img_data:a}=t;return[...i?.parts??[],...e?.parts??[],...s?.parts??[],...a?.parts??[]]}}exports.BaseUploader=BaseUploader;
package/dist/utils/chunk.d.ts
DELETED
package/dist/utils/chunk.js
DELETED
@@ -1 +0,0 @@
-"use strict";function groupChunk(e,t=4194304){const s=[],i=e.size;if(i<=t)s.push({file:e.file,start:e.start,end:e.end,finish:!0});else{const n=i/t,r=Math.floor(n),f=n===r;for(let i=0;i<r;i++)s.push({file:e.file,start:e.start+i*t,end:e.start+(i+1)*t-1,finish:!!f&&i===r});if(!f){const i=e.start+r*t;s.push({file:e.file,start:i,end:e.end,finish:!0})}}return s.map(((e,t)=>({...e,size:e.end-e.start+1,index:t})))}Object.defineProperty(exports,"__esModule",{value:!0}),exports.groupChunk=groupChunk;
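The deleted `groupChunk` split a part into fixed 4 MiB (4194304-byte) chunks with inclusive `[start, end]` byte ranges, sizes computed as `end - start + 1`, plus a shorter tail when the size is not an exact multiple. A reimplementation sketch of the boundary math with a worked 10 MiB example:

```ts
// Boundary math of the removed groupChunk, reimplemented for clarity.
const CHUNK = 4 * 1024 * 1024; // 4194304 bytes, matching the deleted code

function chunkRanges(start: number, size: number, chunk = CHUNK) {
  const ranges: { index: number; start: number; end: number; size: number }[] = [];
  for (let i = 0, off = start; off < start + size; i++, off += chunk) {
    const end = Math.min(off + chunk, start + size) - 1; // inclusive end
    ranges.push({ index: i, start: off, end, size: end - off + 1 });
  }
  return ranges;
}

// A 10 MiB part at offset 0 yields three chunks:
// [0, 4194303], [4194304, 8388607], [8388608, 10485759] (a 2 MiB tail).
console.log(chunkRanges(0, 10 * 1024 * 1024));
```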