@soga/uploader 0.2.23 → 0.2.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hooks/complete.js +1 -1
- package/dist/hooks/trasform.js +1 -1
- package/dist/host-uploader/ali.js +1 -1
- package/dist/host-uploader/baidu.js +1 -1
- package/dist/host-uploader/base.d.ts +3 -8
- package/dist/host-uploader/base.js +1 -1
- package/dist/main.js +1 -1
- package/dist/uploader.d.ts +4 -2
- package/dist/uploader.js +1 -1
- package/package.json +5 -4
package/dist/hooks/complete.js
CHANGED
@@ -1 +1 @@
-
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.complete=complete;const sdk_1=require("@soga/sdk"),types_1=require("@soga/types");async function complete({file_id:e,sdk_domain:i,fileRepository:s}){const t=await s.findOneBy({id:e});let a=t.task_record_id;const
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.complete=complete;const sdk_1=require("@soga/sdk"),types_1=require("@soga/types");async function complete({file_id:e,sdk_domain:i,fileRepository:s}){const t=await s.findOneBy({id:e});let a=t.task_record_id;const r=(0,sdk_1.getSdk)(i),d={};let o;if(t.type==types_1.RecordType.AFFIX){const e=await r.getRecordInfo({space_id:t.space_id,record_id:t.task_record_id,refresh:!0}),{parent_id:i}=e;a=i,await async function(e,i){e.encoded.affix?.forEach((e=>{e.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!0,preview:!0}}))}));const s=await i.getRecordInfo({space_id:e.space_id,record_id:e.task_record_id,refresh:!0});s.manifest||(s.manifest={});o=s.manifest,o.meta||(o.meta={host_size:0});o.parts||(o.parts={});o.affix||(o.affix=[]);e.encoded.affix?.forEach((e=>{const{parts:i,...s}=e;o.affix.push({...s,parts:i.map((e=>e.file))})})),Object.assign(o.parts,d),p(),c()}(t,r)}else{if(0!==t.pid){a=(await s.findOneBy({id:t.pid})).cloud_info.id}await async function(e){const{type:i,inputs:s,encoded:t}=e;if(t.source?.parts){let e=!0;i!=types_1.RecordType.VIDEO&&i!=types_1.RecordType.AUDIO||(e=!1),i==types_1.RecordType.TXT&&(e=!1),t.source.parts.forEach((i=>{d[i.file]={md5:i.md5,size:i.size,source:!0,preview:e}}))}t.media?.parts&&t.media.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));t.img?.parts&&t.img.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));t.txt?.parts&&t.txt.parts.forEach((e=>{d[e.file]={md5:e.md5,size:e.size,source:!1,preview:!0}}));const a=s[0],r={host_size:0,filesize:a.filesize,filename:a.filename,btime:a.local_btime,ctime:a.local_ctime,mtime:a.local_mtime};o={meta:r,parts:d},t.source?.parts&&(o.source={head:t.source.head,parts:t.source.parts.map((e=>e.file))});if(t.media?.parts){const{parts:e,...i}=t.media;o.media={...i,parts:e.map((e=>e.file))}}if(t.img?.parts){const{meta:e,parts:i}=t.img;o.img={meta:e,preview:i.map((e=>({file:e.file,start:e.start,end:e.end}))),parts:i.map((e=>e.file))}}if(t.txt?.parts){const{map:e,pad:i,pages:s,parts:a}=t.txt;o.txt={entrance:e,pad:i,pages:s,parts:a.map((e=>e.file))}}return c(),p(),o}(t)}function c(){const e=Object.values(d),i={host:0,preview:0,source:0};e.forEach((e=>{i.host+=e.size})),o.meta.host_size=i.host}function p(){if(t.ali_host_id&&!o.ali){o.ali={drive_id:"",host_id:t.ali_host_id,info:{}};const e=t.ali_upload_result;console.log("part_map: ",Object.keys(d)),Object.keys(d).forEach((i=>{const s=e[i];o.ali.info[i]={file_id:s.file_id,sha1:s.sha1},o.ali.drive_id||(o.ali.drive_id=s.drive_id)}))}if(t.baidu_host_id){o.baidu={host_id:t.baidu_host_id,info:{}};const e=t.baidu_upload_result;Object.keys(d).forEach((i=>{const s=e[i];o.baidu.info[i]={fs_id:s.fs_id,md4:s.md4}}))}}await r.updateRecord({space_id:t.space_id,record_id:t.cloud_info.id,parent_id:a,manifest:JSON.stringify(o)})}
package/dist/hooks/trasform.js
CHANGED
@@ -1 +1 @@
-
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.transform=transform;const entities_1=require("@soga/entities"),types_1=require("@soga/types"),fs_extra_1=require("fs-extra"),typeorm_1=require("typeorm");async function transform(e){const i=e.dataSource.getRepository(entities_1.UploadFile),t=e.dataSource.getRepository(entities_1.UploadSuccess),a=await i.findOneBy({id:e.file_id}),o={root_id:a.root_id,pid:0,space_id:a.space_id,space_name:a.space_name,uid:a.uid,inputs:a.inputs.map((e=>({filename:e.filename,filepath:e.filepath,filesize:e.filesize}))),cloud_id:a.cloud_info.id,cloud_name:a.cloud_info.name,type:a.type},d=0==a.root_id;if(!d){const e=await t.findOneBy({root_id:a.root_id,pid:0});if(e)o.pid=e.id;else{const e=await i.findOneBy({id:a.root_id}),d=await t.save(t.create({root_id:a.root_id,pid:0,space_id:e.space_id,space_name:e.space_name,uid:e.uid,inputs:e.inputs?.map((e=>({filename:e.filename,filepath:e.filepath,filesize:e.filesize}))),cloud_id:e.cloud_info.id,cloud_name:e.cloud_info.name,type:e.type}));o.pid=d.id}await t.update({id:
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.transform=transform;const entities_1=require("@soga/entities"),types_1=require("@soga/types"),fs_extra_1=require("fs-extra"),typeorm_1=require("typeorm");async function transform(e){const i=e.dataSource.getRepository(entities_1.UploadFile),t=e.dataSource.getRepository(entities_1.UploadSuccess),a=await i.findOneBy({id:e.file_id}),o={root_id:a.root_id,pid:0,space_id:a.space_id,space_name:a.space_name,uid:a.uid,inputs:a.inputs.map((e=>({filename:e.filename,filepath:e.filepath,filesize:e.filesize}))),cloud_id:a.cloud_info.id,cloud_name:a.cloud_info.name,type:a.type},d=0==a.root_id;if(!d){const e=await t.findOneBy({root_id:a.root_id,pid:0});if(e)o.pid=e.id;else{const e=await i.findOneBy({id:a.root_id}),d=await t.save(t.create({root_id:a.root_id,pid:0,space_id:e.space_id,space_name:e.space_name,uid:e.uid,inputs:e.inputs?.map((e=>({filename:e.filename,filepath:e.filepath,filesize:e.filesize}))),cloud_id:e.cloud_info.id,cloud_name:e.cloud_info.name,type:e.type}));o.pid=d.id}await t.update({id:o.pid},{updated_at:new Date})}if(await t.save(t.create(o)),await i.delete({id:e.file_id}),await(0,fs_extra_1.remove)(a.output_root),!d){await i.findOneBy({root_id:a.root_id,type:(0,typeorm_1.Not)(types_1.RecordType.FOLDER)})?await i.increment({id:a.root_id},"completed_count",1):(await i.delete({root_id:a.root_id}),await i.delete({id:a.root_id}))}}
package/dist/host-uploader/ali.js
CHANGED
@@ -1 +1 @@
-
"use strict";var __importDefault=this&&this.__importDefault||function(e){return e&&e.__esModule?e:{default:e}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getAliUploader=exports.AliUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),node_worker_threads_1=require("node:worker_threads"),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class AliUploader extends base_1.BaseUploader{params;constructor(e){super(e,types_1.HostType.ALI),this.params=e}getThread(e){const t=new AbortController,{port1:
+
"use strict";var __importDefault=this&&this.__importDefault||function(e){return e&&e.__esModule?e:{default:e}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getAliUploader=exports.AliUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),node_worker_threads_1=require("node:worker_threads"),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class AliUploader extends base_1.BaseUploader{params;constructor(e){super(e,types_1.HostType.ALI),this.params=e}getThread(e){const t=new AbortController,{port1:a,port2:r}=new node_worker_threads_1.MessageChannel;return{file_id:e.file_id,part_id:e.id,uid:e.uid,start:async()=>{try{const{file_id:s,host_id:i,id:o,info:d}=e,l=await this.fileRepository.findOneBy({id:s}),{output_root:p}=l;r.on("message",(async e=>{"percent"==e.type&&1==e.data.percent&&console.log(`ali_${s}-${o}=====================`),await this.onPartProgress(e)})),r.on("error",(e=>{console.log("ali upload part error evt: "),console.log(e)}));const n={file_id:s,host_id:i,part_id:o,output_root:p,part:d,cloud_folder_id:l.cloud_info.hosts.ali?.file_id,sdk_domain:this.params.sdk_domain,port:a},_=await piscina.run(n,{name:"uploadAli",signal:t.signal,transferList:[a]});_&&(await this.partRepository.update(e.id,{upload_result:{ali:_},upload_status:types_1.UploadStatus.SUCCESS}),console.log(`ali_${s}-${o}----------------------------------`),await this.onPartSuccess({file_id:s,part_id:o,host_type:types_1.HostType.ALI}))}catch(t){"AbortError"==t.name?this.partRepository.update(e.id,{upload_status:types_1.UploadStatus.NULL}):await this.onPartError(t,await this.fileRepository.findOneBy({id:e.file_id}))}finally{this.threads=this.threads.filter((t=>t.part_id!=e.id)),r.close(),await this.start()}},stop:async()=>{t.abort(),this.threads=this.threads.filter((t=>t.part_id!==e.id))}}}}exports.AliUploader=AliUploader;const getAliUploader=async e=>{const{uid:t,host_id:a}=e,r=`${t}_${a}`;let s=uploaders.get(r);return s||(s=new AliUploader(e),s.setThreads(3),uploaders.set(r,s)),s};exports.getAliUploader=getAliUploader;
package/dist/host-uploader/baidu.js
CHANGED
@@ -1 +1 @@
-
"use strict";var __importDefault=this&&this.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getBaiduUploader=exports.BaiduUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),node_worker_threads_1=require("node:worker_threads"),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class BaiduUploader extends base_1.BaseUploader{params;constructor(t){super(t,types_1.HostType.BAIDU),this.params=t}getThread(t){const e=new AbortController,{port1:a,port2:
+
"use strict";var __importDefault=this&&this.__importDefault||function(t){return t&&t.__esModule?t:{default:t}};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getBaiduUploader=exports.BaiduUploader=void 0;const base_1=require("./base"),types_1=require("@soga/types"),piscina_1=__importDefault(require("piscina")),node_worker_threads_1=require("node:worker_threads"),piscina=new piscina_1.default({filename:require.resolve("@soga/part-uploader")}),uploaders=new Map;class BaiduUploader extends base_1.BaseUploader{params;constructor(t){super(t,types_1.HostType.BAIDU),this.params=t}getThread(t){const e=new AbortController,{port1:a,port2:r}=new node_worker_threads_1.MessageChannel;return{file_id:t.file_id,part_id:t.id,uid:t.uid,start:async()=>{try{const{file_id:s,host_id:i,id:o,info:d}=t,p=await this.fileRepository.findOneBy({id:s}),{output_root:u}=p;r.on("message",(async t=>{await this.onPartProgress(t)})),r.on("error",(t=>{console.log("baidu upload part error evt: "),console.log(t)}));const l={file_id:s,host_id:i,part_id:o,output_root:u,part:d,cloud_folder_path:p.cloud_info.hosts.baidu?.path,sdk_domain:this.params.sdk_domain,port:a},n=await piscina.run(l,{name:"uploadBaidu",signal:e.signal,transferList:[a]});n&&(await this.partRepository.update(t.id,{upload_result:{baidu:n},upload_status:types_1.UploadStatus.SUCCESS}),await this.onPartSuccess({file_id:s,part_id:o,host_type:types_1.HostType.BAIDU}))}catch(e){"AbortError"==e.name?this.partRepository.update(t.id,{upload_status:types_1.UploadStatus.NULL}):await this.onPartError(e,await this.fileRepository.findOneBy({id:t.file_id}))}finally{this.threads=this.threads.filter((e=>e.part_id!=t.id)),r.close(),await this.start()}},stop:async()=>{e.abort(),this.threads=this.threads.filter((e=>e.part_id!==t.id))}}}}exports.BaiduUploader=BaiduUploader;const getBaiduUploader=async t=>{const{uid:e,host_id:a}=t,r=`${e}_${a}`;let s=uploaders.get(r);return s||(s=new BaiduUploader(t),s.setThreads(3),uploaders.set(r,s)),s};exports.getBaiduUploader=getBaiduUploader;
package/dist/host-uploader/base.d.ts
CHANGED
@@ -18,10 +18,7 @@ export declare abstract class BaseUploader {
     abstract getThread(part: UploadPart): ThreadType;
     protected getValidThreads(threads: number): number;
     protected onPartProgress(params: UploadWorkerPercent): Promise<void>;
-    protected
-    protected checkAllUploaded(file: UploadFile): Promise<void>;
-    private processing_file_map;
-    protected onPartSuccess({ part_id, file_id, host_type, }: {
+    protected onPartSuccess({ file_id, host_type, }: {
         part_id: number;
         file_id: number;
         host_type: HostType;
@@ -29,11 +26,9 @@ export declare abstract class BaseUploader {
     protected onPartError(err: Error, file: UploadFile): Promise<void>;
     constructor(baseParams: UploaderParams, host_type: HostType);
     setThreads(threads: number): Promise<void>;
-    is_initing: boolean;
-    init(): Promise<void>;
     start(): Promise<void>;
-    stopFiles(ids: number[]
-    stopAll(
+    stopFiles(ids: number[]): Promise<void>;
+    stopAll(): Promise<void>;
     private run;
     protected dequeueOneFile(): Promise<void>;
 }
package/dist/host-uploader/base.js
CHANGED
@@ -1 +1 @@
-
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseUploader=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../hooks/prepare"),
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.BaseUploader=void 0;const typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),types_1=require("@soga/types"),prepare_1=require("../hooks/prepare"),utils_1=require("@soga/utils");let isDequeueing=!1;class BaseUploader{hostType;onProgress=async()=>{};onSuccess=async()=>{};onError=async()=>{};successRepository;fileRepository;partRepository;isRunning=!1;thread_count=1;maxThreads=10;threads=[];baseParams;getValidThreads(t){return Math.min(Math.max(t??0,0),this.maxThreads)}async onPartProgress(t){const{id:s,data:e,type:i}=t;if("percent"!=i)return;const{part_id:a,percent:o}=e,r=await this.partRepository.findOneBy({id:a});if(!r)return;if(r.upload_status==types_1.UploadStatus.ERROR)return;if(r.upload_status==types_1.UploadStatus.SUCCESS)return;await this.partRepository.update(a,{upload_percent:o});const p=await this.partRepository.findBy({file_id:s,host_type:this.hostType});let n=0,d=0;for(const t of p)n+=t.info.size,d+=t.info.size*t.upload_percent;const u=d/n,h=await this.fileRepository.findOneBy({id:s}),l=types_1.UploadProcessStep[`upload_${this.hostType}`];h.progress[l].percent=u,h.percent=(0,utils_1.getProgressPercent)(h.progress),await this.fileRepository.update(s,{progress:h.progress,percent:h.percent}),await this.onProgress(h)}async onPartSuccess({file_id:t,host_type:s}){const e=await this.fileRepository.findOneBy({id:t}),i=`${s}_upload_status`;if(e[i]==types_1.UploadStatus.ERROR)return;if(await this.partRepository.findOneBy({file_id:e.id,host_type:s,upload_status:(0,typeorm_1.Not)(types_1.UploadStatus.SUCCESS)}))return;const a=await this.partRepository.findBy({file_id:e.id,host_type:s}),o={},r={};for(const t of a)t.host_type==types_1.HostType.BAIDU&&(o[t.info.file]={...t.upload_result.baidu,md4:t.info.md4}),t.host_type==types_1.HostType.ALI&&(r[t.info.file]={...t.upload_result.ali,sha1:t.info.sha1});const p=`${s}_upload_result`;let n=o;s==types_1.HostType.ALI&&(n=r);const d=await this.fileRepository.findOneBy({id:e.id}),u=types_1.UploadProcessStep[`upload_${this.hostType}`];d.progress[u].percent=1,d.percent=(0,utils_1.getProgressPercent)(d.progress),await this.fileRepository.update(e.id,{[i]:types_1.UploadStatus.SUCCESS,[p]:n,progress:d.progress,percent:d.percent});const h=await this.fileRepository.findOneBy({id:e.id});await this.onSuccess(h)}async onPartError(t,s){console.log("on part error: ",s.id),console.log(t);const e=`${this.hostType}_upload_status`;s[e]!=types_1.UploadStatus.ERROR&&(await this.fileRepository.update(s.id,{[e]:types_1.UploadStatus.ERROR}),await this.onError(t,s))}constructor(t,s){this.hostType=s,this.baseParams=t;const{dataSource:e}=t;this.fileRepository=e.getRepository(entities_1.UploadFile),this.successRepository=e.getRepository(entities_1.UploadSuccess),this.partRepository=e.getRepository(entities_1.UploadPart),this.thread_count=this.getValidThreads(t.threads??this.thread_count),t.onProgress&&(this.onProgress=t.onProgress.bind(this)),t.onSuccess&&(this.onSuccess=t.onSuccess.bind(this)),t.onError&&(this.onError=t.onError.bind(this))}async setThreads(t){const s=this.getValidThreads(t);this.thread_count=s,await this.run()}async start(){await this.run()}async stopFiles(t){const s=this.threads.filter((s=>t.includes(s.file_id)));await Promise.all(s.map((t=>t.stop()))),await this.start()}async stopAll(){await Promise.all(this.threads.map((t=>t.stop())))}async run(){for(;this.isRunning;)await new Promise((t=>{setTimeout(t,200)}));this.isRunning=!0;const t=Date.now(),s=await 
this.partRepository.findBy({uid:this.baseParams.uid,is_paused:!1,upload_status:types_1.UploadStatus.PROCESS,host_id:this.baseParams.host_id});if(s.length){const t=this.threads.map((t=>t.part_id)),e=s.filter((s=>!t.includes(s.id)));if(e.length){const t=e.map((t=>t.id));await this.partRepository.update({id:(0,typeorm_1.In)(t),upload_status:types_1.UploadStatus.PROCESS},{upload_status:types_1.UploadStatus.NULL})}}console.log("exception time: ",Date.now()-t);const e=this.threads.length,i=this.thread_count;if(e<i){const t=i-e,s=async()=>await this.partRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,upload_status:types_1.UploadStatus.NULL,host_id:this.baseParams.host_id},order:{created_at:"ASC"}});for(let e=0;e<t;e++){const t=await s();if(!t)break;const e=this.getThread(t);this.threads.push(e),await this.partRepository.update(t.id,{upload_status:types_1.UploadStatus.PROCESS}),e.start()}if(this.threads.length<i){await this.dequeueOneFile();await s()&&(this.isRunning=!1,await this.run())}}else if(e>i){const t=e-i,s=this.threads.slice(0,t);for(const t of s)await t.stop()}this.isRunning=!1}async dequeueOneFile(){for(;isDequeueing;)await new Promise((t=>{setTimeout(t,50)}));isDequeueing=!0;try{const t=`${this.hostType}_upload_status`,s=`${this.hostType}_host_id`,e=await this.fileRepository.findOne({where:{uid:this.baseParams.uid,is_paused:!1,encode_status:types_1.EncodeStatus.SUCCESS,[t]:(0,typeorm_1.IsNull)(),upload_status:(0,typeorm_1.In)([types_1.UploadStatus.NULL,types_1.UploadStatus.PROCESS])},order:{created_at:"ASC"}});if(!e)return;await this.fileRepository.update(e.id,{[t]:types_1.UploadStatus.PROCESS,upload_status:types_1.UploadStatus.PROCESS}),await(0,prepare_1.prepare)({file_id:e.id,sdk_domain:this.baseParams.sdk_domain,fileRepository:this.fileRepository});const{encoded:i}=e,{affix:a,source:o,txt:r,img:p,media:n}=i,d=[...o?.parts??[],...r?.parts??[],...p?.parts??[],...n?.parts??[]];a?.forEach((t=>{t.parts?.forEach((t=>{d.push(t)}))}));const u=e[s],h=d.map((t=>({uid:this.baseParams.uid,file_id:e.id,info:t,output_root:e.output_root,host_id:u,host_type:this.hostType})));return void await this.partRepository.save(h)}finally{isDequeueing=!1}}}exports.BaseUploader=BaseUploader;
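Note: the rewritten BaseUploader above serializes its queue work with boolean flags: run() waits on the per-instance isRunning flag before resetting orphaned parts and topping up worker threads, and dequeueOneFile() waits on a module-level isDequeueing flag before pulling the next encoded file and creating its part rows. A minimal TypeScript sketch of that polling-lock pattern follows; the helper names are assumptions, not the package API.
// Sketch only: the polling-lock pattern, with assumed helper names.
let isDequeueing = false;

async function withQueueLock<T>(task: () => Promise<T>): Promise<T> {
  // Poll until the previous holder releases the flag (the diff uses 50-200 ms waits).
  while (isDequeueing) {
    await new Promise((resolve) => setTimeout(resolve, 50));
  }
  isDequeueing = true;
  try {
    return await task();
  } finally {
    isDequeueing = false;
  }
}

// Usage: only one dequeue at a time may claim the next pending file or part.
async function dequeueNext(): Promise<void> {
  await withQueueLock(async () => {
    // ...find the oldest pending row and mark it PROCESS before releasing the lock...
  });
}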
package/dist/main.js
CHANGED
@@ -1 +1 @@
-
"use strict";var __createBinding=this&&this.__createBinding||(Object.create?function(e,t,r
+
"use strict";var __createBinding=this&&this.__createBinding||(Object.create?function(e,t,i,r){void 0===r&&(r=i);var a=Object.getOwnPropertyDescriptor(t,i);a&&!("get"in a?!t.__esModule:a.writable||a.configurable)||(a={enumerable:!0,get:function(){return t[i]}}),Object.defineProperty(e,r,a)}:function(e,t,i,r){void 0===r&&(r=i),e[r]=t[i]}),__exportStar=this&&this.__exportStar||function(e,t){for(var i in e)"default"===i||Object.prototype.hasOwnProperty.call(t,i)||__createBinding(t,e,i)};Object.defineProperty(exports,"__esModule",{value:!0}),exports.getUploader=void 0,__exportStar(require("./host-uploader/baidu"),exports),__exportStar(require("./host-uploader/ali"),exports);const uploader_1=require("./uploader"),instanceMap=new Map,getUploader=async e=>{for(;instanceMap.get(e.uid)?.initing;)await new Promise((e=>setTimeout(e,100)));if(instanceMap.get(e.uid)?.uploader)return instanceMap.get(e.uid).uploader;const t=instanceMap.keys();for(const i of t)if(i!=e.uid){for(;instanceMap.get(i)?.initing;)await new Promise((e=>setTimeout(e,50)));instanceMap.delete(i)}const i={uploader:new uploader_1.Uploader(e),initing:!0};return instanceMap.set(e.uid,i),await i.uploader.init(),instanceMap.set(e.uid,i),i.initing=!1,i.uploader};exports.getUploader=getUploader;
package/dist/uploader.d.ts
CHANGED
@@ -10,10 +10,12 @@ export declare class Uploader {
     protected partRepository: Repository<UploadPart>;
     constructor(params: GetUploaderParams);
     protected processorMap: Map<number, BaseUploader>;
+    private successing;
+    private onSuccess;
     init(): Promise<void>;
     start(): Promise<void>;
-    stopAll(
-    stopFiles(ids: number[]
+    stopAll(): Promise<void>;
+    stopFiles(ids: number[]): Promise<void>;
     repairAllParts(): Promise<void>;
     repairParts(file_ids: number[]): Promise<void>;
     private startHost;
package/dist/uploader.js
CHANGED
@@ -1 +1 @@
-
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),baidu_1=require("./host-uploader/baidu"),ali_1=require("./host-uploader/ali");class Uploader{params;uid;dataSource;fileRepository;partRepository;constructor(t){this.params=t,this.uid=t.uid,this.dataSource=t.dataSource,this.fileRepository=this.dataSource.getRepository(entities_1.UploadFile),this.partRepository=this.dataSource.getRepository(entities_1.UploadPart)}processorMap=new Map;async init(){}async start(){await Promise.all([this.startAli(),this.startBaidu()])}async stopAll(
+
"use strict";Object.defineProperty(exports,"__esModule",{value:!0}),exports.Uploader=void 0;const types_1=require("@soga/types"),typeorm_1=require("typeorm"),entities_1=require("@soga/entities"),baidu_1=require("./host-uploader/baidu"),ali_1=require("./host-uploader/ali"),complete_1=require("./hooks/complete"),trasform_1=require("./hooks/trasform");class Uploader{params;uid;dataSource;fileRepository;partRepository;constructor(t){this.params=t,this.uid=t.uid,this.dataSource=t.dataSource,this.fileRepository=this.dataSource.getRepository(entities_1.UploadFile),this.partRepository=this.dataSource.getRepository(entities_1.UploadPart)}processorMap=new Map;successing=!1;async onSuccess(t){for(;this.successing;)await new Promise((t=>setTimeout(t,100)));this.successing=!0,await(0,complete_1.complete)({file_id:t.id,fileRepository:this.fileRepository,sdk_domain:this.params.sdk_domain}),await(0,trasform_1.transform)({file_id:t.id,dataSource:this.dataSource}),console.log("onSuccess",t),this.successing=!1}async init(){}async start(){const t=await this.fileRepository.findBy({uid:this.uid,upload_status:types_1.UploadStatus.PROCESS});for(const s of t){const{ali_host_id:t,ali_upload_status:a,baidu_host_id:i,baidu_upload_status:e}=s;t&&a!=types_1.UploadStatus.SUCCESS||(i&&e!=types_1.UploadStatus.SUCCESS||await this.onSuccess(s))}await Promise.all([this.startAli(),this.startBaidu()])}async stopAll(){const t=this.processorMap.values();for(const s of t)await s.stopAll()}async stopFiles(t){const s=this.processorMap.values();for(const a of s)await a.stopFiles(t)}async repairAllParts(){await this.partRepository.update({uid:this.uid,upload_status:types_1.UploadStatus.ERROR},{upload_status:types_1.UploadStatus.NULL})}async repairParts(t){await this.partRepository.update({file_id:(0,typeorm_1.In)(t),upload_status:types_1.UploadStatus.ERROR},{upload_status:types_1.UploadStatus.NULL})}async startHost(t,s){if(this.processorMap.has(t)){const s=this.processorMap.get(t);return void await s.start()}const a={uid:this.uid,sdk_domain:this.params.sdk_domain,dataSource:this.dataSource,host_id:t,onProgress:this.params.onProgress,onSuccess:this.onSuccess.bind(this),onError:this.params.onError};if(s==types_1.HostType.BAIDU){const s=await(0,baidu_1.getBaiduUploader)(a);this.processorMap.set(t,s),await s.start()}else if(s==types_1.HostType.ALI){const s=await(0,ali_1.getAliUploader)(a);this.processorMap.set(t,s),await s.start()}}async startAli(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.ali_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.ali_host_id)).filter((t=>!!t));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.ALI)}async startBaidu(){const t=(await this.fileRepository.createQueryBuilder("file").select("DISTINCT file.baidu_host_id").where("file.uid = :uid",{uid:this.uid}).getRawMany()).map((t=>t.baidu_host_id)).filter((t=>!!t));if(t.length)for(const s of t)await this.startHost(s,types_1.HostType.BAIDU)}}exports.Uploader=Uploader;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@soga/uploader",
-  "version": "0.2.
+  "version": "0.2.24",
   "publishConfig": {
     "access": "public"
   },
@@ -21,6 +21,7 @@
     "prepublishOnly": "npm run build"
   },
   "devDependencies": {
+    "@soga/encoder": "^0.2.24",
     "@types/fs-extra": "^11.0.4",
     "@types/glob": "^8.1.0",
     "@types/jest": "^29.5.14",
@@ -47,12 +48,12 @@
     "typeorm": "*"
   },
   "dependencies": {
-    "@soga/entities": "^0.2.
-    "@soga/part-uploader": "^0.2.
+    "@soga/entities": "^0.2.24",
+    "@soga/part-uploader": "^0.2.24",
     "@soga/sdk": "^0.2.22",
     "@soga/types": "^0.2.22",
     "@soga/utils": "^0.2.22",
     "piscina": "^4.9.2"
   },
-  "gitHead": "
+  "gitHead": "a996cac62fa189a6a7e64f16ec247f4c04e07a4b"
 }